column               type            min   max
query_id             stringlengths   32    32
query                stringlengths   7     4.32k
positive_passages    listlengths     1     1
negative_passages    listlengths     88    101
query_id: 869a687f314723af129dc669b3e1ec6e
query: DeleteCollection deletes a collection of objects.
positive_passages: [ { "docid": "6d99b39b56c4544a27ef7814835bf459", "score": "0.6825743", "text": "func (c *FakeKmsCryptoKeyIamMembers) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(kmscryptokeyiammembersResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.KmsCryptoKeyIamMemberList{})\n\treturn err\n}", "title": "" } ]
negative_passages:
[ { "docid": "cdfd17257b3a46c1372f196037846e66", "score": "0.7737077", "text": "func (c *UserPolicyClient) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(resourceName).\n\t\tVersionedParams(&listOpts, dockerauthv1alpha1.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "ade215c87c1d48457785e241d61cd69e", "score": "0.7658931", "text": "func (c *proxiers) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOptions.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"proxiers\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "b6492815efd29771ba77a7c2f184555b", "score": "0.75911736", "text": "func (c *webhookIdentityProviders) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"webhookidentityproviders\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "35455babc69871392ef9cefd21cc7637", "score": "0.75905794", "text": "func (c *tridentBackends) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"tridentbackends\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "32e1cc5cd6dfb3af7a91f0284dc828db", "score": "0.75606257", "text": "func (c *idlers) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"idlers\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "a92fbe39205f96c9a131d60c2489147b", "score": "0.7556562", "text": "func (c *resourceSets) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOptions.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tResource(\"resourcesets\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "670d69b4a9b3ec01892934250f5a8225", "score": "0.75407207", "text": "func (c *meshGateways) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * 
time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"meshgateways\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "414b6325607b78271c8fcc1f400a4690", "score": "0.75234604", "text": "func (c *kuberaBackups) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"kuberabackups\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "6eb89eaea30ece9f12c7b8318f5b3f09", "score": "0.75142545", "text": "func (c *appawareHorizontalPodAutoscalers) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"appawarehorizontalpodautoscalers\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "2c138fc9ac0fc9b290ce1fd888267385", "score": "0.7498828", "text": "func (c *FakeConsoles) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewRootDeleteCollectionAction(consolesResource, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &configv1.ConsoleList{})\n\treturn err\n}", "title": "" }, { "docid": "b3ddac71302e6bb9d2dc35e00af9a333", "score": "0.7481251", "text": "func (c *pXBucketClaims) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"pxbucketclaims\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "dc36e98a10cf869cfa96bd371dd14412", "score": "0.747121", "text": "func (c *directCSIDrives) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tResource(\"directcsidrives\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "ce405f1128a8d2c2b1fcf9de424d66d1", "score": "0.7445744", "text": "func (c *volumeManagers) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"volumemanagers\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "14991f7f828bc6c50ed790ca9e3514bc", "score": "0.7442958", "text": "func (c *FakeOpenShiftAPIServers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := 
testing.NewRootDeleteCollectionAction(openshiftapiserversResource, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &operatorv1.OpenShiftAPIServerList{})\n\treturn err\n}", "title": "" }, { "docid": "8c44ffd90aad6b39a1aa0f70591509cd", "score": "0.74269223", "text": "func (c *consoleTemplates) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOptions.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"consoletemplates\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "47b68e0835e07525e6a0444b50c39cea", "score": "0.7411912", "text": "func DeleteCollection() error {\n\tvar db *sql.DB\n\tvar err error\n\tlogger := servicelog.GetInstance()\n\tif db, err = connectDatastore(); err != nil {\n\t\tlogger.Println(time.Now().UTC(), \"Error Connecting to datastore in DeleteCollection\")\n\t\treturn err\n\t}\n\tdefer db.Close()\n\t_, err = db.Exec(\"DELETE FROM ToDo\")\n\tif err != nil {\n\t\tlogger.Println(time.Now().UTC(), \"Error executing query in DeleteCollection\")\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "ef07aef4dadc1c4aedc289749b22ca15", "score": "0.7402773", "text": "func (c *FakeMoodles) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(moodlesResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &moodlecontrollerv1.MoodleList{})\n\treturn err\n}", "title": "" }, { "docid": "9d7cf90b5e7b098e1d3a13b6ffae523f", "score": "0.7372163", "text": "func (c *iBMMQTargets) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"ibmmqtargets\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "cff75f5617a5889f4726ec1144afae4f", "score": "0.7366214", "text": "func (c *reboots) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"reboots\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "89a45113f3e7b0022bedbb303043421c", "score": "0.73545456", "text": "func (c *daemonstools) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"daemonstools\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "46be02b3ca8ac229a54393270dd73c2b", "score": "0.7346574", "text": "func (c *machineImages) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\treturn c.client.Delete().\n\t\tResource(\"machineimages\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", 
"title": "" }, { "docid": "c80112cc6d40e217537cc07e9f481d17", "score": "0.73386335", "text": "func (c *FakeDialogflowCXWebhooks) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(dialogflowcxwebhooksResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.DialogflowCXWebhookList{})\n\treturn err\n}", "title": "" }, { "docid": "03f1958b2b5bc83df0c060f0e93fbba1", "score": "0.7334515", "text": "func (c *manageds) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"manageds\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "dbb2f27527356ca932859c492dfc2421", "score": "0.7316255", "text": "func (c *quarksJobs) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"quarksjobs\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "bb4cb14441f9b677160e98fa6e3fe0cf", "score": "0.731318", "text": "func (c *sSORules) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"ssorules\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "0a86e8bd44c641918e1eea138578aa35", "score": "0.7300107", "text": "func (c *notificationManagers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tResource(\"notificationmanagers\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "372bcf458ccbb69ea5a9a67c87c05ef0", "score": "0.72870505", "text": "func (c *dLPDeidentifyTemplates) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"dlpdeidentifytemplates\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "312f8c22d6ee31a52cee7d825e5d21b5", "score": "0.72676814", "text": "func (c *FakeInMemoryChannels) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(inmemorychannelsResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &messagingv1.InMemoryChannelList{})\n\treturn err\n}", "title": "" }, { "docid": "bcc880364119bb56bc2223ab6f95965a", "score": "0.7262489", "text": "func (c 
*petSets) DeleteCollection(options *api.DeleteOptions, listOptions api.ListOptions) error {\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"petsets\").\n\t\tVersionedParams(&listOptions, api.ParameterCodec).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "126f9b5fafb10b2c62b3f5f4c6afb6f2", "score": "0.726066", "text": "func (c *sampleSources) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"samplesources\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "be335a258ceaef5c6ee69b09081b3468", "score": "0.7248944", "text": "func (c *FakeFleetAutoscalers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(fleetautoscalersResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &autoscalingv1.FleetAutoscalerList{})\n\treturn err\n}", "title": "" }, { "docid": "5f993b027ab8889b5def3589e1d72a83", "score": "0.7246595", "text": "func (c *securityContextConstraintses) DeleteCollection(options *meta_v1.DeleteOptions, listOptions meta_v1.ListOptions) error {\n\treturn c.client.Delete().\n\t\tResource(\"securitycontextconstraintses\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "aa8cdacca145ff73488027123fe11742", "score": "0.72160417", "text": "func (c *raftReplicas) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"raftreplicas\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "5f3e5c8f985ac52c3271e4a489287221", "score": "0.7213732", "text": "func (c *virtulMachines) DeleteCollection(opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"virtulmachines\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "26ac8d388230eb9cfbe42dafdf106250", "score": "0.72119105", "text": "func (client *Client) DeleteCollection(request *DeleteCollectionRequest) (response *DeleteCollectionResponse, err error) {\n\tresponse = CreateDeleteCollectionResponse()\n\terr = client.DoAction(request, response)\n\treturn\n}", "title": "" }, { "docid": "65748281cba57846fd184e5753f8a7d4", "score": "0.7202564", "text": "func (c *FakeFoos) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(foosResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha2.FooList{})\n\treturn err\n}", "title": "" }, { "docid": "bd83d7b56a6738c40a7b84b8dcc8e1bf", "score": "0.7182291", "text": "func (c 
*clusterActiveOperands) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tResource(\"clusteractiveoperands\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "0f6d2fa9ccd68639b9e93fde908f5baa", "score": "0.71812046", "text": "func (c *rSAKeys) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOptions.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"rsakeys\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "a5b9a97ea139b742d30333239a6df51f", "score": "0.7168789", "text": "func (c *scyllaDBMonitorings) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"scylladbmonitorings\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "422d85047a99cc84460c052a34542761", "score": "0.71637654", "text": "func (c *FakePortworxDiags) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(portworxdiagsResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &portworxv1.PortworxDiagList{})\n\treturn err\n}", "title": "" }, { "docid": "ed5c2993e0fbda82532075411c2d1fc1", "score": "0.7157912", "text": "func (c *tokenRequests) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"tokenrequests\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "8b54449050ddeb0b922df6e96f451a07", "score": "0.7140683", "text": "func (c *oSConfigGuestPolicies) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"osconfigguestpolicies\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "606cac1c2e95c1b490877d6a3c548653", "score": "0.7140425", "text": "func (c *FakeKmsCryptoKeys) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(kmscryptokeysResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.KmsCryptoKeyList{})\n\treturn err\n}", "title": "" }, { "docid": 
"a7dd8143f8ae5cf05961c033406f1dc8", "score": "0.713361", "text": "func (c *FakeHelmRequests) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(helmrequestsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.HelmRequestList{})\n\treturn err\n}", "title": "" }, { "docid": "da9676f689a729e52896085510d401d9", "score": "0.7130856", "text": "func (c *daskWorkerGroups) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"daskworkergroups\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "f26ceed23986b6de120e259968af71b1", "score": "0.7101278", "text": "func (c *pDDrivers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tResource(\"pddrivers\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "c75d4829f810477a4a9ba517fa147cf5", "score": "0.70952505", "text": "func (c *FakeSchedulers) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionActionWithMultiTenancy(schedulersResource, c.ns, listOptions, c.te)\n\n\t_, err := c.Fake.Invokes(action, &schedulerv1.SchedulerList{})\n\treturn err\n}", "title": "" }, { "docid": "e2cc1f55b10e88cbe8fb3a99db72c205", "score": "0.7084225", "text": "func (c *groupBindings) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tResource(\"groupbindings\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "28e26927a2c3f580ce7c9168d7b3258d", "score": "0.708261", "text": "func (c *FakeShadowEndpointSlices) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(shadowendpointslicesResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.ShadowEndpointSliceList{})\n\treturn err\n}", "title": "" }, { "docid": "d5ec3c213b82c0e090750032411ab669", "score": "0.7081329", "text": "func (c *FakeSsmResourceDataSyncs) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(ssmresourcedatasyncsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.SsmResourceDataSyncList{})\n\treturn err\n}", "title": "" }, { "docid": "b3a0da446a12f51481f3109a0e7a81c2", "score": "0.7076991", "text": "func (c *FakeUserSSHKeys) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewRootDeleteCollectionAction(usersshkeysResource, listOpts)\n\n\t_, err := c.Fake.Invokes(action, 
&kubermaticv1.UserSSHKeyList{})\n\treturn err\n}", "title": "" }, { "docid": "0f83759c31ef8f43eacee2e06786a045", "score": "0.7068945", "text": "func (c *FakePruneServices) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(pruneservicesResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.PruneServiceList{})\n\treturn err\n}", "title": "" }, { "docid": "e113c1c6aca0a3c505a22d852216c597", "score": "0.70616704", "text": "func (c *azureKeyVaultSecretIdentities) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOptions.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"azurekeyvaultsecretidentities\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "8798378da8eca495eb6d65f5ada607c5", "score": "0.70521086", "text": "func (c *FakeRecoveryReplicatedVms) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(recoveryreplicatedvmsResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.RecoveryReplicatedVmList{})\n\treturn err\n}", "title": "" }, { "docid": "035f22df1ef19c65e449fdccc0affebc", "score": "0.70509326", "text": "func (s *Service) DeleteCollection(ctx context.Context, resourceID string) (string, error) {\n\turlParams := make(map[string]string)\n\turlParams[\"ID\"] = resourceID\n\n\treturn s.Delete(ctx, resources.CollectionType, urlParams)\n}", "title": "" }, { "docid": "663a2ae4cdcb431d0fdf7076665312d0", "score": "0.7043188", "text": "func (c *clusterTriggerBindings) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tResource(\"clustertriggerbindings\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "068f37f04508757e69b71a09b6f82755", "score": "0.7040483", "text": "func (c *FakeActiveMQArtemisScaledowns) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(activemqartemisscaledownsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v2alpha1.ActiveMQArtemisScaledownList{})\n\treturn err\n}", "title": "" }, { "docid": "7692f83a42be737664248b6a5a10f5da", "score": "0.7040016", "text": "func (c *FakeTridentBackends) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(tridentbackendsResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &netappv1.TridentBackendList{})\n\treturn err\n}", "title": "" }, { "docid": "1539830cdd50a49af73321371c677ae2", "score": "0.70171916", "text": "func (c *FakePintaJobs) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(pintajobsResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &pintav1.PintaJobList{})\n\treturn err\n}", "title": "" }, { "docid": 
"e2a34bdd4f9f165e98c07bbac1ab8a19", "score": "0.7015238", "text": "func (c *FakeCatzzzLoggers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(catzzzloggersResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &catzzzloggerv1.CatzzzLoggerList{})\n\treturn err\n}", "title": "" }, { "docid": "33fecbcc85ecf17815dad291f8753faf", "score": "0.7011216", "text": "func (c *FakeMessagingInfrastructures) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(messaginginfrastructuresResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &enmassev1.MessagingInfrastructureList{})\n\treturn err\n}", "title": "" }, { "docid": "0b53a9823907a1fbe562975a7bceb2b2", "score": "0.70085895", "text": "func (c *FakeCpus) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(cpusResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.CpuList{})\n\treturn err\n}", "title": "" }, { "docid": "503a1810d1cb9bcbd5932e3fc6012c3f", "score": "0.70037776", "text": "func (c *storageBackendContents) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tResource(\"storagebackendcontents\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "ad4fc4bdbe95f3e5a890e959170466a8", "score": "0.7002065", "text": "func (c *messagingUsers) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOptions.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"messagingusers\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "ff144481914852b4bb83dfbe112f3cce", "score": "0.6997816", "text": "func (c *FakeRemoteIstios) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(remoteistiosResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1beta1.RemoteIstioList{})\n\treturn err\n}", "title": "" }, { "docid": "000ac2d00b7fe3aee555de1cb8e6c670", "score": "0.69876844", "text": "func (c *mCOConfigs) DeleteCollection(options *metav1.DeleteOptions, listOptions metav1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOptions.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"mcoconfigs\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "2d24378728efc3e73ec4fd7b86c9b9f7", "score": "0.6987288", "text": "func (c *bigQueryDatasetAccesses) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * 
time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"bigquerydatasetaccesses\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "a4f4dc1852e245bfa06a6113dccf1580", "score": "0.6984819", "text": "func (c *FakeMWInjectors) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(mwinjectorsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &mwinjector_v1.MWInjectorList{})\n\treturn err\n}", "title": "" }, { "docid": "2ba97661638fc6d116aa1deccbd98c85", "score": "0.6984347", "text": "func (c *FakeManageds) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(managedsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.ManagedList{})\n\treturn err\n}", "title": "" }, { "docid": "79dfaaffe6876ebb66f66e3d3be096d4", "score": "0.69833654", "text": "func (c *FakeAciIstioOperators) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(aciistiooperatorsResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &aciistiov1.AciIstioOperatorList{})\n\treturn err\n}", "title": "" }, { "docid": "f937a881b40dce282f64b00cecf7ea17", "score": "0.69734454", "text": "func (c *eventPolicyBindings) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOptions.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"eventpolicybindings\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "10e331f7f1e304f7ee1d01607744b423", "score": "0.6970304", "text": "func (c *FakeSecretRequests) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(secretrequestsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.SecretRequestList{})\n\treturn err\n}", "title": "" }, { "docid": "ed0261d3c60518057c65269a712b6103", "score": "0.6954562", "text": "func (c *FakeClusterActiveOperands) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewRootDeleteCollectionAction(clusteractiveoperandsResource, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.ClusterActiveOperandList{})\n\treturn err\n}", "title": "" }, { "docid": "30ce0a088446b4ab09e2d2f86c892fbd", "score": "0.69519335", "text": "func (c *FakeApigeeSyncAuthorizations) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(apigeesyncauthorizationsResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.ApigeeSyncAuthorizationList{})\n\treturn err\n}", "title": "" }, { "docid": "d673d13389d3c34babb893a354cc2bda", "score": "0.69511336", "text": "func (c *FakeEgressIPs) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewRootDeleteCollectionAction(egressipsResource, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &egressipv1.EgressIPList{})\n\treturn err\n}", "title": 
"" }, { "docid": "b9f9e26aeac38814bd43f8115058baa7", "score": "0.6948324", "text": "func (c *FakeSecretClaims) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(secretclaimsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.SecretClaimList{})\n\treturn err\n}", "title": "" }, { "docid": "d2647a26e02ce52b128961a51fbfa813", "score": "0.6940845", "text": "func (c *FakeTektonResults) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewRootDeleteCollectionAction(tektonresultsResource, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.TektonResultList{})\n\treturn err\n}", "title": "" }, { "docid": "bb630fdea6e46477040c40d4dfdbcd56", "score": "0.6934509", "text": "func (c *FakeLicensemanagerAssociations) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(licensemanagerassociationsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.LicensemanagerAssociationList{})\n\treturn err\n}", "title": "" }, { "docid": "154d527a1c3bb7f6dbdef91893f7995e", "score": "0.6933559", "text": "func (c *awsApiGatewayBasePathMappings) DeleteCollection(options *meta_v1.DeleteOptions, listOptions meta_v1.ListOptions) error {\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"awsapigatewaybasepathmappings\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "20f99ed71ff89dec36176b90bad191bc", "score": "0.69307435", "text": "func (c *FakeCloudListeners) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(cloudlistenersResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &networkv1.CloudListenerList{})\n\treturn err\n}", "title": "" }, { "docid": "f46fdae3a885a770e385b1e5aa4837ba", "score": "0.6915529", "text": "func (c *FakeDistccs) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(distccsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.DistccList{})\n\treturn err\n}", "title": "" }, { "docid": "2f41404dfa820aaba73a4afcf3f78607", "score": "0.69025", "text": "func (c *FakeKmsGrants) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(kmsgrantsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.KmsGrantList{})\n\treturn err\n}", "title": "" }, { "docid": "4e850106fd8f82a6c96376ee35a70332", "score": "0.68964016", "text": "func (c *FakeTrainingSets) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(trainingsetsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha2.TrainingSetList{})\n\treturn err\n}", "title": "" }, { "docid": "14dc9f7123a4aba3999fe0da00ab3da2", "score": "0.68881327", "text": "func (c *FakeProviderHosts) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(providerhostsResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &akashnetworkv1.ProviderHostList{})\n\treturn err\n}", "title": "" }, { "docid": "b43e896e0ce0c9e1edd017062552f09b", "score": 
"0.6876298", "text": "func (c *FakeS3BucketPolicies) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(s3bucketpoliciesResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.S3BucketPolicyList{})\n\treturn err\n}", "title": "" }, { "docid": "c4808f1c9a3e47333bf2ec3cec5fc1af", "score": "0.6872839", "text": "func (c *FakeSriovNetworks) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(sriovnetworksResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &sriovnetworkv1.SriovNetworkList{})\n\treturn err\n}", "title": "" }, { "docid": "d653947c4c2927f40b2a0d6cd167c4f0", "score": "0.68682927", "text": "func (c *dgraphClusters) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOptions.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"dgraphclusters\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "17c7298e949ee55b2c923025facc855b", "score": "0.68681324", "text": "func (c *FakeStaticIPs) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(staticipsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &ipkeeperv1.StaticIPList{})\n\treturn err\n}", "title": "" }, { "docid": "6fb1a727a031fbfccd553b1f5f95327a", "score": "0.68673915", "text": "func (c *FakeServiceAccountKeys) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(serviceaccountkeysResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.ServiceAccountKeyList{})\n\treturn err\n}", "title": "" }, { "docid": "0c8ab1f6b3dbd127cfc68a618e8b59ab", "score": "0.68607736", "text": "func (c *FakeSsmActivations) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(ssmactivationsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.SsmActivationList{})\n\treturn err\n}", "title": "" }, { "docid": "71f7e48d6a657d4f3fa9e9576687a4fd", "score": "0.6857446", "text": "func (c *FakeApigeeInstanceAttachments) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(apigeeinstanceattachmentsResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.ApigeeInstanceAttachmentList{})\n\treturn err\n}", "title": "" }, { "docid": "6084dce3c7556951089dd09a9aee5734", "score": "0.6855346", "text": "func (c *FakeRandomSecrets) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(randomsecretsResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.RandomSecretList{})\n\treturn err\n}", "title": "" }, { "docid": "368981d2d31ed938c13c85c74724d426", "score": "0.6842894", "text": "func (c *FakeEndpointSlices) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(endpointslicesResource, c.ns, listOpts)\n\n\t_, err := 
c.Fake.Invokes(action, &v1.EndpointSliceList{})\n\treturn err\n}", "title": "" }, { "docid": "04f29bc7eee8f286f593893012ce76c5", "score": "0.6838448", "text": "func (c *draughtsmanConfigs) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOptions.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tNamespace(c.ns).\n\t\tResource(\"draughtsmanconfigs\").\n\t\tVersionedParams(&listOptions, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(options).\n\t\tDo().\n\t\tError()\n}", "title": "" }, { "docid": "b93449bf0bae80569cd964bc54e4e5d2", "score": "0.6825864", "text": "func (c *FakeIothubDpsCertificates) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(iothubdpscertificatesResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.IothubDpsCertificateList{})\n\treturn err\n}", "title": "" }, { "docid": "67061ecfcb1ac1c4ffdddb7a16e9deb4", "score": "0.68213606", "text": "func (c *globalReports) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\tvar timeout time.Duration\n\tif listOpts.TimeoutSeconds != nil {\n\t\ttimeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second\n\t}\n\treturn c.client.Delete().\n\t\tResource(\"globalreports\").\n\t\tVersionedParams(&listOpts, scheme.ParameterCodec).\n\t\tTimeout(timeout).\n\t\tBody(&opts).\n\t\tDo(ctx).\n\t\tError()\n}", "title": "" }, { "docid": "077d83ece717036c2f1efa26496591fa", "score": "0.6816463", "text": "func (c *FakeApplies) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(appliesResource, c.ns, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.ApplyList{})\n\treturn err\n}", "title": "" }, { "docid": "3e07817b084f568a0d04c0a2c7152831", "score": "0.6815332", "text": "func (c *FakeBridgeInstances) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(bridgeinstancesResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.BridgeInstanceList{})\n\treturn err\n}", "title": "" }, { "docid": "80c6368d31482f75712e49b0de6e0f5a", "score": "0.6815069", "text": "func (c *FakeAdvancedImages) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {\n\taction := testing.NewDeleteCollectionAction(advancedimagesResource, c.ns, listOpts)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.AdvancedImageList{})\n\treturn err\n}", "title": "" }, { "docid": "3902aff2f16722508191549c3a615225", "score": "0.68144417", "text": "func (c *FakeClusterChannelProvisioners) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {\n\taction := testing.NewRootDeleteCollectionAction(clusterchannelprovisionersResource, listOptions)\n\n\t_, err := c.Fake.Invokes(action, &v1alpha1.ClusterChannelProvisionerList{})\n\treturn err\n}", "title": "" } ]
query_id: 0d2a200e6206e87ca295a1a81135576a
query: CreateTechMesToUpdate provides a mock function with given fields: response
positive_passages: [ { "docid": "78a899234f95ff2e9f3309e602b0ce4f", "score": "0.8082773", "text": "func (_m *IUseCaseChat) CreateTechMesToUpdate(response models.Response) (*models.Chat, error) {\n\tret := _m.Called(response)\n\n\tvar r0 *models.Chat\n\tif rf, ok := ret.Get(0).(func(models.Response) *models.Chat); ok {\n\t\tr0 = rf(response)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*models.Chat)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(models.Response) error); ok {\n\t\tr1 = rf(response)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" } ]
negative_passages:
[ { "docid": "e0d4014f0acdbd58dda077a0fd1f21b6", "score": "0.6111615", "text": "func (_m *ClientMock) Update(_a0 *table.Table, args db_models.DbDTO) error {\n\tret := _m.Called(_a0, args)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*table.Table, db_models.DbDTO) error); ok {\n\t\tr0 = rf(_a0, args)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "896ce9de5f15c9e99f74625a34ae4fe5", "score": "0.6017119", "text": "func (_m *IUseCaseChat) CreateChatAndTechMes(response models.Response) (*models.Chat, error) {\n\tret := _m.Called(response)\n\n\tvar r0 *models.Chat\n\tif rf, ok := ret.Get(0).(func(models.Response) *models.Chat); ok {\n\t\tr0 = rf(response)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*models.Chat)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(models.Response) error); ok {\n\t\tr1 = rf(response)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "a4a810945d9c8aff36a4aac3a6fd865f", "score": "0.59383", "text": "func (_m *mockTaskManager) Update(ctx context.Context, task *Task, props ...string) error {\n\t_va := make([]interface{}, len(props))\n\tfor _i := range props {\n\t\t_va[_i] = props[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, task)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *Task, ...string) error); ok {\n\t\tr0 = rf(ctx, task, props...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "a02295310405090ccf20cb37fd044b99", "score": "0.58288354", "text": "func (_m *ProductServiceServer) Update(_a0 context.Context, _a1 *proto.ProductRequest) (*proto.ProductResponse, error) {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 *proto.ProductResponse\n\tif rf, ok := ret.Get(0).(func(context.Context, *proto.ProductRequest) *proto.ProductResponse); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*proto.ProductResponse)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *proto.ProductRequest) error); ok {\n\t\tr1 = rf(_a0, _a1)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "c8723030c6fe0e4fc8328ac4cb85ecf2", "score": "0.57627964", "text": "func (_m *Manager) Update(ctx context.Context, r *scan.Report, cols ...string) error {\n\t_va := make([]interface{}, len(cols))\n\tfor _i := range cols {\n\t\t_va[_i] = cols[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, r)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *scan.Report, ...string) error); ok {\n\t\tr0 = rf(ctx, r, cols...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "fe21cd622a72bcbca326512bca6bdf1b", "score": "0.57570755", "text": "func (_m *ProvisionWatcherClient) Update(ctx context.Context, reqs []requests.UpdateProvisionWatcherRequest) ([]common.BaseResponse, errors.EdgeX) {\n\tret := _m.Called(ctx, reqs)\n\n\tvar r0 []common.BaseResponse\n\tif rf, ok := ret.Get(0).(func(context.Context, []requests.UpdateProvisionWatcherRequest) []common.BaseResponse); ok {\n\t\tr0 = rf(ctx, reqs)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]common.BaseResponse)\n\t\t}\n\t}\n\n\tvar r1 errors.EdgeX\n\tif rf, ok := ret.Get(1).(func(context.Context, []requests.UpdateProvisionWatcherRequest) errors.EdgeX); ok {\n\t\tr1 = rf(ctx, reqs)\n\t} else 
{\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(errors.EdgeX)\n\t\t}\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "7dc07c9ded70e03adb22c49de1dae829", "score": "0.5747125", "text": "func (_m *mockTestClient) Update(rq *testUpdate) (*testResponse, error) {\n\tret := _m.Called(rq)\n\n\tvar r0 *testResponse\n\tif rf, ok := ret.Get(0).(func(*testUpdate) *testResponse); ok {\n\t\tr0 = rf(rq)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*testResponse)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*testUpdate) error); ok {\n\t\tr1 = rf(rq)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "4e5795b186c3707f47b361bb6068c92a", "score": "0.5742611", "text": "func (_m *MerchantRepository) Update(_a0 *model.Merchants) (*model.Merchants, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 *model.Merchants\n\tif rf, ok := ret.Get(0).(func(*model.Merchants) *model.Merchants); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*model.Merchants)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*model.Merchants) error); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "710ac89db9fdbc9fa96514147c78530f", "score": "0.56709224", "text": "func (_m *MockStore) Update(ctx context.Context, organizationID uint, repository Repository) error {\n\tret := _m.Called(ctx, organizationID, repository)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, uint, Repository) error); ok {\n\t\tr0 = rf(ctx, organizationID, repository)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "11c3b67c2d87fec35b7f348eee3cee98", "score": "0.5668745", "text": "func (_m *TaskManager) Update(p *tasks.UpdatePayload) error {\n\tret := _m.Called(p)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*tasks.UpdatePayload) error); ok {\n\t\tr0 = rf(p)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "f0e7a26f191fb4a51bccf70b2eb2db5e", "score": "0.5667445", "text": "func (_m *TenantMappingRepository) Update(ctx context.Context, _a1 *model.BusinessTenantMapping) error {\n\tret := _m.Called(ctx, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *model.BusinessTenantMapping) error); ok {\n\t\tr0 = rf(ctx, _a1)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "26fb048153841f9551468e1bbd48591c", "score": "0.56528866", "text": "func (_m *ProjectUsecase) Update(ctx context.Context, ar *domain.Project) error {\n\tret := _m.Called(ctx, ar)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *domain.Project) error); ok {\n\t\tr0 = rf(ctx, ar)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "1d0dea0279ffdfa13bbb6adb25a55c71", "score": "0.5650865", "text": "func (m *MockGeneralRepository) UpdateFields(arg0, arg1 string, arg2 interface{}, arg3 ...string) error {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{arg0, arg1, arg2}\n\tfor _, a := range arg3 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"UpdateFields\", varargs...)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "dc7ea94780e87ccb2bbf79024460634b", "score": "0.5647081", "text": "func (m *MockOrmer) Update(arg0 interface{}, arg1 ...string) (int64, error) {\n\tvarargs := []interface{}{arg0}\n\tfor _, a := range arg1 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret 
:= m.ctrl.Call(m, \"Update\", varargs...)\n\tret0, _ := ret[0].(int64)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "8a393a41675248f01bb0ea3089620595", "score": "0.5638414", "text": "func (m *MockVirtualMachinesClientAPI) Update(ctx context.Context, resourceGroupName, VMName string, parameters compute.VirtualMachineUpdate) (compute.VirtualMachinesUpdateFuture, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", ctx, resourceGroupName, VMName, parameters)\n\tret0, _ := ret[0].(compute.VirtualMachinesUpdateFuture)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "38438a6097ca208b4d32bd2e54bf86b0", "score": "0.5631809", "text": "func (_m *ClientMock) SafeUpdate(_a0 *table.Table, args db_models.DbDTO) (bool, error) {\n\tret := _m.Called(_a0, args)\n\n\tvar r0 bool\n\tif rf, ok := ret.Get(0).(func(*table.Table, db_models.DbDTO) bool); ok {\n\t\tr0 = rf(_a0, args)\n\t} else {\n\t\tr0 = ret.Get(0).(bool)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*table.Table, db_models.DbDTO) error); ok {\n\t\tr1 = rf(_a0, args)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "6a8ec6d09c59b342ccbdbe6e3953ffa1", "score": "0.5616679", "text": "func (_m *Service) Update(toUpdate *models.City) error {\n\tret := _m.Called(toUpdate)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*models.City) error); ok {\n\t\tr0 = rf(toUpdate)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "35b2c24ac78b4b500768a4b1125bf4f8", "score": "0.5606185", "text": "func TestUpdateDeviceTwinFields(t *testing.T) {\n\t// ormerMock is mocked Ormer implementation\n\tvar ormerMock *beego.MockOrmer\n\t// querySeterMock is mocked QuerySeter implementation\n\tvar querySeterMock *beego.MockQuerySeter\n\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tormerMock = beego.NewMockOrmer(mockCtrl)\n\tquerySeterMock = beego.NewMockQuerySeter(mockCtrl)\n\tdbm.DBAccess = ormerMock\n\n\tcases := []struct {\n\t\t// name is name of the testcase\n\t\tname string\n\t\t// filterReturn is the return of mock interface querySeterMock's filter function\n\t\tfilterReturn orm.QuerySeter\n\t\t// updateReturnInt is the first return of mock interface querySeterMock's update function\n\t\tupdateReturnInt int64\n\t\t// updateReturnErr is the second return of mock interface querySeterMocks's update function also expected error\n\t\tupdateReturnErr error\n\t\t// queryTableReturn is the return of mock interface ormerMock's QueryTable function\n\t\tqueryTableReturn orm.QuerySeter\n\t}{{\n\t\t// Success Case\n\t\tname: \"SuccessCase\",\n\t\tfilterReturn: querySeterMock,\n\t\tupdateReturnInt: int64(1),\n\t\tupdateReturnErr: nil,\n\t\tqueryTableReturn: querySeterMock,\n\t}, {\n\t\t// Failure Case\n\t\tname: \"FailureCase\",\n\t\tfilterReturn: querySeterMock,\n\t\tupdateReturnInt: int64(0),\n\t\tupdateReturnErr: errFailedDBOperation,\n\t\tqueryTableReturn: querySeterMock,\n\t},\n\t}\n\n\t// run the test cases\n\tfor _, test := range cases {\n\t\tt.Run(test.name, func(t *testing.T) {\n\t\t\tquerySeterMock.EXPECT().Filter(gomock.Any(), gomock.Any()).Return(test.filterReturn).Times(2)\n\t\t\tquerySeterMock.EXPECT().Update(gomock.Any()).Return(test.updateReturnInt, test.updateReturnErr).Times(1)\n\t\t\tormerMock.EXPECT().QueryTable(gomock.Any()).Return(test.queryTableReturn).Times(1)\n\t\t\terr := UpdateDeviceTwinFields(\"test\", \"test\", make(map[string]interface{}))\n\t\t\tif 
test.updateReturnErr != err {\n\t\t\t\tt.Errorf(\"UpdateDeviceTwinFields Case failed: wanted error %v and got error %v\", test.updateReturnErr, err)\n\t\t\t}\n\t\t})\n\t}\n}", "title": "" }, { "docid": "24d6dfd8d3f5a5fc017b8ced4309f2d1", "score": "0.5580331", "text": "func (_m *Service) Update(_a0 *admin.Domain) (*admin.Domain, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 *admin.Domain\n\tif rf, ok := ret.Get(0).(func(*admin.Domain) *admin.Domain); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*admin.Domain)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*admin.Domain) error); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "a747fe17b864604e8a46699edf3deb54", "score": "0.55782247", "text": "func (_m *Manager) UpdateReportData(ctx context.Context, uuid string, _a2 string) error {\n\tret := _m.Called(ctx, uuid, _a2)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {\n\t\tr0 = rf(ctx, uuid, _a2)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "6571b54d893635db53a53aa50cc369d9", "score": "0.5568844", "text": "func (_m *Repository) Update(ctx context.Context, item *model.SystemAuth) error {\n\tret := _m.Called(ctx, item)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *model.SystemAuth) error); ok {\n\t\tr0 = rf(ctx, item)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "ecabf1ca86d101d8f03a4bc4b452211f", "score": "0.55484223", "text": "func (_m *DAO) Update(ctx context.Context, r *model.Robot, props ...string) error {\n\t_va := make([]interface{}, len(props))\n\tfor _i := range props {\n\t\t_va[_i] = props[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, r)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *model.Robot, ...string) error); ok {\n\t\tr0 = rf(ctx, r, props...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "d6d782e47029ea5349743b855e63480f", "score": "0.55475307", "text": "func (_m *SystemAuthService) Update(ctx context.Context, item *model.SystemAuth) error {\n\tret := _m.Called(ctx, item)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *model.SystemAuth) error); ok {\n\t\tr0 = rf(ctx, item)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "e56413aa6a65bdb1716691ffc1e98253", "score": "0.55213726", "text": "func (m *MockFeiraApp) Update(ctx context.Context, id string, feira model.FeiraRequest) (*model.Feira, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", ctx, id, feira)\n\tret0, _ := ret[0].(*model.Feira)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "3e1874135c0cad5bcc45d3fffaf9d290", "score": "0.5520671", "text": "func (_m *TriggerI) Update(_a0 string, _a1 uuid.UUID, _a2 map[string]string) error {\n\tret := _m.Called(_a0, _a1, _a2)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uuid.UUID, map[string]string) error); ok {\n\t\tr0 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "a0e8edaa4191c4a1da8b66847f1c57d9", "score": "0.5502873", "text": "func (m *MockProduct) Update(arg0 string, arg1 interface{}) error {\n\tret := m.ctrl.Call(m, \"Update\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" 
}, { "docid": "ff04c551eaeac045ab65ef078239977f", "score": "0.5495314", "text": "func (_m *MockUserRepository) UpdateByID(id int64, confirmationToken []byte, createdAt *time.Time, createdBy nilt.Int64, firstName nilt.String, isActive nilt.Bool, isConfirmed nilt.Bool, isStaff nilt.Bool, isSuperuser nilt.Bool, lastLoginAt *time.Time, lastName nilt.String, password []byte, updatedAt *time.Time, updatedBy nilt.Int64, username nilt.String) (*userEntity, error) {\n\tret := _m.Called(id, confirmationToken, createdAt, createdBy, firstName, isActive, isConfirmed, isStaff, isSuperuser, lastLoginAt, lastName, password, updatedAt, updatedBy, username)\n\n\tvar r0 *userEntity\n\tif rf, ok := ret.Get(0).(func(int64, []byte, *time.Time, nilt.Int64, nilt.String, nilt.Bool, nilt.Bool, nilt.Bool, nilt.Bool, *time.Time, nilt.String, []byte, *time.Time, nilt.Int64, nilt.String) *userEntity); ok {\n\t\tr0 = rf(id, confirmationToken, createdAt, createdBy, firstName, isActive, isConfirmed, isStaff, isSuperuser, lastLoginAt, lastName, password, updatedAt, updatedBy, username)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*userEntity)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(int64, []byte, *time.Time, nilt.Int64, nilt.String, nilt.Bool, nilt.Bool, nilt.Bool, nilt.Bool, *time.Time, nilt.String, []byte, *time.Time, nilt.Int64, nilt.String) error); ok {\n\t\tr1 = rf(id, confirmationToken, createdAt, createdBy, firstName, isActive, isConfirmed, isStaff, isSuperuser, lastLoginAt, lastName, password, updatedAt, updatedBy, username)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "802396bf06844e1b53edce652856fda4", "score": "0.5484425", "text": "func TestUpdateMentorRegistrationService(t *testing.T) {\n\tSetupTestDB(t)\n\n\tupdated_registration := getBaseMentorRegistration()\n\tupdated_registration.Data[\"id\"] = \"testid\"\n\tupdated_registration.Data[\"firstName\"] = \"first2\"\n\tupdated_registration.Data[\"lastName\"] = \"last2\"\n\terr := service.UpdateMentorRegistration(\"testid\", updated_registration)\n\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tmentor_registration, err := service.GetMentorRegistration(\"testid\")\n\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\texpected_registration := getBaseMentorRegistration()\n\texpected_registration.Data[\"id\"] = \"testid\"\n\texpected_registration.Data[\"firstName\"] = \"first2\"\n\texpected_registration.Data[\"lastName\"] = \"last2\"\n\n\tif !reflect.DeepEqual(mentor_registration.Data[\"firstName\"], expected_registration.Data[\"firstName\"]) {\n\t\tt.Errorf(\"Wrong mentor info.\\nExpected %v\\ngot %v\\n\", expected_registration.Data[\"firstName\"], mentor_registration.Data[\"firstName\"])\n\t}\n\n\tCleanupTestDB(t)\n}", "title": "" }, { "docid": "855ee7f161a0e76f9b8dbc9e44e7fe9f", "score": "0.5483382", "text": "func (m *MockSdkOperations) Update(o runtime.Object) error {\n\tret := m.ctrl.Call(m, \"Update\", o)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "8a70605b235ab3833bf5340eb90290af", "score": "0.5483289", "text": "func (_m *ServiceDefinitionService) Update(application string, serviceDef *model.ServiceDefinition) (model.ServiceDefinition, apperrors.AppError) {\n\tret := _m.Called(application, serviceDef)\n\n\tvar r0 model.ServiceDefinition\n\tif rf, ok := ret.Get(0).(func(string, *model.ServiceDefinition) model.ServiceDefinition); ok {\n\t\tr0 = rf(application, serviceDef)\n\t} else {\n\t\tr0 = ret.Get(0).(model.ServiceDefinition)\n\t}\n\n\tvar r1 
apperrors.AppError\n\tif rf, ok := ret.Get(1).(func(string, *model.ServiceDefinition) apperrors.AppError); ok {\n\t\tr1 = rf(application, serviceDef)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(apperrors.AppError)\n\t\t}\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "99b77fc45857fd86403fbc34e28d5427", "score": "0.54832417", "text": "func (m *MockServerClient) Update(arg0 context.Context, arg1 *hcloud.Server, arg2 hcloud.ServerUpdateOpts) (*hcloud.Server, *hcloud.Response, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(*hcloud.Server)\n\tret1, _ := ret[1].(*hcloud.Response)\n\tret2, _ := ret[2].(error)\n\treturn ret0, ret1, ret2\n}", "title": "" }, { "docid": "3d90d86ee4c3454c1d16b0d3c0d3a875", "score": "0.5480012", "text": "func (_m *ApplicationTemplateVersionRepository) Update(ctx context.Context, _a1 model.ApplicationTemplateVersion) error {\n\tret := _m.Called(ctx, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, model.ApplicationTemplateVersion) error); ok {\n\t\tr0 = rf(ctx, _a1)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "8ff43850160a2f85ac7d576fd4c2ddd7", "score": "0.5471882", "text": "func TestUpdateDeviceTwinField(t *testing.T) {\n\t// ormerMock is mocked Ormer implementation\n\tvar ormerMock *beego.MockOrmer\n\t// querySeterMock is mocked QuerySeter implementation\n\tvar querySeterMock *beego.MockQuerySeter\n\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tormerMock = beego.NewMockOrmer(mockCtrl)\n\tquerySeterMock = beego.NewMockQuerySeter(mockCtrl)\n\tdbm.DBAccess = ormerMock\n\n\tcases := []struct {\n\t\t// name is name of the testcase\n\t\tname string\n\t\t// filterReturn is the return of mock interface querySeterMock's filter function\n\t\tfilterReturn orm.QuerySeter\n\t\t// updateReturnInt is the first return of mock interface querySeterMock's update function\n\t\tupdateReturnInt int64\n\t\t// updateReturnErr is the second return of mock interface querySeterMocks's update function also expected error\n\t\tupdateReturnErr error\n\t\t// queryTableReturn is the return of mock interface ormerMock's QueryTable function\n\t\tqueryTableReturn orm.QuerySeter\n\t}{{\n\t\t// Success Case\n\t\tname: \"SuccessCase\",\n\t\tfilterReturn: querySeterMock,\n\t\tupdateReturnInt: int64(1),\n\t\tupdateReturnErr: nil,\n\t\tqueryTableReturn: querySeterMock,\n\t}, {\n\t\t// Failure Case\n\t\tname: \"FailureCase\",\n\t\tfilterReturn: querySeterMock,\n\t\tupdateReturnInt: int64(0),\n\t\tupdateReturnErr: errFailedDBOperation,\n\t\tqueryTableReturn: querySeterMock,\n\t},\n\t}\n\n\t// run the test cases\n\tfor _, test := range cases {\n\t\tt.Run(test.name, func(t *testing.T) {\n\t\t\tquerySeterMock.EXPECT().Filter(gomock.Any(), gomock.Any()).Return(test.filterReturn).Times(2)\n\t\t\tquerySeterMock.EXPECT().Update(gomock.Any()).Return(test.updateReturnInt, test.updateReturnErr).Times(1)\n\t\t\tormerMock.EXPECT().QueryTable(gomock.Any()).Return(test.queryTableReturn).Times(1)\n\t\t\terr := UpdateDeviceTwinField(\"test\", \"test\", \"test\", \"test\")\n\t\t\tif test.updateReturnErr != err {\n\t\t\t\tt.Errorf(\"UpdateDeviceTwinField Case failed: wanted error %v and got error %v\", test.updateReturnErr, err)\n\t\t\t}\n\t\t})\n\t}\n}", "title": "" }, { "docid": "485ccb3da8a45a4a628140ca44081d55", "score": "0.54672295", "text": "func (_m *File) Update(ctx context.Context, request model.UpdateFileRequest) (string, error) {\n\tret := 
_m.Called(ctx, request)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(context.Context, model.UpdateFileRequest) string); ok {\n\t\tr0 = rf(ctx, request)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, model.UpdateFileRequest) error); ok {\n\t\tr1 = rf(ctx, request)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "8378290c124bff339d15d071ef880ab3", "score": "0.5461251", "text": "func (m *MockGormItf) Update(attrs ...interface{}) GormItf {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{}\n\tfor _, a := range attrs {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"Update\", varargs...)\n\tret0, _ := ret[0].(GormItf)\n\treturn ret0\n}", "title": "" }, { "docid": "e6329e6073ca837f6f3eb702f2d2165b", "score": "0.54563296", "text": "func (_m *Usecase) Update(ctx context.Context, _a1 *models.Host) (*models.Host, error) {\n\tret := _m.Called(ctx, _a1)\n\n\tvar r0 *models.Host\n\tif rf, ok := ret.Get(0).(func(context.Context, *models.Host) *models.Host); ok {\n\t\tr0 = rf(ctx, _a1)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*models.Host)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *models.Host) error); ok {\n\t\tr1 = rf(ctx, _a1)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "ecf725925f7f04a0b82239ad1961121c", "score": "0.54536986", "text": "func (_m *Users) Update(user *domain.User) (*domain.User, error) {\n\tret := _m.Called(user)\n\n\tvar r0 *domain.User\n\tif rf, ok := ret.Get(0).(func(*domain.User) *domain.User); ok {\n\t\tr0 = rf(user)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*domain.User)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*domain.User) error); ok {\n\t\tr1 = rf(user)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "31986e19b7b9b0869181b7f5b21d13eb", "score": "0.5446776", "text": "func (_m *Complain) Update(ctx context.Context, p model.ComplainIn, id string) error {\n\tret := _m.Called(ctx, p, id)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, model.ComplainIn, string) error); ok {\n\t\tr0 = rf(ctx, p, id)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "4a3de9b2507a415410e39945fad94795", "score": "0.54324615", "text": "func (_m *VendorStore) Update(_a0 *model.Vendor, _a1 sqlx.Execer) error {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*model.Vendor, sqlx.Execer) error); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "c7a7b2f1e0cba8277ef1d76284f54bee", "score": "0.54304034", "text": "func (_m *KarmaProvider) Update(guildID string, userID string, executorID string, value int) error {\n\tret := _m.Called(guildID, userID, executorID, value)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, string, string, int) error); ok {\n\t\tr0 = rf(guildID, userID, executorID, value)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "21854c7cc1dd0762073c1f2b439bc5d6", "score": "0.542468", "text": "func (_m *IUserRepo) Update(form *model.UserForm, id int) (*model.User, error) {\n\tret := _m.Called(form, id)\n\n\tvar r0 *model.User\n\tif rf, ok := ret.Get(0).(func(*model.UserForm, int) *model.User); ok {\n\t\tr0 = rf(form, id)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = 
ret.Get(0).(*model.User)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*model.UserForm, int) error); ok {\n\t\tr1 = rf(form, id)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "9cee4d980cfcb7bcc84ecf3d39593ead", "score": "0.54234", "text": "func (m *MockRegistry) Update(arg0 *did.Doc, arg1 ...vdr.DIDMethodOption) error {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{arg0}\n\tfor _, a := range arg1 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"Update\", varargs...)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "dac12c5f1f695e14f313a7b65f243aa7", "score": "0.54168457", "text": "func (_m *DBHandler) UpdateFileInfo(ctx context.Context, fileID primitive.ObjectID, updateRequest map[string]interface{}) error {\n\tret := _m.Called(ctx, fileID, updateRequest)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, primitive.ObjectID, map[string]interface{}) error); ok {\n\t\tr0 = rf(ctx, fileID, updateRequest)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "a270e94bc34077e9cbe5fc4c3b9c0f1d", "score": "0.5408623", "text": "func (_m *PostService) Update(UUID string, p model.UpdatePostInput) error {\n\tret := _m.Called(UUID, p)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, model.UpdatePostInput) error); ok {\n\t\tr0 = rf(UUID, p)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "d9f9c97d67e6c9a40916faa34b5268d3", "score": "0.53938997", "text": "func (_m *API) UpdateServiceObj(_a0 service.Service) (*service.ServiceDetails, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 *service.ServiceDetails\n\tif rf, ok := ret.Get(0).(func(service.Service) *service.ServiceDetails); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*service.ServiceDetails)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(service.Service) error); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n\n}", "title": "" }, { "docid": "8583de07223cae149f6199a5670cd5e4", "score": "0.5393084", "text": "func (m *MockService) Update(ctx context.Context, id int, f *card.Form) (*card.Card, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", ctx, id, f)\n\tret0, _ := ret[0].(*card.Card)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "574553b1f8b19e14c470136653d4b577", "score": "0.5392373", "text": "func (_m *mockTaskManager) UpdateExtraAttrs(ctx context.Context, id int64, extraAttrs map[string]interface{}) error {\n\tret := _m.Called(ctx, id, extraAttrs)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, int64, map[string]interface{}) error); ok {\n\t\tr0 = rf(ctx, id, extraAttrs)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "efe0a731453c052ea133098b7874ff3e", "score": "0.5371992", "text": "func TestUpdateDeviceTwinMulti(t *testing.T) {\n\t// ormerMock is mocked Ormer implementation\n\tvar ormerMock *beego.MockOrmer\n\t// querySeterMock is mocked QuerySeter implementation\n\tvar querySeterMock *beego.MockQuerySeter\n\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tormerMock = beego.NewMockOrmer(mockCtrl)\n\tquerySeterMock = beego.NewMockQuerySeter(mockCtrl)\n\tdbm.DBAccess = ormerMock\n\n\tcases := []struct {\n\t\t// name is name of the testcase\n\t\tname string\n\t\t// filterReturn is the return of mock interface querySeterMock's 
filter function\n\t\tfilterReturn orm.QuerySeter\n\t\t// updateReturnInt is the first return of mock interface querySeterMock's update function\n\t\tupdateReturnInt int64\n\t\t// updateReturnErr is the second return of mock interface querySeterMocks's update function also expected error\n\t\tupdateReturnErr error\n\t\t// queryTableReturn is the return of mock interface ormerMock's QueryTable function\n\t\tqueryTableReturn orm.QuerySeter\n\t}{{\n\t\t// Success Case\n\t\tname: \"SuccessCase\",\n\t\tfilterReturn: querySeterMock,\n\t\tupdateReturnInt: int64(1),\n\t\tupdateReturnErr: nil,\n\t\tqueryTableReturn: querySeterMock,\n\t}, {\n\t\t// Failure Case\n\t\tname: \"FailureCase\",\n\t\tfilterReturn: querySeterMock,\n\t\tupdateReturnInt: int64(0),\n\t\tupdateReturnErr: errFailedDBOperation,\n\t\tqueryTableReturn: querySeterMock,\n\t},\n\t}\n\n\t// updateDevice is argument to UpdateDeviceTwinMulti function\n\tupdateDevice := make([]DeviceTwinUpdate, 0)\n\tupdateDevice = append(updateDevice, DeviceTwinUpdate{DeviceID: \"test\"})\n\n\t// run the test cases\n\tfor _, test := range cases {\n\t\tt.Run(test.name, func(t *testing.T) {\n\t\t\tquerySeterMock.EXPECT().Filter(gomock.Any(), gomock.Any()).Return(test.filterReturn).Times(2)\n\t\t\tquerySeterMock.EXPECT().Update(gomock.Any()).Return(test.updateReturnInt, test.updateReturnErr).Times(1)\n\t\t\tormerMock.EXPECT().QueryTable(gomock.Any()).Return(test.queryTableReturn).Times(1)\n\t\t\terr := UpdateDeviceTwinMulti(updateDevice)\n\t\t\tif test.updateReturnErr != err {\n\t\t\t\tt.Errorf(\"UpdateDeviceTwinMulti Case failed: wanted error %v and got error %v\", test.updateReturnErr, err)\n\t\t\t}\n\t\t})\n\t}\n}", "title": "" }, { "docid": "4420ca66e72ca6981de0862e51610dec", "score": "0.53677154", "text": "func (m *MockTaskRepository) Update(id string, summary *string, date *time.Time) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", id, summary, date)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "a420cbe5727d692bc2162647732c7935", "score": "0.53624064", "text": "func (m *ManagerMock) Update(ctx context.Context, ak *hub.APIKey) error {\n\targs := m.Called(ctx, ak)\n\treturn args.Error(0)\n}", "title": "" }, { "docid": "fd8be04ae1cbc8d718053b581bad48e6", "score": "0.5360119", "text": "func (_m *API) UpdateService(_a0 io.Reader) (*service.ServiceDetails, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 *service.ServiceDetails\n\tif rf, ok := ret.Get(0).(func(io.Reader) *service.ServiceDetails); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*service.ServiceDetails)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(io.Reader) error); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "aec8e9d1947fa492e351e38a8f377b67", "score": "0.5358294", "text": "func (m *MockVirtualMeshClient) UpdateVirtualMesh(arg0 context.Context, arg1 *v1alpha1.VirtualMesh, arg2 ...client.UpdateOption) error {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{arg0, arg1}\n\tfor _, a := range arg2 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"UpdateVirtualMesh\", varargs...)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "cf79ae38500d427d1471f136b12eba2c", "score": "0.5350411", "text": "func (_m *Repository) Update(id int32, _a1 products.Product) (*products.Product, error) {\n\tret := _m.Called(id, _a1)\n\n\tvar r0 *products.Product\n\tif rf, ok := ret.Get(0).(func(int32, 
products.Product) *products.Product); ok {\n\t\tr0 = rf(id, _a1)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*products.Product)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(int32, products.Product) error); ok {\n\t\tr1 = rf(id, _a1)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "7a4ccd8742b9769e0da1308df383338d", "score": "0.53224504", "text": "func (_m *Service) Update(bank domain.Bank) (*domain.Bank, error) {\n\tret := _m.Called(bank)\n\n\tvar r0 *domain.Bank\n\tif rf, ok := ret.Get(0).(func(domain.Bank) *domain.Bank); ok {\n\t\tr0 = rf(bank)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*domain.Bank)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(domain.Bank) error); ok {\n\t\tr1 = rf(bank)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "64f9bcb4c2a2fc1f509c61220cedd8e4", "score": "0.53163195", "text": "func (_m *IcecreamDataAccessor) TxUpdate(_a0 context.Context, _a1 *pg.Tx, _a2 *bo.Icecream) (*bo.Icecream, *pg.Tx, error) {\n\tret := _m.Called(_a0, _a1, _a2)\n\n\tvar r0 *bo.Icecream\n\tif rf, ok := ret.Get(0).(func(context.Context, *pg.Tx, *bo.Icecream) *bo.Icecream); ok {\n\t\tr0 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*bo.Icecream)\n\t\t}\n\t}\n\n\tvar r1 *pg.Tx\n\tif rf, ok := ret.Get(1).(func(context.Context, *pg.Tx, *bo.Icecream) *pg.Tx); ok {\n\t\tr1 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(*pg.Tx)\n\t\t}\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(context.Context, *pg.Tx, *bo.Icecream) error); ok {\n\t\tr2 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "title": "" }, { "docid": "b465f0d6a1cd53dae7e6a48161eabbf6", "score": "0.52960795", "text": "func (m *MockGormItf) Updates(values interface{}, ignoreProtectedAttrs ...bool) GormItf {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{values}\n\tfor _, a := range ignoreProtectedAttrs {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"Updates\", varargs...)\n\tret0, _ := ret[0].(GormItf)\n\treturn ret0\n}", "title": "" }, { "docid": "2baff3095152e35d7979ca9c561b7863", "score": "0.52889216", "text": "func (_m *Repository) UpdateMeta(ctx context.Context, cafeID int, meta []byte) error {\n\tret := _m.Called(ctx, cafeID, meta)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, int, []byte) error); ok {\n\t\tr0 = rf(ctx, cafeID, meta)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "903c27bb3e14260822fd5302878ab679", "score": "0.5287959", "text": "func (_m *GatewayAPIClient) UpdateReceivedOCMShare(ctx context.Context, in *ocmv1beta1.UpdateReceivedOCMShareRequest, opts ...grpc.CallOption) (*ocmv1beta1.UpdateReceivedOCMShareResponse, error) {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, in)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 *ocmv1beta1.UpdateReceivedOCMShareResponse\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *ocmv1beta1.UpdateReceivedOCMShareRequest, ...grpc.CallOption) (*ocmv1beta1.UpdateReceivedOCMShareResponse, error)); ok {\n\t\treturn rf(ctx, in, opts...)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context, *ocmv1beta1.UpdateReceivedOCMShareRequest, ...grpc.CallOption) *ocmv1beta1.UpdateReceivedOCMShareResponse); 
ok {\n\t\tr0 = rf(ctx, in, opts...)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*ocmv1beta1.UpdateReceivedOCMShareResponse)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context, *ocmv1beta1.UpdateReceivedOCMShareRequest, ...grpc.CallOption) error); ok {\n\t\tr1 = rf(ctx, in, opts...)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "767a3f78b514a718a5eceeb7e023f9ac", "score": "0.52840054", "text": "func (_m *MockTodoGroupsServiceServer) UpdateTodoGroup(_a0 context.Context, _a1 *UpdateTodoGroupRequest) (*UpdateTodoGroupResponse, error) {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 *UpdateTodoGroupResponse\n\tif rf, ok := ret.Get(0).(func(context.Context, *UpdateTodoGroupRequest) *UpdateTodoGroupResponse); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*UpdateTodoGroupResponse)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *UpdateTodoGroupRequest) error); ok {\n\t\tr1 = rf(_a0, _a1)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "b37d52a9ac9bc50b8659240e8f6e9481", "score": "0.5283439", "text": "func (_m *Repository) Update(ctx context.Context, event *events.Domain) (int, error) {\n\tret := _m.Called(ctx, event)\n\n\tvar r0 int\n\tif rf, ok := ret.Get(0).(func(context.Context, *events.Domain) int); ok {\n\t\tr0 = rf(ctx, event)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *events.Domain) error); ok {\n\t\tr1 = rf(ctx, event)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "f27d9d3c533f3100fddda3bc6c113ecb", "score": "0.5275301", "text": "func (m *MockembeddedClient) Update(ctx context.Context, namespace string, obj, result runtime.Object, opts v1.UpdateOptions) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", ctx, namespace, obj, result, opts)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "1b5fd1f882de25d649379c62938a2efd", "score": "0.5273517", "text": "func (_m *MockService) Update(book *Book) (*Book, error) {\n\tret := _m.Called(book)\n\n\tvar r0 *Book\n\tif rf, ok := ret.Get(0).(func(*Book) *Book); ok {\n\t\tr0 = rf(book)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*Book)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*Book) error); ok {\n\t\tr1 = rf(book)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "9be21f71e6bf7c4cc1f63a4acbfdea8e", "score": "0.5269219", "text": "func (_m *IntervalActionClient) Update(ctx context.Context, reqs []requests.UpdateIntervalActionRequest) ([]common.BaseResponse, errors.EdgeX) {\n\tret := _m.Called(ctx, reqs)\n\n\tvar r0 []common.BaseResponse\n\tif rf, ok := ret.Get(0).(func(context.Context, []requests.UpdateIntervalActionRequest) []common.BaseResponse); ok {\n\t\tr0 = rf(ctx, reqs)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]common.BaseResponse)\n\t\t}\n\t}\n\n\tvar r1 errors.EdgeX\n\tif rf, ok := ret.Get(1).(func(context.Context, []requests.UpdateIntervalActionRequest) errors.EdgeX); ok {\n\t\tr1 = rf(ctx, reqs)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(errors.EdgeX)\n\t\t}\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "a197a16e8005ebee491b746ba8646406", "score": "0.52580595", "text": "func (_m *GatewayAPIClient) UpdateReceivedShare(ctx context.Context, in 
*collaborationv1beta1.UpdateReceivedShareRequest, opts ...grpc.CallOption) (*collaborationv1beta1.UpdateReceivedShareResponse, error) {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, in)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 *collaborationv1beta1.UpdateReceivedShareResponse\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *collaborationv1beta1.UpdateReceivedShareRequest, ...grpc.CallOption) (*collaborationv1beta1.UpdateReceivedShareResponse, error)); ok {\n\t\treturn rf(ctx, in, opts...)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context, *collaborationv1beta1.UpdateReceivedShareRequest, ...grpc.CallOption) *collaborationv1beta1.UpdateReceivedShareResponse); ok {\n\t\tr0 = rf(ctx, in, opts...)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*collaborationv1beta1.UpdateReceivedShareResponse)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context, *collaborationv1beta1.UpdateReceivedShareRequest, ...grpc.CallOption) error); ok {\n\t\tr1 = rf(ctx, in, opts...)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "5253b916ab854c317c5e70fd7d7aa753", "score": "0.52361846", "text": "func (_m *AttendanceRepository) UpdateCount(ctx context.Context, id string, count domain.Count) error {\n\tret := _m.Called(ctx, id, count)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, domain.Count) error); ok {\n\t\tr0 = rf(ctx, id, count)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "2e091f766ddf63c3f414dfadd9fc220d", "score": "0.52311563", "text": "func (m *MockWriter) Update(e *entity.Template) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", e)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "ec707c44e4097309702dfb5246ce31ce", "score": "0.52310896", "text": "func (_m *ExchangeRateUsecase) Update(ctx context.Context, payload *domain.ExchangeRate) error {\n\tret := _m.Called(ctx, payload)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *domain.ExchangeRate) error); ok {\n\t\tr0 = rf(ctx, payload)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "a236eca2f60aa0f63100b930849dcf7c", "score": "0.5216399", "text": "func (m *MockUserHandler) UpdateUserInfo(arg0 context.Context, arg1 *proto.Request, arg2 *proto.Response) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"UpdateUserInfo\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "691583c99255aeb60ec0c0af81e4dc4f", "score": "0.521055", "text": "func (m *MockUserService) UpdateUserInfo(ctx context.Context, in *proto.Request, opts ...client.CallOption) (*proto.Response, error) {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{ctx, in}\n\tfor _, a := range opts {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"UpdateUserInfo\", varargs...)\n\tret0, _ := ret[0].(*proto.Response)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "b6fddd4e7cd0a5dff661bf61635ba453", "score": "0.5196417", "text": "func (m *MockAPI) Update(ctx context.Context, vehicle *entity.Vehicle) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", ctx, vehicle)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "a3dc6508b343e74862bea83d09eb572a", "score": "0.51905686", "text": "func (_m 
*Repository) UpdateModelDeployment(ctx context.Context, tx *sql.Tx, md *apisdeployment.ModelDeployment) error {\n\tret := _m.Called(ctx, tx, md)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *sql.Tx, *apisdeployment.ModelDeployment) error); ok {\n\t\tr0 = rf(ctx, tx, md)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "dd6ec083773b5a4e3a273fcabc55547b", "score": "0.51899743", "text": "func (m *MockOpenStorageVolumeServer) Update(arg0 context.Context, arg1 *api.SdkVolumeUpdateRequest) (*api.SdkVolumeUpdateResponse, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", arg0, arg1)\n\tret0, _ := ret[0].(*api.SdkVolumeUpdateResponse)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "5142f1f24041caee1bcaa24b5969b472", "score": "0.518947", "text": "func (m *MockIRoomRepository) UpdateRoom(arg0 string, arg1 models.RoomModel) (models.RoomModel, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"UpdateRoom\", arg0, arg1)\n\tret0, _ := ret[0].(models.RoomModel)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "ef67222c7f751d66dfaa0287f6fed3ae", "score": "0.5187732", "text": "func (_m *Company) UpdateName(ctx context.Context, req request.UpdateCompanyNameRequest) (string, error) {\n\tret := _m.Called(ctx, req)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(context.Context, request.UpdateCompanyNameRequest) string); ok {\n\t\tr0 = rf(ctx, req)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, request.UpdateCompanyNameRequest) error); ok {\n\t\tr1 = rf(ctx, req)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "f935ef6e5156d874a17624aa95c7b84b", "score": "0.5187694", "text": "func (_m *UserRepository) Update(user *domain.User) (bool, error) {\n\tret := _m.Called(user)\n\n\tvar r0 bool\n\tif rf, ok := ret.Get(0).(func(*domain.User) bool); ok {\n\t\tr0 = rf(user)\n\t} else {\n\t\tr0 = ret.Get(0).(bool)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*domain.User) error); ok {\n\t\tr1 = rf(user)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "55efc9efc480ccca76e3a8460a971abb", "score": "0.51847875", "text": "func (_m *MTOShipmentUpdater) UpdateMTOShipmentOffice(ctx context.Context, mtoShipment *models.MTOShipment, eTag string) (*models.MTOShipment, error) {\n\tret := _m.Called(ctx, mtoShipment, eTag)\n\n\tvar r0 *models.MTOShipment\n\tif rf, ok := ret.Get(0).(func(context.Context, *models.MTOShipment, string) *models.MTOShipment); ok {\n\t\tr0 = rf(ctx, mtoShipment, eTag)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*models.MTOShipment)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *models.MTOShipment, string) error); ok {\n\t\tr1 = rf(ctx, mtoShipment, eTag)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "619ef591bd57002468e35ac14d79b9ea", "score": "0.51708734", "text": "func (m *MockVirtualMeshClient) UpsertVirtualMeshSpec(arg0 context.Context, arg1 *v1alpha1.VirtualMesh, arg2 ...client.UpdateOption) error {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{arg0, arg1}\n\tfor _, a := range arg2 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"UpsertVirtualMeshSpec\", varargs...)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "0cce43501e1ce08ee37fddb515e78b29", 
"score": "0.5167374", "text": "func (m *MockBookKeeping) Update(ctx context.Context, requestId, status, failureReason, failureAction string) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", ctx, requestId, status, failureReason, failureAction)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "1ff296827be8a96136d6e5f98085638d", "score": "0.51623213", "text": "func (_m *Repository) Update(ctx context.Context, data *recipes.Domain) (recipes.Domain, error) {\n\tret := _m.Called(ctx, data)\n\n\tvar r0 recipes.Domain\n\tif rf, ok := ret.Get(0).(func(context.Context, *recipes.Domain) recipes.Domain); ok {\n\t\tr0 = rf(ctx, data)\n\t} else {\n\t\tr0 = ret.Get(0).(recipes.Domain)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *recipes.Domain) error); ok {\n\t\tr1 = rf(ctx, data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "4601a3767e4e52c42c32ccaf26352bdf", "score": "0.5155273", "text": "func (m *MockArticleRPC) UpdateNewArts(c context.Context, arg *model.ArgIP) error {\n\tret := m.ctrl.Call(m, \"UpdateNewArts\", c, arg)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "81e8705368e86c5baad442536fab5ed6", "score": "0.5154598", "text": "func (_m *Repository) Update(ctx context.Context, newApplePass models.ApplePassDB) error {\n\tret := _m.Called(ctx, newApplePass)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, models.ApplePassDB) error); ok {\n\t\tr0 = rf(ctx, newApplePass)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "60f632cc689582467e97beab69348663", "score": "0.5154164", "text": "func (m *MockActionsAPI) Update(id string, a *management.Action) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", id, a)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "353fec20341ff529512a6e892feb2a2a", "score": "0.51426494", "text": "func (m *MockRepository) Update(e *entity.Template) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", e)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "3c28b07c925dcfc1bbffb27040e12fa5", "score": "0.5137043", "text": "func (m *MockIPostsRepository) Update(post *models.Post, fields []primitive.E) (*models.Post, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", post, fields)\n\tret0, _ := ret[0].(*models.Post)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "60615a7c70f8b4fdcdb234d665daf267", "score": "0.5135972", "text": "func (_m *MTOShipmentStatusUpdater) UpdateMTOShipmentStatus(payload mto_shipment.PatchMTOShipmentStatusParams) (*models.MTOShipment, error) {\n\tret := _m.Called(payload)\n\n\tvar r0 *models.MTOShipment\n\tif rf, ok := ret.Get(0).(func(mto_shipment.PatchMTOShipmentStatusParams) *models.MTOShipment); ok {\n\t\tr0 = rf(payload)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*models.MTOShipment)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(mto_shipment.PatchMTOShipmentStatusParams) error); ok {\n\t\tr1 = rf(payload)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "09343de1f165ef676d57006f87b0f6da", "score": "0.51200277", "text": "func (m *MockDBHandler) Update(arg0 interface{}) *gorm.DB {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", arg0)\n\tret0, _ := ret[0].(*gorm.DB)\n\treturn ret0\n}", "title": "" }, { "docid": "3a6958f8db6998cc3ac90c33ee8ad274", 
"score": "0.5119051", "text": "func (m *MockGormItf) UpdateColumns(values interface{}) GormItf {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"UpdateColumns\", values)\n\tret0, _ := ret[0].(GormItf)\n\treturn ret0\n}", "title": "" }, { "docid": "28143706611647a1c822385defcd3695", "score": "0.5118902", "text": "func (m *MockDedicatedHostsClientAPI) Update(ctx context.Context, resourceGroupName, hostGroupName, hostName string, parameters compute.DedicatedHostUpdate) (compute.DedicatedHostsUpdateFuture, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", ctx, resourceGroupName, hostGroupName, hostName, parameters)\n\tret0, _ := ret[0].(compute.DedicatedHostsUpdateFuture)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "5fa9b723bffc15b7721a04927c71fafc", "score": "0.51185596", "text": "func (_m *Shares) UpdateExpireDate(shareID int, expireDate string) error {\n\tret := _m.Called(shareID, expireDate)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(int, string) error); ok {\n\t\tr0 = rf(shareID, expireDate)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "f5567004c6be10568ac8953d406f9f77", "score": "0.5117992", "text": "func (_m *Repository) UpdateProject(ctx context.Context, m *models.Project) (*models.Project, error) {\n\tret := _m.Called(ctx, m)\n\n\tvar r0 *models.Project\n\tif rf, ok := ret.Get(0).(func(context.Context, *models.Project) *models.Project); ok {\n\t\tr0 = rf(ctx, m)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*models.Project)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *models.Project) error); ok {\n\t\tr1 = rf(ctx, m)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "e0b858e0213b08a7c37f6ed4df5d948d", "score": "0.51115215", "text": "func (m *MockOpenStorageRoleServer) Update(arg0 context.Context, arg1 *api.SdkRoleUpdateRequest) (*api.SdkRoleUpdateResponse, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", arg0, arg1)\n\tret0, _ := ret[0].(*api.SdkRoleUpdateResponse)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "f9ffa8dd42406c1e5ddfb03055b3f1dc", "score": "0.5109992", "text": "func (_m *AttendanceRepository) Update(ctx context.Context, id string, attendance domain.Attendance) error {\n\tret := _m.Called(ctx, id, attendance)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, domain.Attendance) error); ok {\n\t\tr0 = rf(ctx, id, attendance)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "796398bc6e799c49eee7fb54d26ff36b", "score": "0.51010704", "text": "func (_m *MTOShipmentUpdater) UpdateMTOShipmentPrime(ctx context.Context, mtoShipment *models.MTOShipment, eTag string) (*models.MTOShipment, error) {\n\tret := _m.Called(ctx, mtoShipment, eTag)\n\n\tvar r0 *models.MTOShipment\n\tif rf, ok := ret.Get(0).(func(context.Context, *models.MTOShipment, string) *models.MTOShipment); ok {\n\t\tr0 = rf(ctx, mtoShipment, eTag)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*models.MTOShipment)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *models.MTOShipment, string) error); ok {\n\t\tr1 = rf(ctx, mtoShipment, eTag)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "fdfd4d4a11725e818dda4df7869e061f", "score": "0.5096233", "text": "func (client *Client) TestUseUpdate() (Update, error) {\n\tresult, err 
:= client.SendAndCatch(UpdateData{\n\t\t\"@type\": \"testUseUpdate\",\n\t})\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif result.Data[\"@type\"].(string) == \"error\" {\n\t\treturn nil, fmt.Errorf(\"error! code: %d msg: %s\", result.Data[\"code\"], result.Data[\"message\"])\n\t}\n\n\tswitch UpdateEnum(result.Data[\"@type\"].(string)) {\n\n\tcase UpdateAuthorizationStateType:\n\t\tvar update UpdateAuthorizationState\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNewMessageType:\n\t\tvar update UpdateNewMessage\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateMessageSendAcknowledgedType:\n\t\tvar update UpdateMessageSendAcknowledged\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateMessageSendSucceededType:\n\t\tvar update UpdateMessageSendSucceeded\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateMessageSendFailedType:\n\t\tvar update UpdateMessageSendFailed\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateMessageContentType:\n\t\tvar update UpdateMessageContent\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateMessageEditedType:\n\t\tvar update UpdateMessageEdited\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateMessageViewsType:\n\t\tvar update UpdateMessageViews\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateMessageContentOpenedType:\n\t\tvar update UpdateMessageContentOpened\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateMessageMentionReadType:\n\t\tvar update UpdateMessageMentionRead\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateMessageLiveLocationViewedType:\n\t\tvar update UpdateMessageLiveLocationViewed\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNewChatType:\n\t\tvar update UpdateNewChat\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatChatListType:\n\t\tvar update UpdateChatChatList\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatTitleType:\n\t\tvar update UpdateChatTitle\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatPhotoType:\n\t\tvar update UpdateChatPhoto\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatPermissionsType:\n\t\tvar update UpdateChatPermissions\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatLastMessageType:\n\t\tvar update UpdateChatLastMessage\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatOrderType:\n\t\tvar update UpdateChatOrder\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatIsPinnedType:\n\t\tvar update UpdateChatIsPinned\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatIsMarkedAsUnreadType:\n\t\tvar update UpdateChatIsMarkedAsUnread\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatIsSponsoredType:\n\t\tvar update UpdateChatIsSponsored\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatHasScheduledMessagesType:\n\t\tvar update UpdateChatHasScheduledMessages\n\t\terr = 
json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatDefaultDisableNotificationType:\n\t\tvar update UpdateChatDefaultDisableNotification\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatReadInboxType:\n\t\tvar update UpdateChatReadInbox\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatReadOutboxType:\n\t\tvar update UpdateChatReadOutbox\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatUnreadMentionCountType:\n\t\tvar update UpdateChatUnreadMentionCount\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatNotificationSettingsType:\n\t\tvar update UpdateChatNotificationSettings\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateScopeNotificationSettingsType:\n\t\tvar update UpdateScopeNotificationSettings\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatActionBarType:\n\t\tvar update UpdateChatActionBar\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatPinnedMessageType:\n\t\tvar update UpdateChatPinnedMessage\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatReplyMarkupType:\n\t\tvar update UpdateChatReplyMarkup\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatDraftMessageType:\n\t\tvar update UpdateChatDraftMessage\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateChatOnlineMemberCountType:\n\t\tvar update UpdateChatOnlineMemberCount\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNotificationType:\n\t\tvar update UpdateNotification\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNotificationGroupType:\n\t\tvar update UpdateNotificationGroup\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateActiveNotificationsType:\n\t\tvar update UpdateActiveNotifications\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateHavePendingNotificationsType:\n\t\tvar update UpdateHavePendingNotifications\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateDeleteMessagesType:\n\t\tvar update UpdateDeleteMessages\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateUserChatActionType:\n\t\tvar update UpdateUserChatAction\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateUserStatusType:\n\t\tvar update UpdateUserStatus\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateUserType:\n\t\tvar update UpdateUser\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateBasicGroupType:\n\t\tvar update UpdateBasicGroup\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateSupergroupType:\n\t\tvar update UpdateSupergroup\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateSecretChatType:\n\t\tvar update UpdateSecretChat\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateUserFullInfoType:\n\t\tvar update UpdateUserFullInfo\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateBasicGroupFullInfoType:\n\t\tvar update 
UpdateBasicGroupFullInfo\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateSupergroupFullInfoType:\n\t\tvar update UpdateSupergroupFullInfo\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateServiceNotificationType:\n\t\tvar update UpdateServiceNotification\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateFileType:\n\t\tvar update UpdateFile\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateFileGenerationStartType:\n\t\tvar update UpdateFileGenerationStart\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateFileGenerationStopType:\n\t\tvar update UpdateFileGenerationStop\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateCallType:\n\t\tvar update UpdateCall\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateUserPrivacySettingRulesType:\n\t\tvar update UpdateUserPrivacySettingRules\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateUnreadMessageCountType:\n\t\tvar update UpdateUnreadMessageCount\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateUnreadChatCountType:\n\t\tvar update UpdateUnreadChatCount\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateOptionType:\n\t\tvar update UpdateOption\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateInstalledStickerSetsType:\n\t\tvar update UpdateInstalledStickerSets\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateTrendingStickerSetsType:\n\t\tvar update UpdateTrendingStickerSets\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateRecentStickersType:\n\t\tvar update UpdateRecentStickers\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateFavoriteStickersType:\n\t\tvar update UpdateFavoriteStickers\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateSavedAnimationsType:\n\t\tvar update UpdateSavedAnimations\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateSelectedBackgroundType:\n\t\tvar update UpdateSelectedBackground\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateLanguagePackStringsType:\n\t\tvar update UpdateLanguagePackStrings\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateConnectionStateType:\n\t\tvar update UpdateConnectionState\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateTermsOfServiceType:\n\t\tvar update UpdateTermsOfService\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateUsersNearbyType:\n\t\tvar update UpdateUsersNearby\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNewInlineQueryType:\n\t\tvar update UpdateNewInlineQuery\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNewChosenInlineResultType:\n\t\tvar update UpdateNewChosenInlineResult\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNewCallbackQueryType:\n\t\tvar update UpdateNewCallbackQuery\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNewInlineCallbackQueryType:\n\t\tvar 
update UpdateNewInlineCallbackQuery\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNewShippingQueryType:\n\t\tvar update UpdateNewShippingQuery\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNewPreCheckoutQueryType:\n\t\tvar update UpdateNewPreCheckoutQuery\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNewCustomEventType:\n\t\tvar update UpdateNewCustomEvent\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdateNewCustomQueryType:\n\t\tvar update UpdateNewCustomQuery\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdatePollType:\n\t\tvar update UpdatePoll\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tcase UpdatePollAnswerType:\n\t\tvar update UpdatePollAnswer\n\t\terr = json.Unmarshal(result.Raw, &update)\n\t\treturn &update, err\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Invalid type\")\n\t}\n}", "title": "" }, { "docid": "870b7b0d06ebd84e095f9cfea3df497d", "score": "0.50942045", "text": "func (_m *WriteSessionWithinTransaction) UpdateTenant(runtimeID string, tenant string) apperrors.AppError {\n\tret := _m.Called(runtimeID, tenant)\n\n\tvar r0 apperrors.AppError\n\tif rf, ok := ret.Get(0).(func(string, string) apperrors.AppError); ok {\n\t\tr0 = rf(runtimeID, tenant)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(apperrors.AppError)\n\t\t}\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "19a403d9792ba483aeb03bab912a40c1", "score": "0.5093665", "text": "func (m *MockUsersInterface) Update(arg0 context.Context, arg1 *User) (int64, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", arg0, arg1)\n\tret0, _ := ret[0].(int64)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "a6ae8ad49784ff95da402fae552ed91a", "score": "0.5092496", "text": "func (_m *Repository) Update(user *users.Domain, id int) (*users.Domain, error) {\n\tret := _m.Called(user, id)\n\n\tvar r0 *users.Domain\n\tif rf, ok := ret.Get(0).(func(*users.Domain, int) *users.Domain); ok {\n\t\tr0 = rf(user, id)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*users.Domain)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*users.Domain, int) error); ok {\n\t\tr1 = rf(user, id)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "83e0bf3b7925e9ec2a701d7251b51fb3", "score": "0.5089974", "text": "func (m *MockUserRepository) Update(arg0 context.Context, arg1 *api.User) (int64, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Update\", arg0, arg1)\n\tret0, _ := ret[0].(int64)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "a237f53aa9ef22553b4b47c2137f48e6", "score": "0.5089177", "text": "func (m *MockMasterServer) UpdateTask(arg0 context.Context, arg1 *pb.UpdateTaskRequest) (*pb.UpdateTaskResponse, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"UpdateTask\", arg0, arg1)\n\tret0, _ := ret[0].(*pb.UpdateTaskResponse)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "241163532bbaef760259719ad8ae13a7", "score": "0.5085513", "text": "func (m *MockPortServer) PortUpdate(arg0 context.Context, arg1 *PortRequestMsg) (*PortResponseMsg, error) {\n\tret := m.ctrl.Call(m, \"PortUpdate\", arg0, arg1)\n\tret0, _ := ret[0].(*PortResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" } ]
b1bace8bb15a3bc6858f6b83d2b99dc9
The URL-friendly name of the GitHub App.
[ { "docid": "f98884369c43d23121e09dbde38c5588", "score": "0.55258954", "text": "func (o ConnectionGithubEnterpriseConfigPtrOutput) AppSlug() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ConnectionGithubEnterpriseConfig) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.AppSlug\n\t}).(pulumi.StringPtrOutput)\n}", "title": "" } ]
[ { "docid": "09ab91e26b061cd7044127772639253c", "score": "0.6754375", "text": "func (app *Application) Name() string { return \"gopls\" }", "title": "" }, { "docid": "34f2961ab18226aedd52f32494ecc9d8", "score": "0.65822774", "text": "func (g gitHubAuthService) Name() string { return \"GitHub\" }", "title": "" }, { "docid": "3ac15224e938f1028601261943fdb5d2", "score": "0.6407969", "text": "func AppName(app inter.App) string {\n\tname, err := app.MakeE(\"config.App.Name\")\n\tif err != nil {\n\t\treturn \"\"\n\t}\n\treturn name.(string)\n}", "title": "" }, { "docid": "b4b9d765ce1468b34dc66efa3f2163fe", "score": "0.6384394", "text": "func (wa *WebApp) AppName() string {\n\treturn wa.tut.Name()\n}", "title": "" }, { "docid": "acaecada40823c93a99cd3a1bda5b439", "score": "0.6383933", "text": "func AppName() string {\n\treturn appName\n}", "title": "" }, { "docid": "acaecada40823c93a99cd3a1bda5b439", "score": "0.6383933", "text": "func AppName() string {\n\treturn appName\n}", "title": "" }, { "docid": "8fcb8a322cce50b1fd3c0f16333bf902", "score": "0.6142537", "text": "func AppName() string {\n\treturn C.GoString(C.al_get_app_name())\n}", "title": "" }, { "docid": "7b6c207070545f4236ff1835fde98a76", "score": "0.61089784", "text": "func (a *AppManifest) AppName() string {\n\treturn a.Name\n}", "title": "" }, { "docid": "d116bf6bb5cea0da95c54576930451b1", "score": "0.6065786", "text": "func RepoName(url string) string {\n\treturn strings.Replace(path.Base(url), path.Ext(url), \"\", 1)\n}", "title": "" }, { "docid": "c5edb8f31a18471b2140065d9a10ee35", "score": "0.6050983", "text": "func (a *App) Name() string {\n\treturn a.Metadata.Name\n}", "title": "" }, { "docid": "68a48d4e3ce5f51108105c531977641a", "score": "0.6026451", "text": "func myName() string {\n\treturn \"Goku\"\n}", "title": "" }, { "docid": "f4b0131f485a3b0b4dac662092d9b06f", "score": "0.5964806", "text": "func (conf *Config) AppName() string {\n\treturn os.Args[0]\n}", "title": "" }, { "docid": "e30615798e014f5c1618126d03743bd5", "score": "0.59530896", "text": "func AppImageName(source *v1alpha1.Source) string {\n\treturn fmt.Sprintf(\"app-%s-%s:%d\", source.Namespace, source.Name, source.Generation)\n}", "title": "" }, { "docid": "686f0c124eabecf72235de1168f5ca6a", "score": "0.5942974", "text": "func (d *Driver) AppName() string {\n\tvar out struct {\n\t\tAppName string\n\t}\n\n\tif err := d.macRPC.Call(\"driver.Bundle\", &out, nil); err != nil {\n\t\tapp.Panic(err)\n\t}\n\n\tif len(out.AppName) != 0 {\n\t\treturn out.AppName\n\t}\n\n\twd, err := os.Getwd()\n\tif err != nil {\n\t\tapp.Panic(errors.Wrap(err, \"app name unreachable\"))\n\t}\n\n\treturn filepath.Base(wd)\n}", "title": "" }, { "docid": "d1b0ac7a01f020f1d8910cb676bfb0c8", "score": "0.59348947", "text": "func (d *Driver) AppName() string {\n\treturn \"goapp\"\n}", "title": "" }, { "docid": "bca0396536d731e5ba51143ebcb18079", "score": "0.5926112", "text": "func (a *App) Name() string {\n\treturn a.faker.MustParse(\"app.name\")\n}", "title": "" }, { "docid": "74292ff9d2e9b90b976479a70ff154a5", "score": "0.5826075", "text": "func name(URL string) (string, error) {\n\tp, err := url.Parse(URL)\n\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tfilename := \"index.html\"\n\n\tif p.Path != \"\" && !strings.HasSuffix(p.Path, \"/\") {\n\t\tfilename = path.Base(p.Path)\n\t}\n\treturn filename, nil\n}", "title": "" }, { "docid": "c31bbf7152b48e9ed39b6e08034df099", "score": "0.58241034", "text": "func (i *Instance) AppName() string {\n\treturn i.asg.AppName()\n}", "title": "" }, { 
"docid": "78f83794bfb8816806461104a6cccb01", "score": "0.5815307", "text": "func Name() string {\n\treturn driver.AppName()\n}", "title": "" }, { "docid": "39e43e819e0c83d3fc13b5cf0075b7e8", "score": "0.5798541", "text": "func (h Hugo) Name() string {\n\treturn \"hugo\"\n}", "title": "" }, { "docid": "a653419063c8ea1e24bec58f5539a98d", "score": "0.57285345", "text": "func (o WebAppOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *WebApp) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "59e35eae632bce725961042f2078caa8", "score": "0.5700535", "text": "func (c *Cluster) AppName() string {\n\treturn c.account.AppName()\n}", "title": "" }, { "docid": "486e5a073089510c0eed28235aa9618f", "score": "0.569874", "text": "func (r *DeploymentGroup) AppName() pulumi.StringOutput {\n\treturn (pulumi.StringOutput)(r.s.State[\"appName\"])\n}", "title": "" }, { "docid": "64db5a698b283ca502be45a8aad0165e", "score": "0.56906027", "text": "func (a *Account) AppName() string {\n\treturn a.app.name\n}", "title": "" }, { "docid": "0b476b25ec79636aca94680673be47ae", "score": "0.566763", "text": "func (a *anApp) Name() string {\n\treturn \"testing\"\n}", "title": "" }, { "docid": "37ee103819b1aca59ebedd460844fe85", "score": "0.56313443", "text": "func (n *System_Messages_Message) AppName() *System_Messages_Message_AppName {\n\treturn &System_Messages_Message_AppName{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"app-name\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "37ee103819b1aca59ebedd460844fe85", "score": "0.56313443", "text": "func (n *System_Messages_Message) AppName() *System_Messages_Message_AppName {\n\treturn &System_Messages_Message_AppName{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"app-name\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "1e78a5d4f7f84d1ba7bd1a7ffed6d919", "score": "0.56212306", "text": "func getAppName(name string) string {\n\tnames := bytes.Split([]byte(name), []byte(\"meep-\"+activeScenarioName+\"-\"))\n\tif len(names) != 2 {\n\t\treturn \"\"\n\t}\n\treturn string(names[1])\n}", "title": "" }, { "docid": "7d15fed05acf8ca139050080fce731bf", "score": "0.5608713", "text": "func (h Hangout) AppURL() string {\n\treturn h.BrowserURL()\n}", "title": "" }, { "docid": "fee7a52bea92698e822c026e61844641", "score": "0.5603336", "text": "func (o WebAppOutput) DisplayName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *WebApp) pulumi.StringOutput { return v.DisplayName }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "d3436e714a2ac25d9460672517e8fcc4", "score": "0.56008106", "text": "func (p *Project) Name() string {\n\treturn p.parsedURL.Name\n}", "title": "" }, { "docid": "b9c2c5d2063cff7159274298c60c1f15", "score": "0.55883574", "text": "func dirName(repoURL url.URL) string {\n\treturn base64.URLEncoding.EncodeToString([]byte(repoURL.String()))\n}", "title": "" }, { "docid": "fa096df74e9e95d826a2b8a393703bec", "score": "0.5585646", "text": "func DetectAppName(host string) (string, error) {\n\tremote, err := findRemote(host)\n\n\tif err != nil {\n\t\treturn \"\", errors.New(\"Cannot detect the app name.\\n\" +\n\t\t\t\"You may not be in a project OR no application has been created for this project\")\n\t}\n\n\tss := strings.Split(remote, \"/\")\n\treturn strings.Split(ss[len(ss)-1], \".\")[0], nil\n}", "title": "" }, { "docid": "71fad46ffc8f30d4034940ba75d7bc07", "score": "0.55827403", "text": "func (c *Context) AppName() 
string {\n\treturn \"bf-ia-broker\"\n}", "title": "" }, { "docid": "3dd99075178bbb00e795a6b2a7c8226c", "score": "0.55823433", "text": "func (n *Messages_Message) AppName() *Messages_Message_AppName {\n\treturn &Messages_Message_AppName{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"app-name\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "b0a0cf1b93fd6125f52fb00fff5d4468", "score": "0.5581807", "text": "func (o ConnectionGithubEnterpriseConfigOutput) AppSlug() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ConnectionGithubEnterpriseConfig) *string { return v.AppSlug }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "3dd99075178bbb00e795a6b2a7c8226c", "score": "0.55813944", "text": "func (n *Messages_Message) AppName() *Messages_Message_AppName {\n\treturn &Messages_Message_AppName{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"app-name\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "a5df8acd8a8442594139a29a4388bd4f", "score": "0.5570322", "text": "func siteName() string {\n\treturn config.SiteName\n}", "title": "" }, { "docid": "bc3d649f5d54dcc84dda931cf84cf491", "score": "0.5548589", "text": "func (g *Git) Name() string {\n\treturn \"Git\"\n}", "title": "" }, { "docid": "3643bcf7b1413a1717e3d0ab68510838", "score": "0.55394655", "text": "func (e *Experiment) Name() string {\n\tu, err := url.Parse(e.SelfURL)\n\tif err != nil {\n\t\treturn \"\"\n\t}\n\ti := strings.Index(u.Path, endpointExperiment)\n\tif i < 0 {\n\t\treturn \"\"\n\t}\n\treturn u.Path[len(endpointExperiment)+i:]\n}", "title": "" }, { "docid": "82d819103004f7926cba83aa2e0cd455", "score": "0.55083877", "text": "func (i *Image) Name() string {\n\treturn filepath.Base(i.URL)\n}", "title": "" }, { "docid": "f6349fbf992c01c08f774c89330701c5", "score": "0.5480227", "text": "func (a *AppID) Name() string {\n\treturn a.name\n}", "title": "" }, { "docid": "ff61b2b6ffe4383d468c0ffb3dae2a09", "score": "0.5471243", "text": "func getRepoNameForURLPath(repo keppel.Repository, authz *auth.Authorization) string {\n\t//on the regular API, the URL path includes the account name\n\tif authz.Audience.AccountName == \"\" {\n\t\treturn repo.FullName()\n\t}\n\t//on domain-remapped APIs, the URL path contains only the bare repository name\n\treturn repo.Name\n}", "title": "" }, { "docid": "4ef841e676b5f1a473d9458ca5a4664d", "score": "0.54701525", "text": "func (v *GetAddOnAddOnApp) GetName() string { return v.AppData.Name }", "title": "" }, { "docid": "343a7b76ab74ce03801c5c7f3528a4a9", "score": "0.54676175", "text": "func (project *Project) Name() string {\n\treturn filepath.Base(project.Location())\n}", "title": "" }, { "docid": "1592922ca4c26bf4abd615dda48ffafe", "score": "0.546056", "text": "func GetAppName() string {\n\treturn appName\n}", "title": "" }, { "docid": "8de25fe37cfc4d7ec1776e7ae449a780", "score": "0.5449553", "text": "func (v *GetAppApp) GetName() string { return v.AppData.Name }", "title": "" }, { "docid": "7b84a1d779c93dd4bd8c343f2ea213a9", "score": "0.54404306", "text": "func (s *RegisterApplicationInCompass) Name() string {\n\treturn fmt.Sprintf(\"Register application %s in compass\", s.name)\n}", "title": "" }, { "docid": "1214cef8ba7e5939f599e0f1552f8335", "score": "0.54216766", "text": "func (o *GlobalOpts) AppName() string {\n\tif o.appName != \"\" {\n\t\treturn o.appName\n\t}\n\to.appName = viper.GetString(appFlag)\n\treturn o.appName\n}", "title": "" }, { "docid": "b929f5d7b1eb2099505403ab260f77a3", "score": 
"0.5420383", "text": "func (GithubValidator) Name() string { return \"Github validator\" }", "title": "" }, { "docid": "4f54bf93c8306f086ba13da07594921c", "score": "0.5399182", "text": "func (i *ExportApp) Name() string {\n\treturn \"export_app\"\n}", "title": "" }, { "docid": "198d462fd931d66e6bbea4f1f2c2974f", "score": "0.5394147", "text": "func GetYurtAppManagerName() string {\n\treturn projectPrefix + \"app-manager\"\n}", "title": "" }, { "docid": "5da2bca454a6dd48f38c66da54ef8bdd", "score": "0.53912246", "text": "func (o RailsAppLayerOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *RailsAppLayer) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "8cd5a6930efabbc8cecc0094039f4c24", "score": "0.53906196", "text": "func (obj *application) Name() string {\n\treturn obj.name\n}", "title": "" }, { "docid": "ccc96953288e704db9c40d90cbb955ca", "score": "0.53765494", "text": "func (o PlatformApplicationOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *PlatformApplication) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "5b7adbc0b803e6ba887179949797ea66", "score": "0.53666157", "text": "func (v *CreateAppInput) GetName() string { return v.Name }", "title": "" }, { "docid": "78c5a8a0e59eeef66e7ade49e63fb2bd", "score": "0.5349797", "text": "func (d *DockerWebhook) GetName() string {\n\treturn strings.Split(d.repoName, \"/\")[1]\n}", "title": "" }, { "docid": "05670aad897ff61d2b5cd28e44d2c99b", "score": "0.53443635", "text": "func gh(user string) string {\n\treturn GitHubPrefix + user\n}", "title": "" }, { "docid": "04ead40ed0ec2d28ff7a4bc628ce7b52", "score": "0.5323994", "text": "func (o *Application) Name() string {\n\tif o != nil && o.bitmap_&64 != 0 {\n\t\treturn o.name\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "cc17780c85c488abd3edbe1bd5a5d018", "score": "0.5323463", "text": "func appURL(id string) string {\n\treturn fmt.Sprintf(\"%s/app/applications/details/%s\", apiClient.Config.URL, id)\n}", "title": "" }, { "docid": "3cd898db2ba0c8dd894a179015f6554d", "score": "0.53141755", "text": "func (dtas *GenDocTransServer) ApplicationName() string {\n\treturn dtas.AppName\n}", "title": "" }, { "docid": "eeca20382b8253cdc31afab98ccf7f3f", "score": "0.53133535", "text": "func (n *System_Messages_MessageAny) AppName() *System_Messages_Message_AppNameAny {\n\treturn &System_Messages_Message_AppNameAny{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"app-name\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "95d0e1dc477369371bff15d4d5c06ce6", "score": "0.5306254", "text": "func (n *Messages_MessageAny) AppName() *Messages_Message_AppNameAny {\n\treturn &Messages_Message_AppNameAny{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"app-name\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "7f4b8cfbada997e588f0770b035f4044", "score": "0.5299214", "text": "func (gc *Client) Name() string {\n\treturn scm.RepoGithub\n}", "title": "" }, { "docid": "1df8d56627a67334be69f1a081ec8c02", "score": "0.52898866", "text": "func A_name(name string) view.String {\n\treturn view.Printf(\"<a name='%s'></a>\", name)\n}", "title": "" }, { "docid": "f7bc8f9a629672332e0130e876f0aac4", "score": "0.52869326", "text": "func GetAppName(projectName string, containerName string) (string, error) {\n\t// generate the appname as a combination of projectName and containername\n\t// we also need to strip out any non-alphanumeric 
characters or rkt will complain\n\treg, err := regexp.Compile(\"[^A-Za-z0-9]+\")\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tappName := reg.ReplaceAllString(containerName, \"\")\n\tappName = fmt.Sprintf(\"%s-%s\", projectName, appName)\n\treturn appName, nil\n}", "title": "" }, { "docid": "4afa85c03e239186f856956aa0a9eb51", "score": "0.52862823", "text": "func (app AppModule) Name() string {\n\treturn ModuleName\n}", "title": "" }, { "docid": "98ab6e16d44831cfb409b26d662cdf54", "score": "0.52765423", "text": "func (c *GithubClient) ProviderName() string {\n\treturn \"gitHUB\"\n}", "title": "" }, { "docid": "8845bc37159fe72428516138339de2ce", "score": "0.5275849", "text": "func (o *OAuthAppWithOwnerLogin) GetName() string {\n\tif o == nil || o.Name == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Name\n}", "title": "" }, { "docid": "a56555f99b733b9074f34888a37b7b4e", "score": "0.5272717", "text": "func GetName() string {\r\n\treturn newManage().GetName()\r\n}", "title": "" }, { "docid": "48b645bea1c5695e96635eb2d7fcc4e3", "score": "0.52692413", "text": "func URLScheme(name string) string {\n\tswitch name {\n\tcase HTTPS, HTTP, Git:\n\t\treturn name + \"://\"\n\tcase SSHGit:\n\t\treturn \"ssh://git@\"\n\tdefault:\n\t\treturn \"\"\n\t}\n}", "title": "" }, { "docid": "38e0dd0a26cf52a156861dbfc4279067", "score": "0.52659786", "text": "func (g *Git) GetName() string {\n\treturn \"Git\"\n}", "title": "" }, { "docid": "4ec3e3347d9211a446791cf946e42360", "score": "0.52610207", "text": "func (g GooglePlus) Name() string {\n\treturn \"Google+\"\n}", "title": "" }, { "docid": "d7d7a9f1a46ad63ffd2007aaec50664e", "score": "0.52169174", "text": "func (m *Metadata) Basename() string {\n\tbasename := strings.Join([]string{m.Org, m.Repo, m.Branch}, \"-\")\n\tif m.Variant != \"\" {\n\t\tbasename = fmt.Sprintf(\"%s__%s\", basename, m.Variant)\n\t}\n\treturn fmt.Sprintf(\"%s.yaml\", basename)\n}", "title": "" }, { "docid": "deef70cb2c6c62d910cb0e127eea0daf", "score": "0.5214084", "text": "func App() string {\n\treturn apps[rand.Intn(len(apps))]\n}", "title": "" }, { "docid": "478ebfc0328f894c9e90baf35368af10", "score": "0.52061474", "text": "func EnsureAppName(app string) (string, error) {\n\t// check if string https://stackoverflow.com/questions/22593259/\n\tif _, err := strconv.Atoi(app); err != nil {\n\t\treturn app, nil // return as is\n\t}\n\t// convert to name\n\tapps, err := user.GetUserApps()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tname := common.GetKeyForValue(apps, app)\n\tif name == \"\" {\n\t\treturn \"\", errors.New(\"App with ID \" + app + \" not found.\")\n\t}\n\treturn name, nil\n}", "title": "" }, { "docid": "b4d97a69e2b186cd88ae5c3d26052e03", "score": "0.5203064", "text": "func (app *App) DefaultCname() *string {\n\tif !app.Spec.Ingress.GenerateDefaultCname {\n\t\treturn nil\n\t}\n\tif len(app.Spec.Ingress.Controller.ServiceEndpoint) == 0 {\n\t\treturn nil\n\t}\n\turl := fmt.Sprintf(\"%s.%s.%s\", app.Name, app.Spec.Ingress.Controller.ServiceEndpoint, ShipaCloudDomain)\n\treturn &url\n}", "title": "" }, { "docid": "3f5a0a7b738730d4bf01e4a7940e0783", "score": "0.5202979", "text": "func siteTitle() string {\n\treturn config.SiteTitle\n}", "title": "" }, { "docid": "43be7b72acbe6d0253d95648f40d20ba", "score": "0.5201221", "text": "func (parser *AppveyorParser) Name() string {\n\treturn \"AppVeyor\"\n}", "title": "" }, { "docid": "56d8e38610cb01dce5c41153ba068d01", "score": "0.52003586", "text": "func (v *CreateAppCreateAppCreateAppPayloadApp) GetName() string { return 
v.AppData.Name }", "title": "" }, { "docid": "77ad05bd84b818eef9333b76055c0125", "score": "0.5198003", "text": "func makeRepositoryName(name string) string {\n\treturn nonAlphanumericSequence.ReplaceAllString(name, \"-\")\n}", "title": "" }, { "docid": "620a823e73f6b8dacf1cd66a887f367c", "score": "0.5197498", "text": "func (r *PlatformApplication) Name() pulumi.StringOutput {\n\treturn (pulumi.StringOutput)(r.s.State[\"name\"])\n}", "title": "" }, { "docid": "25038acf5c020e1feb693f702b3fb600", "score": "0.51917183", "text": "func (i *HelmInstaller) chartName(project, sha string) string {\n\treturn fmt.Sprintf(\n\t\tchartNameFormat,\n\t\ti.organisation,\n\t\tproject,\n\t\tsha,\n\t\tproject,\n\t)\n}", "title": "" }, { "docid": "e1057522b9efdfca92e74c82507b7739", "score": "0.5188513", "text": "func StudentRepoName(userName string) string {\n\treturn userName + StudentRepoSuffix\n}", "title": "" }, { "docid": "6f816c1f894319512ad907f5a7c88f2c", "score": "0.5185343", "text": "func GetApplicationName() string {\n\tc := C.g_get_application_name()\n\n\treturn C.GoString((*C.char)(c))\n}", "title": "" }, { "docid": "33b45e21bcbe78b316cac23d39f58ea7", "score": "0.5181042", "text": "func (o StandardAppVersionOutput) Name() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *StandardAppVersion) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "5d54aa0bb9089c460e702fe93e571d5d", "score": "0.51789093", "text": "func (h *HTTP) Name() string {\n\tif h.cfg.Name == \"\" {\n\t\treturn fmt.Sprintf(\"%s://%s\", h.schema, h.cfg.BindAddr)\n\t}\n\treturn h.cfg.Name\n}", "title": "" }, { "docid": "7f3e3d2d13dcd885103049be9975d7b4", "score": "0.5177277", "text": "func (t *Token) SiteName() string {\n\treturn t.URL\n}", "title": "" }, { "docid": "57cf64c02f98dc92b41b70ef4a4b6620", "score": "0.5172874", "text": "func (g *Gateway) Name() string {\n\treturn g.FlogoApp.Name\n}", "title": "" }, { "docid": "8812a038aaa967cec33a271b1cb14629", "score": "0.5171894", "text": "func (a AppModuleBasic) Name() string {\n\treturn upgtypes.ModuleName\n}", "title": "" }, { "docid": "41ed1d1137aca5efc9adad8c6506404d", "score": "0.51667196", "text": "func (v *GetAppWithAddonsApp) GetName() string { return v.AppData.Name }", "title": "" }, { "docid": "90150cefee7aeca76496c12ce2072c78", "score": "0.5164904", "text": "func DisplayName() string {\n\tu := systemx.CurrentUserOrDefault(user.User{Username: \"unknown\"})\n\treturn stringsx.DefaultIfBlank(os.Getenv(EnvDisplayName), stringsx.DefaultIfBlank(u.Name, u.Username))\n}", "title": "" }, { "docid": "46654c0e23a0a452e99aff1f1c6559fa", "score": "0.5162447", "text": "func ExternalName() {\n\tfmt.Println(\"+-----+ \")\n\tfmt.Println(\"| Pod | \")\n\tfmt.Println(\"+-----+ \")\n\tfmt.Println(\" ^ \")\n\tfmt.Println(\" | \")\n\tfmt.Println(\" v \")\n\tfmt.Println(\" myservice \")\n\tfmt.Println(\" +-------+------------------------+ \")\n\tfmt.Println(\" | ExternalName | \")\n\tfmt.Println(\" | Service | \")\n\tfmt.Println(\" | | \")\n\tfmt.Println(\" | CNAME | \")\n\tfmt.Println(\" | mydatabase.example.com | \")\n\tfmt.Println(\" | | \")\n\tfmt.Println(\" +------------------------+ \")\n\tfmt.Println(\" ^ ^ \")\n\tfmt.Println(\" | | \")\n\tfmt.Println(\" v v \")\n\tfmt.Println(\" +--------------+ \")\n\tfmt.Println(\" | database | \")\n\tfmt.Println(\" +--------------+ \")\n}", "title": "" }, { "docid": "43ac280749bd844d1202f74970d77308", "score": "0.51506996", "text": "func (a *App) Author() string {\n\treturn a.faker.MustParse(\"app.author\")\n}", 
"title": "" }, { "docid": "6e9cc74e5991cec6e37e6e4e6c857654", "score": "0.5145773", "text": "func (g *Git) String() string {\n\treturn \"github://\" + g.host\n}", "title": "" }, { "docid": "1769fe2f7a643a6f3f32e3b3728b964a", "score": "0.51434195", "text": "func (s ConnectApplicationUsingCompass) Name() string {\n\treturn \"Connect application using Compass\"\n}", "title": "" }, { "docid": "880ddba4504fd4cf3cc5872853bfe9b8", "score": "0.51383746", "text": "func (AppModuleBasic) Name() string {\n\treturn proposal.ModuleName\n}", "title": "" }, { "docid": "cb5608b32e68cd7d9669fcdd855da01d", "score": "0.51344717", "text": "func (p *CircleCIProject) FullName() string {\n\treturn fmt.Sprintf(\"%s/%s\", p.owner, p.projectName)\n}", "title": "" }, { "docid": "96157cd3ca3056b8af83aabd1fe7c4ca", "score": "0.5128958", "text": "func (o *Application) Fullname() string {\n\tif o != nil && o.bitmap_&16 != 0 {\n\t\treturn o.fullname\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "21883c9ed900f26c861663b3059a516f", "score": "0.512761", "text": "func (AppModule) Name() string {\n\treturn ModuleName\n}", "title": "" } ]
2aa223c837eacb0c6609915872be09ed
TryConvertString attempts to parse the string parameter s into an appropriate literal value of type string, bool, int, float64, or time.Time. The function evaluates the following rules in order and returns the first successful result. The rules are: 1. "null", "none", "" => "" 2. "true" => true (bool) 3. "false" => false (bool) 4. "0.234" => float64 5. 131238 => int 6. time.Parse(time.RFC3339Nano, s) 7. time.Parse(time.RFC3339, s) 8. time.Parse("20060102", s) 9. If no rules pass without error, then just return the input value. For example: TryConvertString("a") => "a" (string) TryConvertString("true") => true (bool) TryConvertString("123.31") => 123.31 (float64) TryConvertString("4") => 4 (int) TryConvertString("20180501") => 20180501T00:00:00Z (time.Time)
[ { "docid": "6bac8dbb8871457eddb257f5ff4f1294", "score": "0.75427526", "text": "func TryConvertString(s string) interface{} {\n\ts_lc := strings.ToLower(s)\n\n\tif s_lc == \"\" {\n\t\treturn \"\"\n\t}\n\n\tif s_lc == \"null\" || s_lc == \"none\" || s_lc == \"nil\" {\n\t\treturn Null{}\n\t}\n\n\tif s_lc == \"true\" {\n\t\treturn true\n\t}\n\n\tif s_lc == \"false\" {\n\t\treturn false\n\t}\n\n\tif s_lc == \"[]\" {\n\t\treturn make([]interface{}, 0)\n\t}\n\n\tif s_lc == \"{}\" {\n\t\treturn make(map[string]struct{}, 0)\n\t}\n\n\tif strings.HasPrefix(s_lc, \"0x\") && (len(s_lc) >= 4) && (0 == len(s_lc)%2) {\n\t\tb := []byte(s[2:])\n\t\tvalue := make([]byte, hex.DecodedLen(len(b)))\n\t\t_, err := hex.Decode(value, b)\n\t\tif err == nil {\n\t\t\tif len(value) == 1 {\n\t\t\t\treturn value[0]\n\t\t\t}\n\t\t\treturn value\n\t\t}\n\t}\n\n\tif strings.Contains(s, \".\") {\n\t\tleft_f64, err := strconv.ParseFloat(s, 64)\n\t\tif err == nil {\n\t\t\treturn left_f64\n\t\t}\n\t}\n\n\tleft_int, err := strconv.Atoi(s)\n\tif err == nil {\n\t\treturn left_int\n\t}\n\n\tleft_time, err := time.Parse(time.RFC3339Nano, s)\n\tif err == nil {\n\t\treturn left_time\n\t}\n\n\tleft_time, err = time.Parse(time.RFC3339, s)\n\tif err == nil {\n\t\treturn left_time\n\t}\n\n\tleft_time, err = time.Parse(\"2006-01-02\", s)\n\tif err == nil {\n\t\treturn left_time\n\t}\n\n\tleft_ipv4 := net.ParseIP(s)\n\tif left_ipv4 != nil {\n\t\treturn left_ipv4\n\t}\n\n\t_, left_net, err := net.ParseCIDR(s)\n\tif err == nil {\n\t\treturn left_net\n\t}\n\n\treturn s\n}", "title": "" } ]
[ { "docid": "aa46e9dbbc32689c03eb7d9a9d8e0b58", "score": "0.6342274", "text": "func ConvertStrings() {\n\n\ta := \"21.123438782734\"\n\tb, _ := strconv.ParseBool(a)\n\tfmt.Println(reflect.TypeOf(b))\n\n\tflt, _ := strconv.ParseFloat(a, 64)\n\n\tstint, _ := strconv.ParseInt(a, 10, 64)\n\n\tfmt.Printf(\"a string: %T, bool: %T as float64: %T. int: %T\\n\", a, b, flt, stint)\n\n}", "title": "" }, { "docid": "93abbfb2c8de3198575df0e4c2341218", "score": "0.6178108", "text": "func Parse(s string) interface{} {\n\n\t// nothing is nil\n\tif len(s) == 0 {\n\t\treturn nil\n\t}\n\n\t/*\n\t Is it forced to be a string with quotes?\n\t*/\n\tif strings.HasPrefix(s, literalDoubleQuote) && strings.HasSuffix(s, literalDoubleQuote) {\n\t\treturn strings.Trim(s, literalDoubleQuote)\n\t}\n\tif strings.HasPrefix(s, literalSingleQuote) && strings.HasSuffix(s, literalSingleQuote) {\n\t\treturn strings.Trim(s, literalSingleQuote)\n\t}\n\n\t/*\n\t Check literals\n\t*/\n\tswitch strings.ToLower(s) {\n\t/*\n\t Booleans\n\t*/\n\tcase literalTrue:\n\t\treturn true\n\tcase literalFalse:\n\t\treturn false\n\t/*\n\t\tOther\n\t*/\n\tcase literalNull:\n\t\treturn nil\n\t}\n\n\t/*\n\t Numbers\n\t*/\n\n\t// try int (most common type)\n\tif val, err := strconv.ParseInt(s, 10, 0); err == nil {\n\t\treturn int(val)\n\t}\n\n\t/*\n\t ints\n\t*/\n\n\t// try int8\n\tif val, err := strconv.ParseInt(s, 10, 8); err == nil {\n\t\treturn val\n\t}\n\n\t// try int16\n\tif val, err := strconv.ParseInt(s, 10, 16); err == nil {\n\t\treturn val\n\t}\n\n\t// try int32\n\tif val, err := strconv.ParseInt(s, 10, 32); err == nil {\n\t\treturn val\n\t}\n\n\t// try int64\n\tif val, err := strconv.ParseInt(s, 10, 64); err == nil {\n\t\treturn val\n\t}\n\n\t/*\n\t uints\n\t*/\n\t// try uint8\n\tif val, err := strconv.ParseUint(s, 10, 8); err == nil {\n\t\treturn val\n\t}\n\n\t// try uint16\n\tif val, err := strconv.ParseUint(s, 10, 16); err == nil {\n\t\treturn val\n\t}\n\n\t// try uint32\n\tif val, err := strconv.ParseUint(s, 10, 32); err == nil {\n\t\treturn val\n\t}\n\n\t// try uint64\n\tif val, err := strconv.ParseUint(s, 10, 64); err == nil {\n\t\treturn val\n\t}\n\n\t/*\n\t floats\n\t*/\n\n\t// try float32\n\tif val, err := strconv.ParseFloat(s, 32); err == nil {\n\t\treturn float32(val)\n\t}\n\n\t// try float64\n\tif val, err := strconv.ParseFloat(s, 64); err == nil {\n\t\treturn val\n\t}\n\n\t/*\n\t Nothing - just return the string\n\t*/\n\treturn s\n}", "title": "" }, { "docid": "9d8ea5382c4ae41d3ea915696c689476", "score": "0.6097209", "text": "func TestTryConvert(t *testing.T) {\n\t/* TODO Test for wrong values as well */\n\tvalOf := reflect.ValueOf\n\tvals := map[string]reflect.Value{\n\t\t\"8192\": valOf(uint(8192)), /* uint\t\t*/\n\t\t\"-3\": valOf(-3), /* int\t\t*/\n\t\t\"yes\": valOf(\"yes\"), /* string\t*/\n\t\t\"true\": valOf(true), /* bool\t\t*/\n\t\t\"false\": valOf(false), /*\t\t\t*/\n\t\t\"2.3\": valOf(2.3), /* float\t*/\n\t\t\"3.1415926\": valOf(float64(3.1415926)), /* double\t*/\n\t}\n\tfor str, val := range vals {\n\t\tactual, err := tryConvert(nil, val.Type(), str)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"errored out for value '%v' of expected type '%s'\", str, val.Type())\n\t\t}\n\t\tif val.Interface() != actual.Interface() {\n\t\t\tt.Errorf(\"expected '%v' but got '%v' for type '%s' (actual type: '%s')\", val, actual, val.Type(), actual.Type())\n\t\t}\n\t}\n}", "title": "" }, { "docid": "ecf85fb876e2dba2aec43bdfe1050c85", "score": "0.5975052", "text": "func CoerceString(value string) (interface{}, error) {\n\treturn value, 
nil\n}", "title": "" }, { "docid": "58741a09eb6a93550fd7ac7d0bda9cbd", "score": "0.5970669", "text": "func convert(trueType string, value interface{}) (interface{}, error) {\n\tvar valueString string\n\n\tswitch value.(type) {\n\tcase int:\n\t\tvalueString = fmt.Sprintf(\"%d\", value.(int))\n\tcase bool:\n\t\tvalueString = fmt.Sprintf(\"%t\", value.(bool))\n\tcase float64:\n\t\tif trueType != \"int\" {\n\t\t\tvalueString = fmt.Sprintf(\"%2.f\", value.(float64))\n\t\t}else{\n\t\t\tvalueString = fmt.Sprintf(\"%0.f\", value.(float64))\n\t\t}\n\tcase string:\n\t\tvalueString = value.(string)\n\tdefault:\n\t\tif trueType == \"map\" {\n\t\t\tmarshal, err := json.Marshal(value)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.New(\"can't parse map type\")\n\t\t\t}\n\n\t\t\tvalueString = string(marshal)\n\t\t}else{\n\t\t\treturn nil, errors.New(\"type not found\")\n\t\t}\n\t}\n\n\tswitch strings.ToLower(trueType) {\n\tcase \"int\":\n\t\trInt, err := strconv.Atoi(valueString)\n\t\tif err != nil {\n\t\t\treturn rInt, fmt.Errorf(\"cannot convert %s to %s\", valueString, \"int\")\n\t\t}\n\t\treturn rInt, nil\n\tcase \"float64\":\n\t\trFloat64, err := strconv.ParseFloat(valueString, 64)\n\t\tif err != nil {\n\t\t\treturn rFloat64, fmt.Errorf(\"cannot convert %s to %s\", valueString, \"float64\")\n\t\t}\n\t\treturn rFloat64, nil\n\tcase \"bool\":\n\t\trBool, err := strconv.ParseBool(valueString)\n\t\tif err != nil {\n\t\t\treturn rBool, fmt.Errorf(\"cannot convert %s to %s\", valueString, \"bool\")\n\t\t}\n\t\treturn rBool, nil\n\tcase \"string\", \"map\", \"empty\":\n\t\treturn valueString, nil\n\tdefault:\n\t\treturn value, fmt.Errorf(\"%s's type is not supported\", trueType)\n\t}\n}", "title": "" }, { "docid": "c75f78943e5397f5ea3dc8336e3484f9", "score": "0.5844503", "text": "func convertTime(t string) time.Time {\n\tvar r time.Time\n\tif t != \"\" {\n\t\tr, _ = time.Parse(time.RFC3339, t)\n\t}\n\treturn r\n}", "title": "" }, { "docid": "fdf365e7086443b5c3a5c8c50ba978da", "score": "0.5807155", "text": "func (cv Value) TryAsString() (string, bool) {\n\tv := cv.Value()\n\tif val, err := convertValue(v, reflect.TypeOf(\"\")); v != nil && err == nil {\n\t\treturn val.(string), true\n\t}\n\treturn \"\", false\n}", "title": "" }, { "docid": "a7bbf960739754759a5aab7ff8f94393", "score": "0.57965404", "text": "func TryToTime(s string, bt time.Time) (time.Time, error) {\n\tif s == \"\" {\n\t\treturn ZeroTime, nil\n\t}\n\tif s == \"now\" {\n\t\treturn time.Now(), nil\n\t}\n\n\t// if s is a duration string, add it to bt(base time)\n\tif IsDuration(s) {\n\t\tdur, err := ToDuration(s)\n\t\tif err != nil {\n\t\t\treturn ZeroTime, err\n\t\t}\n\t\treturn bt.Add(dur), nil\n\t}\n\n\t// as a date string, parse it to time.Time\n\treturn ToTime(s)\n}", "title": "" }, { "docid": "1dd767c33c33a8817f3f38367c4a5705", "score": "0.5795317", "text": "func TryToString(val any, defaultAsErr bool) (str string, err error) {\n\tif val == nil {\n\t\treturn\n\t}\n\n\tswitch value := val.(type) {\n\tcase int:\n\t\tstr = strconv.Itoa(value)\n\tcase int8:\n\t\tstr = strconv.Itoa(int(value))\n\tcase int16:\n\t\tstr = strconv.Itoa(int(value))\n\tcase int32: // same as `rune`\n\t\tstr = strconv.Itoa(int(value))\n\tcase int64:\n\t\tstr = strconv.FormatInt(value, 10)\n\tcase uint:\n\t\tstr = strconv.FormatUint(uint64(value), 10)\n\tcase uint8:\n\t\tstr = strconv.FormatUint(uint64(value), 10)\n\tcase uint16:\n\t\tstr = strconv.FormatUint(uint64(value), 10)\n\tcase uint32:\n\t\tstr = strconv.FormatUint(uint64(value), 10)\n\tcase 
uint64:\n\t\tstr = strconv.FormatUint(value, 10)\n\tcase float32:\n\t\tstr = strconv.FormatFloat(float64(value), 'f', -1, 32)\n\tcase float64:\n\t\tstr = strconv.FormatFloat(value, 'f', -1, 64)\n\tcase time.Duration:\n\t\tstr = strconv.FormatInt(int64(value), 10)\n\tcase fmt.Stringer:\n\t\tstr = value.String()\n\tdefault:\n\t\tif defaultAsErr {\n\t\t\terr = comdef.ErrConvType\n\t\t} else {\n\t\t\tstr = fmt.Sprint(value)\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "6c8d5128fa7e1e3d259b791d9a50a565", "score": "0.572444", "text": "func FromString(value string, targetType string) (interface{}, error) {\n\tmessage := \"cast: cannot cast `%v` to type `%v`\"\n\n\tswitch targetType {\n\tcase Int:\n\t\tv, err := strconv.ParseInt(value, 0, 32)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(message, value, targetType)\n\t\t}\n\t\treturn int(v), nil\n\tcase Int8:\n\t\tv, err := strconv.ParseInt(value, 0, 8)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn int8(v), nil\n\tcase Int16:\n\t\tv, err := strconv.ParseInt(value, 0, 16)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn int16(v), nil\n\tcase Int32:\n\t\tv, err := strconv.ParseInt(value, 0, 32)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn int32(v), nil\n\tcase Int64:\n\t\tv, err := strconv.ParseInt(value, 0, 64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn v, nil\n\n\tcase Uint:\n\t\tv, err := strconv.ParseUint(value, 0, 32)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn uint(v), nil\n\tcase Uint8:\n\t\tv, err := strconv.ParseUint(value, 0, 8)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn uint8(v), nil\n\tcase Uint16:\n\t\tv, err := strconv.ParseUint(value, 0, 16)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn uint16(v), nil\n\tcase Uint32:\n\t\tv, err := strconv.ParseUint(value, 0, 32)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn uint32(v), nil\n\tcase Uint64:\n\t\tv, err := strconv.ParseUint(value, 0, 64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn v, nil\n\n\tcase Bool:\n\t\tv, err := strconv.ParseBool(value)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn v, nil\n\n\tcase Float32:\n\t\tv, err := strconv.ParseFloat(value, 64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn float32(v), nil\n\tcase Float64:\n\t\tv, err := strconv.ParseFloat(value, 64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn v, nil\n\n\tcase String:\n\t\treturn value, nil\n\t}\n\n\treturn nil, fmt.Errorf(\"cast: type %v is not supported\", targetType)\n}", "title": "" }, { "docid": "705a42b92b1a72017ac08a8305aa5de6", "score": "0.5707485", "text": "func Convert(i interface{}, t string, params ...interface{}) interface{} {\n\tswitch t {\n\tcase \"int\":\n\t\treturn Int(i)\n\tcase \"int8\":\n\t\treturn Int8(i)\n\tcase \"int16\":\n\t\treturn Int16(i)\n\tcase \"int32\":\n\t\treturn Int32(i)\n\tcase \"int64\":\n\t\treturn Int64(i)\n\tcase \"uint\":\n\t\treturn Uint(i)\n\tcase \"uint8\":\n\t\treturn Uint8(i)\n\tcase \"uint16\":\n\t\treturn Uint16(i)\n\tcase \"uint32\":\n\t\treturn Uint32(i)\n\tcase \"uint64\":\n\t\treturn Uint64(i)\n\tcase \"float32\":\n\t\treturn Float32(i)\n\tcase \"float64\":\n\t\treturn Float64(i)\n\tcase \"bool\":\n\t\treturn Bool(i)\n\tcase \"string\":\n\t\treturn String(i)\n\tcase \"[]byte\":\n\t\treturn Bytes(i)\n\tcase \"[]int\":\n\t\treturn Ints(i)\n\tcase \"[]string\":\n\t\treturn Strings(i)\n\n\tcase \"Time\", \"time.Time\":\n\t\tif len(params) > 
0 {\n\t\t\treturn Time(i, String(params[0]))\n\t\t}\n\t\treturn Time(i)\n\n\tcase \"gtime.Time\":\n\t\tif len(params) > 0 {\n\t\t\treturn GTime(i, String(params[0]))\n\t\t}\n\t\treturn *GTime(i)\n\n\tcase \"GTime\", \"*gtime.Time\":\n\t\tif len(params) > 0 {\n\t\t\treturn GTime(i, String(params[0]))\n\t\t}\n\t\treturn GTime(i)\n\n\tcase \"Duration\", \"time.Duration\":\n\t\treturn Duration(i)\n\tdefault:\n\t\treturn i\n\t}\n}", "title": "" }, { "docid": "0ecdd4fc145cb18a7f8aa23d301c045f", "score": "0.55791587", "text": "func ConvertString(num string, opts ...*options.ConvertOptions) (string, error) {\n\tdec, err := decimal.NewFromString(num)\n\tif err != nil {\n\t\treturn \"\", ErrInvalidNumber\n\t}\n\n\treturn cv(dec, opts...)\n}", "title": "" }, { "docid": "0b48d5e9542c3e5c10b95147e312450a", "score": "0.5568946", "text": "func stringToTime(s string) (time.Time, error) {\n\tsec, err := strconv.ParseInt(s, 10, 64)\n\tif err != nil {\n\t\treturn time.Time{}, err\n\t}\n\treturn time.Unix(sec, 0), nil\n}", "title": "" }, { "docid": "a6844a136daf9ccabe6a603b5c278e1c", "score": "0.5556548", "text": "func (s Stats) TryString(name string, defValue string, aliases ...string) string {\n\tfield := s.Get(name, aliases...)\n\tif field != nil {\n\t\tif value, ok := field.(string); ok {\n\t\t\treturn value\n\t\t}\n\t}\n\treturn defValue\n}", "title": "" }, { "docid": "44ae3f6b34bc4a6fefe3a077039a36ea", "score": "0.55535334", "text": "func convert(s string) float64 {\n\tvar x float64\n\tx, _ = strconv.ParseFloat(s, 64)\n\treturn x\n}", "title": "" }, { "docid": "363223c7931b74e5a6ec08f9bc350828", "score": "0.55398536", "text": "func StringToType(valueString string, targetType reflect.Type) (interface{}, gomerr.Gomerr) {\n\tvar value interface{}\n\tvar err error\n\n\tswitch targetType.Kind() {\n\tcase reflect.String:\n\t\tvalue = valueString\n\tcase reflect.Bool:\n\t\tvalue, err = strconv.ParseBool(valueString)\n\tcase reflect.Int:\n\t\tparsed, parseErr := strconv.ParseInt(valueString, 0, 64)\n\t\tif parseErr != nil {\n\t\t\terr = parseErr\n\t\t} else {\n\t\t\tvalue = int(parsed)\n\t\t}\n\tcase reflect.Int8:\n\t\tparsed, parseErr := strconv.ParseInt(valueString, 0, 8)\n\t\tif parseErr != nil {\n\t\t\terr = parseErr\n\t\t} else {\n\t\t\tvalue = int8(parsed)\n\t\t}\n\tcase reflect.Int16:\n\t\tparsed, parseErr := strconv.ParseInt(valueString, 0, 16)\n\t\tif parseErr != nil {\n\t\t\terr = parseErr\n\t\t} else {\n\t\t\tvalue = int16(parsed)\n\t\t}\n\tcase reflect.Int32:\n\t\tparsed, parseErr := strconv.ParseInt(valueString, 0, 32)\n\t\tif parseErr != nil {\n\t\t\terr = parseErr\n\t\t} else {\n\t\t\tvalue = int32(parsed)\n\t\t}\n\tcase reflect.Int64:\n\t\tvalue, err = strconv.ParseInt(valueString, 0, 64)\n\tcase reflect.Uint:\n\t\tparsed, parseErr := strconv.ParseUint(valueString, 0, 64)\n\t\tif parseErr != nil {\n\t\t\terr = parseErr\n\t\t} else {\n\t\t\tvalue = uint(parsed)\n\t\t}\n\tcase reflect.Uint8:\n\t\tparsed, parseErr := strconv.ParseUint(valueString, 0, 8)\n\t\tif parseErr != nil {\n\t\t\terr = parseErr\n\t\t} else {\n\t\t\tvalue = uint8(parsed)\n\t\t}\n\tcase reflect.Uint16:\n\t\tparsed, parseErr := strconv.ParseUint(valueString, 0, 16)\n\t\tif parseErr != nil {\n\t\t\terr = parseErr\n\t\t} else {\n\t\t\tvalue = uint16(parsed)\n\t\t}\n\tcase reflect.Uint32:\n\t\tparsed, parseErr := strconv.ParseUint(valueString, 0, 32)\n\t\tif parseErr != nil {\n\t\t\terr = parseErr\n\t\t} else {\n\t\t\tvalue = uint32(parsed)\n\t\t}\n\tcase reflect.Uint64:\n\t\tvalue, err = strconv.ParseUint(valueString, 0, 
64)\n\tcase reflect.Uintptr:\n\t\tvalue, err = strconv.ParseUint(valueString, 0, 64)\n\tcase reflect.Float32:\n\t\tparsed, parseErr := strconv.ParseFloat(valueString, 32)\n\t\tif parseErr != nil {\n\t\t\terr = parseErr\n\t\t} else {\n\t\t\tvalue = float32(parsed)\n\t\t}\n\tcase reflect.Float64:\n\t\tvalue, err = strconv.ParseFloat(valueString, 64)\n\tcase reflect.Struct:\n\t\tif targetType == timeType {\n\t\t\tif strings.Index(valueString, \"T\") == -1 {\n\t\t\t\tvalueString = valueString + \"T00:00:00Z\"\n\t\t\t}\n\t\t\tvalue, err = time.Parse(time.RFC3339Nano, valueString)\n\t\t}\n\tcase reflect.Slice:\n\t\tif targetType == byteSliceType {\n\t\t\tvalue = []byte(valueString) // NB: To decode the bytes, use (or define) a field function (e.g. $base64Decode)\n\t\t} // Feature:p2 splitting comma separated values\n\t}\n\n\tif err != nil {\n\t\treturn nil, gomerr.Unmarshal(\"valueString\", valueString, targetType.String()).Wrap(err)\n\t}\n\n\treturn value, nil\n}", "title": "" }, { "docid": "284d62b81f367291ced5c1f4dda4d663", "score": "0.5525676", "text": "func (g GoColumnType) FromString(s string) any {\n\tswitch g {\n\tcase ColTypeUnknown:\n\t\treturn nil\n\tcase ColTypeBytes:\n\t\treturn nil\n\tcase ColTypeString:\n\t\treturn s\n\tcase ColTypeInteger:\n\t\tif s == \"\" {\n\t\t\treturn int(0)\n\t\t}\n\t\ti, _ := strconv.Atoi(s)\n\t\treturn i\n\tcase ColTypeUnsigned:\n\t\tif s == \"\" {\n\t\t\treturn uint(0)\n\t\t}\n\t\ti, _ := strconv.ParseUint(s, 10, 64)\n\t\treturn uint(i)\n\tcase ColTypeInteger64:\n\t\tif s == \"\" {\n\t\t\treturn int64(0)\n\t\t}\n\t\ti, _ := strconv.ParseInt(s, 10, 64)\n\t\treturn i\n\tcase ColTypeUnsigned64:\n\t\tif s == \"\" {\n\t\t\treturn uint64(0)\n\t\t}\n\t\ti, _ := strconv.ParseUint(s, 10, 64)\n\t\treturn i\n\tcase ColTypeTime:\n\t\tif s == \"\" {\n\t\t\treturn time.Time{}\n\t\t}\n\t\td, _ := time.Parse(config.DefaultDateTimeFormat, s)\n\t\treturn d\n\tcase ColTypeFloat32:\n\t\tif s == \"\" {\n\t\t\treturn float32(0)\n\t\t}\n\t\tf, _ := strconv.ParseFloat(s, 32)\n\t\treturn float32(f)\n\tcase ColTypeFloat64:\n\t\tif s == \"\" {\n\t\t\treturn float64(0)\n\t\t}\n\t\tf, _ := strconv.ParseFloat(s, 32)\n\t\treturn f\n\tcase ColTypeBool:\n\t\treturn s == \"true\"\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "e12abfe1ae4df21b9435fa07e508db4f", "score": "0.5441921", "text": "func recast(s string,r bool) interface{} {\n\tif r {\n\t\t// handle numeric strings ahead of boolean\n\t\tif f, err := strconv.ParseFloat(s,64); err == nil {\n\t\t\treturn interface{}(f)\n\t\t}\n\t\t// ParseBool treats \"1\"==true & \"0\"==false\n\t\tif b, err := strconv.ParseBool(s); err == nil {\n\t\t\treturn interface{}(b)\n\t\t}\n\t}\n\treturn interface{}(s)\n}", "title": "" }, { "docid": "7b706b26650aacdc6555dda7f835f086", "score": "0.54390204", "text": "func TryParseTime(ts string, layouts []string, tz *time.Location) time.Time {\n\tconst ms = 10000000000\n\tvar (\n\t\tt time.Time\n\t\terr error\n\t)\n\n\tif tz == nil {\n\t\ttz = time.UTC\n\t}\n\n\tfor _, l := range layouts {\n\t\tif l == \"U\" || l == \"UN\" {\n\t\t\tvar n int64\n\t\t\tif n, err = strconv.ParseInt(ts, 10, 64); err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif l == \"U\" {\n\t\t\t\tif n > ms { // javascript ts in ms\n\t\t\t\t\tn = n / 1000\n\t\t\t\t}\n\t\t\t\tt = time.Unix(n, 0)\n\t\t\t} else {\n\t\t\t\tt = time.Unix(0, n)\n\t\t\t}\n\t\t\treturn t.In(tz)\n\t\t}\n\n\t\tif t, err = time.ParseInLocation(l, ts, tz); err == nil {\n\t\t\treturn t\n\t\t}\n\t}\n\treturn time.Time{}\n}", "title": "" }, { "docid": 
"c0875bac5a536989a235535fd45ffc3a", "score": "0.541819", "text": "func ConvertStringToNumber(v reflect.Value, dest reflect.Type) (reflect.Value, error) {\n\t// Pass value through\n\tif v.Type() == dest {\n\t\treturn v, nil\n\t}\n\t// Skip this hook if source is not string\n\tif v.Kind() != reflect.String {\n\t\treturn nilValue, nil\n\t}\n\t// Convert to int, uint, float or bool\n\tswitch dest.Kind() {\n\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:\n\t\tif value, err := strconv.ParseInt(v.String(), 0, 64); err == nil {\n\t\t\treturn reflect.ValueOf(value).Convert(dest), nil\n\t\t}\n\tcase reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:\n\t\tif value, err := strconv.ParseUint(v.String(), 0, 64); err == nil {\n\t\t\treturn reflect.ValueOf(value).Convert(dest), nil\n\t\t}\n\tcase reflect.Float32, reflect.Float64:\n\t\tif value, err := strconv.ParseFloat(v.String(), 64); err == nil {\n\t\t\treturn reflect.ValueOf(value).Convert(dest), nil\n\t\t}\n\tcase reflect.Bool:\n\t\tif value, err := strconv.ParseBool(v.String()); err == nil {\n\t\t\treturn reflect.ValueOf(value).Convert(dest), nil\n\t\t}\n\t}\n\t// Skip\n\treturn nilValue, nil\n}", "title": "" }, { "docid": "6b0764a6a21b0eccc20a9e29e8f645dd", "score": "0.5415884", "text": "func FromString(val string) IntOrString {\n\treturn IntOrString{Type: String, StrVal: val}\n}", "title": "" }, { "docid": "d4db1ddc3cdbe235c345dfd8cd85567d", "score": "0.54096633", "text": "func ConvertString(v reflect.Value) (string, error) {\n\tif !v.IsValid() {\n\t\treturn \"\", errors.New(\"invalid value\")\n\t}\n\tif k := v.Kind(); k == reflect.Interface || k == reflect.Ptr {\n\t\tif v.IsNil() {\n\t\t\treturn \"\", errors.New(\"value is nil\")\n\t\t}\n\t}\n\tv = Elem(v)\n\tswitch v.Kind() {\n\tcase reflect.String:\n\t\treturn v.String(), nil\n\tcase reflect.Bool:\n\t\treturn fmt.Sprintf(\"%t\", v.Interface()), nil\n\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,\n\t\treflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:\n\t\treturn fmt.Sprintf(\"%d\", v.Interface()), nil\n\tdefault:\n\t\tif v.Type().ConvertibleTo(stringType) {\n\t\t\tif s, ok := (v.Convert(stringType).Interface()).(string); ok {\n\t\t\t\treturn s, nil\n\t\t\t}\n\t\t}\n\t\treturn \"\", errors.Errorf(\"expected string but got %T\", v.Interface())\n\t}\n}", "title": "" }, { "docid": "7d757f39b206b571c03b090b3c22c67d", "score": "0.53587633", "text": "func CastStringBoolToTrueFalse(str string, defaultValue bool) string {\n\t// True and False values\n\tt := \"true\"\n\tf := \"false\"\n\n\tif IsStringBoolTrue(str) {\n\t\treturn t\n\t}\n\tif IsStringBoolFalse(str) {\n\t\treturn f\n\t}\n\n\t// String value unrecognized, return default value\n\n\tif defaultValue {\n\t\treturn t\n\t} else {\n\t\treturn f\n\t}\n}", "title": "" }, { "docid": "f1558812fdab756d12290c76ce3dcf96", "score": "0.5356238", "text": "func (q *responseBuilder) convert(dataType, value string) (interface{}, error) {\n\tconst (\n\t\tstringDatatype = \"string\"\n\t\ttimeDatatype = \"dateTime\"\n\t\tfloatDatatype = \"double\"\n\t\tboolDatatype = \"boolean\"\n\t\tintDatatype = \"long\"\n\t\tuintDatatype = \"unsignedLong\"\n\t\ttimeDataTypeWithFmt = \"dateTime:RFC3339\"\n\t)\n\ts := value\n\tswitch dataType {\n\tcase stringDatatype:\n\t\treturn s, nil\n\tcase timeDatatype, timeDataTypeWithFmt:\n\t\treturn time.Parse(time.RFC3339, s)\n\tcase floatDatatype:\n\t\treturn strconv.ParseFloat(s, 64)\n\tcase boolDatatype:\n\t\tif s == 
\"false\" {\n\t\t\treturn false, nil\n\t\t}\n\t\treturn true, nil\n\tcase intDatatype:\n\t\treturn strconv.ParseInt(s, 10, 64)\n\tcase uintDatatype:\n\t\treturn strconv.ParseUint(s, 10, 64)\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"%s has unknown data type %s\", s, dataType)\n\t}\n}", "title": "" }, { "docid": "cfad3a0b70ccbdd1ce2c467b185ff342", "score": "0.53297037", "text": "func CastStringBoolTo01(str string, defaultValue bool) string {\n\t// True and False values\n\tt := \"1\"\n\tf := \"0\"\n\n\tif IsStringBoolTrue(str) {\n\t\treturn t\n\t}\n\tif IsStringBoolFalse(str) {\n\t\treturn f\n\t}\n\n\t// String value unrecognized, return default value\n\n\tif defaultValue {\n\t\treturn t\n\t} else {\n\t\treturn f\n\t}\n}", "title": "" }, { "docid": "7027be20851d906fc670945a6697ed76", "score": "0.5302733", "text": "func FromString(s string) (time.Time, error) {\n\tif len(s) != 10 {\n\t\treturn time.Time{}, errors.New(\"wrong format\")\n\t}\n\tyear := s[:4]\n\tmonth := s[5:7]\n\tday := s[8:]\n\ty, err := strconv.Atoi(year)\n\tif err != nil {\n\t\treturn time.Time{}, errors.New(\"wrong format\")\n\t}\n\tm, err := strconv.Atoi(month)\n\tif err != nil {\n\t\treturn time.Time{}, errors.New(\"wrong format\")\n\t}\n\td, err := strconv.Atoi(day)\n\tif err != nil {\n\t\treturn time.Time{}, errors.New(\"wrong format\")\n\t}\n\tif m < 1 || m > 12 {\n\t\treturn time.Time{}, errors.New(\"wrong format\")\n\t}\n\tloc, _ := time.LoadLocation(\"\")\n\tres := time.Date(y, time.Month(m), d, 0, 0, 0, 0, loc)\n\treturn res, nil\n}", "title": "" }, { "docid": "63b87241a66887e945e76621896b1e94", "score": "0.530255", "text": "func (eC *ExternalCalls) ConvertStrToInt(str string) (int, error) {\n\treturn strconv.Atoi(str)\n}", "title": "" }, { "docid": "a8701986e68c4e4c7049ab091d9aa85b", "score": "0.5294242", "text": "func parseTs(currentLocation *time.Location, str string) interface{} {\n\tswitch str {\n\tcase \"-infinity\":\n\t\tif infinityTsEnabled {\n\t\t\treturn infinityTsNegative\n\t\t}\n\t\treturn []byte(str)\n\tcase \"infinity\":\n\t\tif infinityTsEnabled {\n\t\t\treturn infinityTsPositive\n\t\t}\n\t\treturn []byte(str)\n\t}\n\tt, err := ParseTimestamp(currentLocation, str)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn t\n}", "title": "" }, { "docid": "c71e167c5c61be88b69e3b1fb2ab689b", "score": "0.5293306", "text": "func FromString(s string) (Type, error) {\n\tswitch s {\n\tcase \"Any\":\n\t\treturn AnyT, nil\n\tcase \"Pointer\":\n\t\treturn PointerT, nil\n\tcase \"Boolean\":\n\t\treturn BooleanT, nil\n\tcase \"Integer\":\n\t\treturn IntegerT, nil\n\tcase \"ByteString\":\n\t\treturn ByteArrayT, nil\n\tcase \"Buffer\":\n\t\treturn BufferT, nil\n\tcase \"Array\":\n\t\treturn ArrayT, nil\n\tcase \"Struct\":\n\t\treturn StructT, nil\n\tcase \"Map\":\n\t\treturn MapT, nil\n\tcase \"Interop\":\n\t\treturn InteropT, nil\n\tdefault:\n\t\treturn 0xFF, ErrInvalidType\n\t}\n}", "title": "" }, { "docid": "694a6fa9aa4c88522eb29e4fd1d9eb82", "score": "0.5285294", "text": "func parseTime(s string) (time.Time, error) {\n\t// attempt to parse time as RFC3339 string\n\tt, err := time.Parse(time.RFC3339Nano, s)\n\tif err == nil {\n\t\treturn t, nil\n\t}\n\n\t// attempt to parse time as float number of unix seconds\n\tif f, err := strconv.ParseFloat(s, 64); err == nil {\n\t\tsec, dec := math.Modf(f)\n\t\treturn time.Unix(int64(sec), int64(dec*(1e9))), nil\n\t}\n\n\t// attempt to parse time as json marshaled value\n\tif err := json.Unmarshal([]byte(s), &t); err == nil {\n\t\treturn t, nil\n\t}\n\n\treturn time.Time{}, err\n}", 
"title": "" }, { "docid": "0b2af4ae8aaf62ed6f9cd5fd3b24c95b", "score": "0.52567387", "text": "func convertDate(input string) (strfmt.Date, error) {\n\ttemp, err := formats.Parse(\"date\", input)\n\tif err != nil {\n\t\treturn strfmt.Date{}, err\n\t}\n\treturn *temp.(*strfmt.Date), nil\n}", "title": "" }, { "docid": "ab466b19a7d1b2a40c4f72475fc4fb6f", "score": "0.519907", "text": "func FromStringOrDefault(s string, defaultVal int64) int64 {\n\n\tv, err := strconv.ParseInt(s, 10, 64)\n\n\tif err != nil {\n\t\treturn defaultVal\n\t}\n\treturn int64(v)\n}", "title": "" }, { "docid": "e22b769cfa6c88a45c499863f17b5dae", "score": "0.5181431", "text": "func StringToTime(layout, s string) (time.Time, error) {\n\treturn time.Parse(layout, s)\n}", "title": "" }, { "docid": "d2ae6a85c51323884b40591b4cd79f36", "score": "0.51778185", "text": "func Parse(timeStr string) (time.Time, error) {\n\terr := errors.New(\"provided string is not W3C-DTF format\")\n\n\tif !strings.Contains(timeStr, \"T\") {\n\t\tswitch true {\n\t\tcase IsYear(timeStr):\n\t\t\treturn ParseYear(timeStr)\n\t\tcase IsYearAndMonth(timeStr):\n\t\t\treturn ParseYearAndMonth(timeStr)\n\t\tcase IsCompleteDate(timeStr):\n\t\t\treturn ParseCompleteDate(timeStr)\n\t\tdefault:\n\t\t\treturn time.Time{}, err\n\t\t}\n\t} else {\n\t\tswitch true {\n\t\tcase IsCompleteDateWithMinutes(timeStr):\n\t\t\treturn ParseCompleteDateWithMinutes(timeStr)\n\t\tcase IsCompleteDateWithSeconds(timeStr):\n\t\t\treturn ParseCompleteDateWithSeconds(timeStr)\n\t\tcase IsCompleteDateWithFractionOfSecond(timeStr):\n\t\t\treturn ParseCompleteDateWithFractionOfSecond(timeStr)\n\t\tdefault:\n\t\t\treturn time.Time{}, err\n\t\t}\n\t}\n}", "title": "" }, { "docid": "068cb9c5534b938b668d2cc239bb4ca4", "score": "0.5169228", "text": "func ConvStrToDate(date string) (time.Time, error) {\n\tmyDate, err := time.Parse(\"2006-01-02 15:04:05\", date)\n\tif err != nil {\n\t\treturn time.Now(), ErrorCustom(err.Error(), \"Error al convertir la fecha\")\n\t}\n\treturn myDate, nil\n}", "title": "" }, { "docid": "f7b4cbe478b595da416a91580beb989a", "score": "0.5126754", "text": "func timeConversion(s string) string {\n // 12p > 12, 1p > 13, 2p > 14, 11a > 11\n // if am and hour is 12, use 0\n // if pm and hour is 12, don't add 12\n bs := []byte(s)\n am := s[len(s)-2] == 'A'\n if am && bs[0] == '1' && bs[1] == '2' {\n bs[0] = '0'\n bs[1] = '0'\n } else if !am && (bs[0] != '1' || bs[1] != '2') {\n bs[0]++\n if x := bs[1] + 2; x <= '9' {\n bs[1] = x\n } else {\n bs[1] = x - 10\n bs[0]++\n }\n }\n return string(bs[:len(bs)-2])\n}", "title": "" }, { "docid": "59ec7c4999bb95cf755f0368c340186e", "score": "0.5123091", "text": "func stringToTimeHookFunc(layout string) mapstructure.DecodeHookFuncType {\n\treturn func(f reflect.Type, t reflect.Type, data any) (any, error) {\n\t\tif f.Kind() != reflect.String {\n\t\t\treturn data, nil\n\t\t}\n\t\tif t != reflect.TypeOf(time.Time{}) {\n\t\t\treturn data, nil\n\t\t}\n\t\treturn time.Parse(layout, data.(string))\n\t}\n}", "title": "" }, { "docid": "62686b8590fbda50902aaaaa24d6fed6", "score": "0.5115761", "text": "func FromString(s string) *string { return &s }", "title": "" }, { "docid": "00c18a0f1921241b8628b6a73215f69e", "score": "0.5105658", "text": "func (s Info) TryString(name string, defValue string, aliases ...string) string {\n\tfield := s.Get(name, aliases...)\n\tif field != nil {\n\t\treturn field.(string)\n\t}\n\treturn defValue\n}", "title": "" }, { "docid": "f647641f6857eab2314425157f112b44", "score": "0.5098433", "text": "func 
StringToTime(s string) (time.Time, error) {\n\tloc, _ := time.LoadLocation(\"Local\")\n\tt, err := time.ParseInLocation(\"2006-1-2 15:04:05\", s, loc)\n\treturn t, err\n}", "title": "" }, { "docid": "e4cc9ef82845a31890bd91f614738371", "score": "0.50977093", "text": "func StringToTime1(s string) (time.Time, error) {\n\tloc, _ := time.LoadLocation(\"Local\")\n\tt, err := time.ParseInLocation(\"2006-01-02T15:04:05+08:00\", s, loc)\n\treturn t, err\n}", "title": "" }, { "docid": "52088dd69372691449763d08c4d857f4", "score": "0.5090398", "text": "func FromString(val string) IntOrString {\n\treturn IntOrString{intstr.FromString(val)}\n}", "title": "" }, { "docid": "8ee0026403da123593314945a25ee5a9", "score": "0.50705767", "text": "func convertBasicType(val string, typ FieldType) (Value, error) {\n\tswitch typ {\n\tcase StringFieldType:\n\t\treturn val, nil\n\tcase IntegerFieldType:\n\t\treturn strconv.Atoi(val)\n\tcase FloatFieldType:\n\t\treturn strconv.ParseFloat(val, 64)\n\tcase BooleanFieldType:\n\t\treturn strconv.ParseBool(val)\n\tcase TimestampFieldType:\n\t\tf, err := strconv.ParseFloat(val, 64)\n\t\treturn Value(time.Unix(0, int64(f*1e9))), err\n\tdefault:\n\t\treturn nil, errors.New(\"unrecognized type\")\n\t}\n}", "title": "" }, { "docid": "5ae49819d251f77912f497763f42a877", "score": "0.50684786", "text": "func (t *Timestamp) str2time(s string) error {\n\tts, err := strconv.ParseInt(s, 10, 64)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*t = Timestamp(time.Unix(ts, 0))\n\treturn nil\n}", "title": "" }, { "docid": "7a5879d285211be0d312e95df1332958", "score": "0.5057275", "text": "func testTime(s string) time.Time {\n\tt, _ := time.Parse(time.RFC3339, s)\n\treturn t\n}", "title": "" }, { "docid": "6cc5ec99a7ce758a32274d8b867a2ed2", "score": "0.5049986", "text": "func FromStringOrDefault(s string, defaultVal int32) int32 {\n\n\tv, err := strconv.ParseInt(s, 10, 32)\n\n\tif err != nil {\n\t\treturn defaultVal\n\t}\n\treturn int32(v)\n}", "title": "" }, { "docid": "ae2468f0f543d1e348611133ee8baa5f", "score": "0.50432384", "text": "func StringToNullTime(layout, s string) (sql.NullTime, error) {\n\tif s == \"\" {\n\t\treturn sql.NullTime{}, nil\n\t}\n\n\tv, err := time.Parse(layout, s)\n\tif err != nil {\n\t\treturn sql.NullTime{}, err\n\t}\n\n\treturn sql.NullTime{\n\t\tTime: v,\n\t\tValid: true,\n\t}, nil\n}", "title": "" }, { "docid": "89cd9847da0f5e50a1692ac08a54fc7c", "score": "0.50323606", "text": "func MustStringToBool(value string) bool {\n\n\t// Parse float\n\tret, err := StringToBool(value)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn ret\n}", "title": "" }, { "docid": "6d9ca67a65fc946f85c2569e2a6ab2fb", "score": "0.50321406", "text": "func FormatFromString(s string) Format {\n\tswitch strings.ToLower(s) {\n\tcase \"json\", \"j\":\n\t\treturn JSON\n\tcase \"yaml\", \"y\":\n\t\treturn YAML\n\tdefault:\n\t\treturn Auto\n\t}\n}", "title": "" }, { "docid": "4e2466c1a60120efd6c19ca3c35f013c", "score": "0.5027404", "text": "func ConvStrADate(date string) (time.Time, error) {\n\tdateTimeOne, errS := time.Parse(\"2006/01/02\", date)\n\tdateTimeTwo, errE := time.Parse(\"2006-01-02\", date)\n\tif errS != nil {\n\t\treturn dateTimeOne, errS\n\t}\n\n\tif errE != nil {\n\t\treturn dateTimeTwo, errE\n\t}\n\treturn dateTimeTwo, nil\n}", "title": "" }, { "docid": "e85a679956c07993332e899cba6adef4", "score": "0.50233704", "text": "func StringToBool(s string) bool {\n\tvar m = map[string]bool{\n\t\t\"true\": true,\n\t\t\"True\": true,\n\t\t\"TRUE\": true,\n\t\t\"false\": false,\n\t\t\"False\": 
false,\n\t\t\"FALSE\": false,\n\t\t\"yes\": true,\n\t\t\"Yes\": true,\n\t\t\"YES\": true,\n\t\t\"no\": false,\n\t\t\"No\": false,\n\t\t\"NO\": false,\n\t}\n\treturn m[s]\n}", "title": "" }, { "docid": "1a46b8ae39878ff78d55fa1c152d093d", "score": "0.50223595", "text": "func (o _String) Try(v string, err error) string {\n\tCheck(err)\n\treturn v\n}", "title": "" }, { "docid": "4f61584eadf47e3fbaa030dd1e2f1c81", "score": "0.5021312", "text": "func stringToType(s string) (uint16, error) {\n\tif s == \"\" {\n\t\treturn 0, nil\n\t}\n\tfor k, v := range dns.TypeToString {\n\t\tif v == s {\n\t\t\treturn k, nil\n\t\t}\n\t}\n\treturn 0, fmt.Errorf(\"unknown type '%s'\", s)\n}", "title": "" }, { "docid": "496f65486dea53a5093e97c3e3fdddb8", "score": "0.5019703", "text": "func FromProtobufStringValue(str *wrappers.StringValue) *string {\n\tif str == nil {\n\t\treturn nil\n\t}\n\treturn &str.Value\n}", "title": "" }, { "docid": "54f5ae5235fe9db4ce6774155325f7f3", "score": "0.50190353", "text": "func ParseString(s string) (*Results, error) {\n\treturn Parse(strings.NewReader(s))\n}", "title": "" }, { "docid": "d25be4cd08a5e9ef6cf9fc6a28b7d745", "score": "0.50114805", "text": "func ParseValueFromString(val string) (Value, error) {\n\tc := val[0]\n\tswitch {\n\t// A float value\n\tcase '0' <= c && c <= '9':\n\t\tf, err := strconv.ParseFloat(val, 64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn NewFloat(f), nil\n\n\t// A map value\n\tcase c == 'm':\n\t\tif !strings.HasPrefix(val, \"map\") {\n\t\t\tbreak\n\t\t}\n\t\treturn ParseMapFromString(val)\n\n\t// A string value\n\tcase c == '\"':\n\t\treturn NewString(strings.Trim(val, \"\\\"\")), nil\n\n\t// A distribution value\n\tcase c == 'd':\n\t\tif !strings.HasPrefix(val, \"dist\") {\n\t\t\tbreak\n\t\t}\n\t\tdistVal, err := ParseDistFromString(val)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn distVal, nil\n\t}\n\n\treturn nil, errors.New(\"unknown value type\")\n}", "title": "" }, { "docid": "554630af2f34e24b9aaf222142967230", "score": "0.50072515", "text": "func DateTimeFromString(sDate string) *time.Time {\r\n\t// fmt.Println(sDate)\r\n\tif strings.Compare(\"null\", sDate) == 0 {\r\n\t\treturn nil\r\n\t}\r\n\tt, _ := time.Parse(\"2006-01-02 15:04:05.999\", sDate)\r\n\t// fmt.Println(err)\r\n\treturn &t\r\n}", "title": "" }, { "docid": "51ab5d56e94aa56e7806ab54b43d6dc0", "score": "0.5002593", "text": "func ReflectStringToField(o reflect.Value, v string, timeFormat string) error {\n\tswitch o.Kind() {\n\tcase reflect.String:\n\t\to.SetString(v)\n\tcase reflect.Bool:\n\t\tb, _ := ParseBool(v)\n\t\to.SetBool(b)\n\tcase reflect.Int8:\n\t\tfallthrough\n\tcase reflect.Int16:\n\t\tfallthrough\n\tcase reflect.Int:\n\t\tfallthrough\n\tcase reflect.Int32:\n\t\tfallthrough\n\tcase reflect.Int64:\n\t\ti64, _ := ParseInt64(v)\n\t\tif !o.OverflowInt(i64) {\n\t\t\to.SetInt(i64)\n\t\t}\n\tcase reflect.Float32:\n\t\tfallthrough\n\tcase reflect.Float64:\n\t\tf64, _ := ParseFloat64(v)\n\t\tif !o.OverflowFloat(f64) {\n\t\t\to.SetFloat(f64)\n\t\t}\n\tcase reflect.Uint8:\n\t\tfallthrough\n\tcase reflect.Uint16:\n\t\tfallthrough\n\tcase reflect.Uint:\n\t\tfallthrough\n\tcase reflect.Uint32:\n\t\tfallthrough\n\tcase reflect.Uint64:\n\t\tui64 := StrToUint64(v)\n\t\tif !o.OverflowUint(ui64) {\n\t\t\to.SetUint(ui64)\n\t\t}\n\tcase reflect.Ptr:\n\t\tif o.IsZero() || o.IsNil() {\n\t\t\t// create object\n\t\t\tbaseType, _, _ := DerefPointersZero(o)\n\t\t\to.Set(reflect.New(baseType.Type()))\n\t\t}\n\n\t\to2 := o.Elem()\n\n\t\tif o.IsZero() 
{\n\t\t\treturn nil\n\t\t}\n\n\t\tswitch o2.Interface().(type) {\n\t\tcase int:\n\t\t\ti64, _ := ParseInt64(v)\n\t\t\tif !o2.OverflowInt(i64) {\n\t\t\t\to2.SetInt(i64)\n\t\t\t}\n\t\tcase int8:\n\t\t\ti64, _ := ParseInt64(v)\n\t\t\tif !o2.OverflowInt(i64) {\n\t\t\t\to2.SetInt(i64)\n\t\t\t}\n\t\tcase int16:\n\t\t\ti64, _ := ParseInt64(v)\n\t\t\tif !o2.OverflowInt(i64) {\n\t\t\t\to2.SetInt(i64)\n\t\t\t}\n\t\tcase int32:\n\t\t\ti64, _ := ParseInt64(v)\n\t\t\tif !o2.OverflowInt(i64) {\n\t\t\t\to2.SetInt(i64)\n\t\t\t}\n\t\tcase int64:\n\t\t\ti64, _ := ParseInt64(v)\n\t\t\tif !o2.OverflowInt(i64) {\n\t\t\t\to2.SetInt(i64)\n\t\t\t}\n\t\tcase float32:\n\t\t\tf64, _ := ParseFloat64(v)\n\t\t\tif !o2.OverflowFloat(f64) {\n\t\t\t\to2.SetFloat(f64)\n\t\t\t}\n\t\tcase float64:\n\t\t\tf64, _ := ParseFloat64(v)\n\t\t\tif !o2.OverflowFloat(f64) {\n\t\t\t\to2.SetFloat(f64)\n\t\t\t}\n\t\tcase uint:\n\t\t\tif !o2.OverflowUint(StrToUint64(v)) {\n\t\t\t\to2.SetUint(StrToUint64(v))\n\t\t\t}\n\t\tcase uint64:\n\t\t\tif !o2.OverflowUint(StrToUint64(v)) {\n\t\t\t\to2.SetUint(StrToUint64(v))\n\t\t\t}\n\t\tcase string:\n\t\t\to2.SetString(v)\n\t\tcase bool:\n\t\t\tb, _ := ParseBool(v)\n\t\t\to2.SetBool(b)\n\t\tcase time.Time:\n\t\t\tif LenTrim(timeFormat) == 0 {\n\t\t\t\to2.Set(reflect.ValueOf(ParseDate(v)))\n\t\t\t} else {\n\t\t\t\to2.Set(reflect.ValueOf(ParseDateTimeCustom(v, timeFormat)))\n\t\t\t}\n\t\tdefault:\n\t\t\treturn fmt.Errorf(o2.Type().Name() + \" Unhandled [1]\")\n\t\t}\n\tdefault:\n\t\tswitch o.Interface().(type) {\n\t\tcase sql.NullString:\n\t\t\to.Set(reflect.ValueOf(sql.NullString{String: v, Valid: true}))\n\t\tcase sql.NullBool:\n\t\t\tb, _ := ParseBool(v)\n\t\t\to.Set(reflect.ValueOf(sql.NullBool{Bool: b, Valid: true}))\n\t\tcase sql.NullFloat64:\n\t\t\tf64, _ := ParseFloat64(v)\n\t\t\to.Set(reflect.ValueOf(sql.NullFloat64{Float64: f64, Valid: true}))\n\t\tcase sql.NullInt32:\n\t\t\ti32, _ := ParseInt32(v)\n\t\t\to.Set(reflect.ValueOf(sql.NullInt32{Int32: int32(i32), Valid: true}))\n\t\tcase sql.NullInt64:\n\t\t\ti64, _ := ParseInt64(v)\n\t\t\to.Set(reflect.ValueOf(sql.NullInt64{Int64: i64, Valid: true}))\n\t\tcase sql.NullTime:\n\t\t\tvar tv time.Time\n\n\t\t\tif LenTrim(timeFormat) == 0 {\n\t\t\t\ttv = ParseDateTime(v)\n\t\t\t} else {\n\t\t\t\ttv = ParseDateTimeCustom(v, timeFormat)\n\t\t\t}\n\n\t\t\to.Set(reflect.ValueOf(sql.NullTime{Time: tv, Valid: true}))\n\t\tcase time.Time:\n\t\t\tif LenTrim(timeFormat) == 0 {\n\t\t\t\to.Set(reflect.ValueOf(ParseDateTime(v)))\n\t\t\t} else {\n\t\t\t\to.Set(reflect.ValueOf(ParseDateTimeCustom(v, timeFormat)))\n\t\t\t}\n\t\tcase nil:\n\t\t\treturn nil\n\t\tdefault:\n\t\t\treturn fmt.Errorf(o.Type().Name() + \" Unhandled [2]\")\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "4056840aa46475e7f61a87021ee776ea", "score": "0.499357", "text": "func (c TypeConverter) Convert(val string) (i interface{}, err error) {\n\tif i, err = c.Bool(val); err == nil {\n\t\treturn i, nil\n\t}\n\n\tfor _, convert := range c.Funcs {\n\t\tif i, err = convert(val); err == nil {\n\t\t\treturn i, nil\n\t\t}\n\t}\n\n\treturn nil, ErrNoMatch\n}", "title": "" }, { "docid": "dd77a2474f40fd5555365a9a97040d72", "score": "0.4989054", "text": "func convertDateTime(input string) (strfmt.DateTime, error) {\n\ttemp, err := formats.Parse(\"date-time\", input)\n\tif err != nil {\n\t\treturn strfmt.DateTime{}, err\n\t}\n\treturn *temp.(*strfmt.DateTime), nil\n}", "title": "" }, { "docid": "268bb4a953c072ca55d548344140f2db", "score": "0.49887654", "text": "func convertValue(value 
interface{}, targetType reflect.Type) (interface{}, error) {\n\tif value == nil {\n\t\treturn reflect.Zero(targetType).Interface(), nil\n\t}\n\n\tvalueType := reflect.TypeOf(value)\n\tif valueType.AssignableTo(targetType) {\n\t\treturn value, nil\n\t} else if targetType.Name() == \"string\" {\n\t\treturn fmt.Sprintf(\"%v\", value), nil\n\t}\n\tswitch v := value.(type) {\n\tcase string:\n\t\ttarget := reflect.New(targetType).Interface()\n\t\tswitch t := target.(type) {\n\t\tcase *int:\n\t\t\treturn strconv.Atoi(v)\n\t\tcase *bool:\n\t\t\treturn strconv.ParseBool(v)\n\t\tcase encoding.TextUnmarshaler:\n\t\t\terr := t.UnmarshalText([]byte(v))\n\t\t\t// target should have a pointer receiver to be able to change itself based on text\n\t\t\treturn reflect.ValueOf(target).Elem().Interface(), err\n\t\t}\n\t}\n\treturn nil, fmt.Errorf(\"can't convert %v to %v\", reflect.TypeOf(value).String(), targetType)\n}", "title": "" }, { "docid": "95d5db8028af3f31bb931da42cbbde6f", "score": "0.49873248", "text": "func stringToInterfaceBasic(paramType string, value interface{}) (interface{}, error) {\n\tswitch paramType {\n\tcase \"string\":\n\t\treturn value, nil\n\tcase \"int\":\n\t\treturn int(value.(float64)), nil\n\tcase \"int8\":\n\t\treturn int8(value.(float64)), nil\n\tcase \"int16\":\n\t\treturn int16(value.(float64)), nil\n\tcase \"int32\":\n\t\treturn int32(value.(float64)), nil\n\tcase \"int40\", \"int48\", \"int56\":\n\t\tdata := int64(value.(float64))\n\t\tv, success := new(big.Int).SetString(strconv.FormatInt(data, 10), 10)\n\t\tif !success {\n\t\t\terr := fmt.Errorf(\"parse type \" + paramType + \" to big.int failed.\")\n\t\t\treturn value, err\n\t\t}\n\t\treturn v, nil\n\tcase \"int64\":\n\t\tdata := int64(value.(float64))\n\t\treturn data, nil\n\tcase \"int72\", \"int80\", \"int88\", \"int96\", \"int104\", \"int112\", \"int120\", \"int128\", \"int136\", \"int144\", \"int152\", \"int160\", \"int168\", \"int176\", \"int184\", \"int192\", \"int200\", \"int208\", \"int216\", \"int224\", \"int232\", \"int240\", \"int248\", \"int256\":\n\t\tv, success := new(big.Int).SetString(value.(string), 10)\n\t\tif !success {\n\t\t\terr := fmt.Errorf(\"parse type \" + paramType + \" to big.int failed.\")\n\t\t\treturn value, err\n\t\t}\n\t\treturn v, nil\n\tcase \"uint\":\n\t\treturn uint(value.(float64)), nil\n\tcase \"uint8\":\n\t\treturn uint8(value.(float64)), nil\n\tcase \"uint16\":\n\t\treturn uint16(value.(float64)), nil\n\tcase \"uint32\":\n\t\treturn uint32(value.(float64)), nil\n\tcase \"uint40\", \"uint48\", \"uint56\":\n\t\tdata := uint64(value.(float64))\n\t\tv, success := new(big.Int).SetString(strconv.FormatUint(data, 10), 10)\n\t\tif !success {\n\t\t\terr := fmt.Errorf(\"parse type \" + paramType + \" to big.int failed.\")\n\t\t\treturn value, err\n\t\t}\n\t\treturn v, nil\n\tcase \"uint64\":\n\t\tdata := uint64(value.(float64))\n\t\treturn data, nil\n\tcase \"uint72\", \"uint80\", \"uint88\", \"uint96\", \"uint104\", \"uint112\", \"uint120\", \"uint128\", \"uint136\", \"uint144\", \"uint152\", \"uint160\", \"uint168\", \"uint176\", \"uint184\", \"uint192\", \"uint200\", \"uint208\", \"uint216\", \"uint224\", \"uint232\", \"uint240\", \"uint248\", \"uint256\":\n\t\tv, success := new(big.Int).SetString(value.(string), 10)\n\t\tif !success {\n\t\t\terr := fmt.Errorf(\"parse type \" + paramType + \" to big.int failed.\")\n\t\t\treturn value, err\n\t\t}\n\t\treturn v, nil\n\tcase \"bool\":\n\t\treturn value.(bool), nil\n\tcase \"[]byte\", \"bytes\":\n\t\tbyteValue := 
common.FromHex(value.(string))\n\t\tresult := make([]byte, len(byteValue))\n\t\tcopy(result[:], byteValue)\n\t\treturn result, nil\n\tcase \"bytes1\", \"bytes2\", \"bytes3\", \"bytes4\", \"bytes5\", \"bytes6\", \"bytes7\", \"bytes8\", \"bytes9\", \"bytes10\", \"bytes11\", \"bytes12\", \"bytes13\", \"bytes14\", \"bytes15\", \"bytes16\", \"bytes17\", \"bytes18\", \"bytes19\", \"bytes20\", \"bytes21\", \"bytes22\", \"bytes23\", \"bytes24\", \"bytes25\", \"bytes26\", \"bytes27\", \"bytes28\", \"bytes29\", \"bytes30\", \"bytes31\", \"bytes32\":\n\t\tlength, err := strconv.ParseInt(paramType[5:], 10, 8)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tbyteValue := common.FromHex(value.(string))\n\t\tif len(byteValue) != int(length) {\n\t\t\terr := fmt.Errorf(\"length not match for type (\" + paramType + \"), value: \" + value.(string) + \", acture length: \" + strconv.FormatInt(int64(len(byteValue)), 10))\n\t\t\treturn value, err\n\t\t}\n\t\tresult := make([]byte, length)\n\t\tcopy(result[:], byteValue)\n\t\treturn mustByteSliceToArray(reflect.ValueOf(result)).Interface(), nil\n\tcase \"address\":\n\t\tresult := common.HexToAddress(value.(string))\n\t\treturn result, nil\n\tdefault:\n\t\terr := fmt.Errorf(\"unsupport interface type (\" + paramType + \")\")\n\t\treturn value, err\n\t}\n}", "title": "" }, { "docid": "f8fb2e3665bfe734b0a201170d633941", "score": "0.49758652", "text": "func castType(m string) string {\n\tswitch m {\n\t// TODO: add more cases\n\tcase \"varchar\":\n\t\treturn \"string\"\n\tcase \"int\":\n\t\treturn \"int64\"\n\tcase \"tinyint\":\n\t\treturn \"bool\"\n\tdefault:\n\t\t// TODO: change this to panic after all cases are set\n\t\tlog.Print(\"Unknown type: \", m)\n\t\treturn \"?\"\n\t}\n}", "title": "" }, { "docid": "e89075917f629713ac876ca57dfaf569", "score": "0.4964366", "text": "func (s *SyncStats) TryString(name string, defValue string, aliases ...string) string {\n\ts.mutex.RLock()\n\tdefer s.mutex.RUnlock()\n\n\treturn s._Stats.TryString(name, defValue, aliases...)\n}", "title": "" }, { "docid": "e90a5c6c801f00e552d53f7380300bca", "score": "0.49598455", "text": "func Convert(src string, kind reflect.Kind) (retVal interface{}) {\n\tswitch kind {\n\tcase reflect.Slice:\n\t\tretVal = strings.SplitN(src, \",\", -1)\n\tcase reflect.String:\n\t\tretVal = src\n\tcase reflect.Int:\n\t\tval, err := strconv.ParseInt(src, 10, 32)\n\t\tif err == nil {\n\t\t\tretVal = int(val)\n\t\t} else {\n\t\t\tretVal = int(0)\n\t\t}\n\tcase reflect.Int8:\n\t\tval, err := strconv.ParseInt(src, 10, 8)\n\t\tif err == nil {\n\t\t\tretVal = int8(val)\n\t\t} else {\n\t\t\tretVal = int8(0)\n\t\t}\n\n\tcase reflect.Int16:\n\t\tval, err := strconv.ParseInt(src, 10, 16)\n\t\tif err == nil {\n\t\t\tretVal = int16(val)\n\t\t} else {\n\t\t\tretVal = int16(0)\n\t\t}\n\n\tcase reflect.Int32:\n\t\tval, err := strconv.ParseInt(src, 10, 32)\n\t\tif err == nil {\n\t\t\tretVal = int32(val)\n\t\t} else {\n\t\t\tretVal = int32(0)\n\t\t}\n\n\tcase reflect.Int64:\n\t\tval, err := strconv.ParseInt(src, 10, 64)\n\t\tif err == nil {\n\t\t\tretVal = int64(val)\n\t\t} else {\n\t\t\tretVal = int64(0)\n\t\t}\n\n\tcase reflect.Uint:\n\t\tval, err := strconv.ParseInt(src, 10, 32)\n\t\tif err == nil {\n\t\t\tretVal = uint(val)\n\t\t} else {\n\t\t\tretVal = uint(0)\n\t\t}\n\tcase reflect.Uint8:\n\t\tval, err := strconv.ParseInt(src, 10, 8)\n\t\tif err == nil {\n\t\t\tretVal = uint8(val)\n\t\t} else {\n\t\t\tretVal = uint8(0)\n\t\t}\n\n\tcase reflect.Uint16:\n\t\tval, err := strconv.ParseInt(src, 10, 16)\n\t\tif err == nil 
{\n\t\t\tretVal = uint16(val)\n\t\t} else {\n\t\t\tretVal = uint16(0)\n\t\t}\n\n\tcase reflect.Uint32:\n\t\tval, err := strconv.ParseInt(src, 10, 32)\n\t\tif err == nil {\n\t\t\tretVal = uint32(val)\n\t\t} else {\n\t\t\tretVal = uint32(0)\n\t\t}\n\n\tcase reflect.Uint64:\n\t\tval, err := strconv.ParseInt(src, 10, 64)\n\t\tif err == nil {\n\t\t\tretVal = uint64(val)\n\t\t} else {\n\t\t\tretVal = uint64(0)\n\t\t}\n\tcase reflect.Float32:\n\t\tval, err := strconv.ParseFloat(src, 32)\n\t\tif err == nil {\n\t\t\tretVal = float32(val)\n\t\t} else {\n\t\t\tretVal = float32(0.0)\n\t\t}\n\tcase reflect.Float64:\n\t\tval, err := strconv.ParseFloat(src, 64)\n\t\tif err == nil {\n\t\t\tretVal = val\n\t\t} else {\n\t\t\tretVal = float64(0.0)\n\t\t}\n\tcase reflect.Bool:\n\t\tval, err := strconv.ParseBool(src)\n\t\tif err == nil {\n\t\t\tretVal = val\n\t\t} else {\n\t\t\tretVal = false\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "f75b73248c4c6b25341c20807c32fa19", "score": "0.49558052", "text": "func TimeStringToTimestamp(timeStr string) (time.Time, bool) {\n\tlayout := \"2006-01-02 15:04:05\" // 정해진 layout --> 변경하면 안 된다.\n\tt, err := time.Parse(layout, timeStr)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn t, false\n\t}\n\treturn t, true\n}", "title": "" }, { "docid": "534233fad815d36622ae9ed83bb97679", "score": "0.49551252", "text": "func FromString(s string) *StringOrConfig {\n\treturn &StringOrConfig{\n\t\tType: String,\n\t\tStrVal: s,\n\t}\n}", "title": "" }, { "docid": "cf77e5f98583d2b206e3d771153e6e83", "score": "0.49481386", "text": "func ConvertToSeconds(s string) (seconds float64, err error) {\n\ts = strings.TrimSpace(s)\n\tparts := strings.Split(s, \":\")\n\tmultipliers := []float64{60 * 60, 60, 1}\n\tif len(parts) == 2 {\n\t\tmultipliers = []float64{60, 1, 1}\n\t} else if len(parts) == 1 {\n\t\tmultipliers = []float64{1, 1, 1}\n\t}\n\tfor i, part := range parts {\n\t\tvar partf float64\n\t\tpartf, err = strconv.ParseFloat(part, 64)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t\tseconds += partf * multipliers[i]\n\t}\n\treturn\n}", "title": "" }, { "docid": "6277d51399731fa62c9f556cd30777f2", "score": "0.49364063", "text": "func (x *MatchType) FromString(s string) bool {\n\ti, ok := MatchType_value[s]\n\tif ok {\n\t\t*x = MatchType(i)\n\t}\n\n\treturn ok\n}", "title": "" }, { "docid": "384a8aa35d0e023c2e06692f7b8d93b7", "score": "0.49359185", "text": "func stringToInt(ss string) (int64, error) {\n\tif ss == \"\" {\n\t\treturn 0, nil\n\t}\n\tif ss == \"-0\" {\n\t\treturn 0, strconv.ErrSyntax\n\t}\n\tif len(ss) > 2 {\n\t\tswitch ss[:2] {\n\t\tcase \"0x\", \"0X\":\n\t\t\treturn strconv.ParseInt(ss[2:], 16, 64)\n\t\tcase \"0b\", \"0B\":\n\t\t\treturn strconv.ParseInt(ss[2:], 2, 64)\n\t\tcase \"0o\", \"0O\":\n\t\t\treturn strconv.ParseInt(ss[2:], 8, 64)\n\t\t}\n\t}\n\treturn strconv.ParseInt(ss, 10, 64)\n}", "title": "" }, { "docid": "1a1fb764c554efc3820dfe8bb164fe22", "score": "0.4934256", "text": "func String() (convert ConvertFunc) {\n\treturn func(val string) (i interface{}, err error) {\n\t\treturn val, nil\n\t}\n}", "title": "" }, { "docid": "d5d91cc7badafb6243c3439f3e67d034", "score": "0.49289435", "text": "func ConvertString(from string, to string, src string) string {\n\treturn string(Convert(from, to, []byte(src)))\n}", "title": "" }, { "docid": "ba89c72644c9b4a13926b4e073030de2", "score": "0.4927009", "text": "func Str2DateP(s string) *time.Time {\n\tif s != \"\" {\n\t\td, _ := time.Parse(\"2006-01-02\", s)\n\t\treturn &d\n\t} else {\n\t\treturn nil\n\t}\n}", "title": "" 
}, { "docid": "aa3a889566fe524a937e8c40c763e387", "score": "0.49226967", "text": "func ValueFromString(str string, dataType DataType) (val DataValue, err error) {\n\tval.DataType = dataType\n\n\tif len(str) == 0 || str == \"null\" {\n\t\treturn\n\t}\n\n\tvar b bool\n\tvar i int64\n\tvar f float64\n\tvar ui uint64\n\n\tswitch dataType {\n\tcase Bool:\n\t\tval.IsBool = true\n\t\tb, err = strconv.ParseBool(str)\n\t\tif err != nil {\n\t\t\terr = utils.StackError(err, \"\")\n\t\t\treturn\n\t\t}\n\t\tval.Valid = true\n\t\tval.BoolVal = b\n\t\treturn\n\tcase Int8:\n\t\ti, err = strconv.ParseInt(str, 10, 8)\n\t\tif err != nil {\n\t\t\terr = utils.StackError(err, \"\")\n\t\t\treturn\n\t\t}\n\n\t\t// We need to convert it from i64 to i8 since strconv.ParseXXX\n\t\t// always returns the largest bit size value.\n\t\ti8 := int8(i)\n\t\tval.Valid = true\n\t\tval.OtherVal = unsafe.Pointer(&i8)\n\t\treturn\n\tcase Uint8, SmallEnum:\n\t\tui, err = strconv.ParseUint(str, 10, 8)\n\t\tif err != nil {\n\t\t\terr = utils.StackError(err, \"\")\n\t\t\treturn\n\t\t}\n\t\tui8 := uint8(ui)\n\t\tval.Valid = true\n\t\tval.OtherVal = unsafe.Pointer(&ui8)\n\t\treturn\n\tcase Int16:\n\t\ti, err = strconv.ParseInt(str, 10, 16)\n\t\tif err != nil {\n\t\t\terr = utils.StackError(err, \"\")\n\t\t\treturn\n\t\t}\n\t\ti16 := int16(i)\n\t\tval.Valid = true\n\t\tval.OtherVal = unsafe.Pointer(&i16)\n\t\treturn\n\tcase Uint16, BigEnum:\n\t\tui, err = strconv.ParseUint(str, 10, 16)\n\t\tif err != nil {\n\t\t\terr = utils.StackError(err, \"\")\n\t\t\treturn\n\t\t}\n\t\tui16 := uint16(ui)\n\t\tval.Valid = true\n\t\tval.OtherVal = unsafe.Pointer(&ui16)\n\t\treturn\n\tcase Int32:\n\t\ti, err = strconv.ParseInt(str, 10, 32)\n\t\tif err != nil {\n\t\t\terr = utils.StackError(err, \"\")\n\t\t\treturn\n\t\t}\n\t\ti32 := int32(i)\n\t\tval.Valid = true\n\t\tval.OtherVal = unsafe.Pointer(&i32)\n\t\treturn\n\tcase Uint32:\n\t\tui, err = strconv.ParseUint(str, 10, 32)\n\t\tif err != nil {\n\t\t\terr = utils.StackError(err, \"\")\n\t\t\treturn\n\t\t}\n\t\tui32 := uint32(ui)\n\t\tval.Valid = true\n\t\tval.OtherVal = unsafe.Pointer(&ui32)\n\t\treturn\n\tcase Int64:\n\t\ti, err = strconv.ParseInt(str, 10, 64)\n\t\tif err != nil {\n\t\t\terr = utils.StackError(err, \"\")\n\t\t\treturn\n\t\t}\n\t\tval.Valid = true\n\t\tval.OtherVal = unsafe.Pointer(&i)\n\t\treturn\n\tcase Float32:\n\t\tf, err = strconv.ParseFloat(str, 32)\n\t\tif err != nil {\n\t\t\terr = utils.StackError(err, \"\")\n\t\t\treturn\n\t\t}\n\t\tf32 := float32(f)\n\t\tval.Valid = true\n\t\tval.OtherVal = unsafe.Pointer(&f32)\n\t\treturn\n\tcase UUID:\n\t\tvar uuidBytes []byte\n\t\tif strings.HasPrefix(str, \"0x\") {\n\t\t\tstr = str[2:]\n\t\t}\n\t\tuuidBytes, err = hex.DecodeString(strings.Replace(str, \"-\", \"\", -1))\n\t\tif err != nil || len(uuidBytes) != 16 {\n\t\t\terr = utils.StackError(err, \"Failed to decode uuid string: %s\", str)\n\t\t\treturn\n\t\t}\n\t\tval.Valid = true\n\t\tval.OtherVal = unsafe.Pointer(&uuidBytes[0])\n\t\treturn\n\tcase GeoPoint:\n\t\tvar point [2]float32\n\t\tpoint, err = GeoPointFromString(str)\n\t\tif err != nil {\n\t\t\terr = utils.StackError(err, \"Failed to read geopoint string: %s\", str)\n\t\t\treturn\n\t\t}\n\t\tval.Valid = true\n\t\tval.OtherVal = unsafe.Pointer(&point[0])\n\t\treturn\n\tdefault:\n\t\terr = utils.StackError(nil, \"Unsupported data type value %#x\", dataType)\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "604bbb36d93d0866996c4ab206580a51", "score": "0.49200746", "text": "func parseStr(str string) interface{} {\n\tswitch str 
{\n\tcase \"components\":\n\t\treturn ComponentFilter{}\n\tdefault:\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "21d15fd998da95c74da5dcd27d5e4e53", "score": "0.49139294", "text": "func (s *SyncInfo) TryString(name string, defValue string, aliases ...string) string {\n\ts.mutex.RLock()\n\tdefer s.mutex.RUnlock()\n\n\treturn s._Info.TryString(name, defValue, aliases...)\n}", "title": "" }, { "docid": "4b62841df20a5d79c539e1143dc3bbd0", "score": "0.491124", "text": "func StrToTime(s, format string) (int64, error) {\n\tt, err := TimeParse(s, format)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn t.Unix(), nil\n}", "title": "" }, { "docid": "0e8ade3aad944a3330197ec3d2332293", "score": "0.49083555", "text": "func stringToT[T RealNumbers](str string) (t T, err error) {\n\tswitch any(t).(type) {\n\tcase float32:\n\t\tnum, err := strconv.ParseFloat(str, 32)\n\t\tif err != nil {\n\t\t\treturn t, moerr.NewInternalErrorNoCtx(\"error while casting %s to %s\", str, T_float32.String())\n\t\t}\n\t\t// FIX: https://stackoverflow.com/a/36391858/1609570\n\t\tnumf32 := float32(num)\n\t\treturn *(*T)(unsafe.Pointer(&numf32)), nil\n\tcase float64:\n\t\tnum, err := strconv.ParseFloat(str, 64)\n\t\tif err != nil {\n\t\t\treturn t, moerr.NewInternalErrorNoCtx(\"error while casting %s to %s\", str, T_float64.String())\n\t\t}\n\t\treturn *(*T)(unsafe.Pointer(&num)), nil\n\tdefault:\n\t\tpanic(moerr.NewInternalErrorNoCtx(\"not implemented\"))\n\t}\n}", "title": "" }, { "docid": "01ee5e20bed8aa2a3d483730fe1df016", "score": "0.490786", "text": "func ConvertTimeString(event *models.Event, timezone string) (string, time.Time) {\n\tstr := \"2006-01-02 15:04\"\n\tdbloc, _ := time.LoadLocation(event.TimeZone)\n\tdbTimeZone, _ := time.ParseInLocation(str, event.DateTime, dbloc)\n\tuserloc, _ := time.LoadLocation(timezone)\n\ttimeFormat := dbTimeZone.In(userloc)\n\tcnvtTimeZone := timeFormat.Format(time.RFC3339)\n\n\treturn cnvtTimeZone, timeFormat\n}", "title": "" }, { "docid": "611f5f359ac5c9a722d48442746e7645", "score": "0.49054578", "text": "func FromString(str string) String {\n\treturn (String)(str)\n}", "title": "" }, { "docid": "0df73db8af641a0176da3672c03e607e", "score": "0.49041268", "text": "func CoerceString(objCursor interface{}) (string, bool) {\n\tasString, ok := objCursor.(string)\n\tif ok && asString != \"\" {\n\t\treturn asString, true\n\t}\n\n\tasStringPtr, ok := objCursor.(*string)\n\tif ok && asStringPtr != nil {\n\t\treturn *asStringPtr, true\n\t}\n\n\tasFloat64, ok := CoerceFloat64(objCursor)\n\tif ok {\n\t\treturn strconv.FormatFloat(asFloat64, 'f', -1, 64), true\n\t}\n\n\tasInt64, ok := CoerceInt64(objCursor)\n\tif ok {\n\t\treturn strconv.Itoa(int(asInt64)), true\n\t}\n\n\tasBool, ok := objCursor.(bool)\n\tif ok {\n\t\treturn strconv.FormatBool(asBool), true\n\t}\n\n\treturn \"\", false\n}", "title": "" }, { "docid": "fe5ef214c059416ab8dccbf28e122c3a", "score": "0.4903897", "text": "func (t *TSDBTime) Parse(data string) error {\n\t// for relative time, we need to remove quotes\n\tif strings.ContainsAny(data, \"\\\"\") {\n\t\t// max is 2 replacements\n\t\tdata = strings.Replace(data, \"\\\"\", \"\", 2)\n\t}\n\tsplits := durationRe.FindStringSubmatch(data)\n\tif splits != nil {\n\t\t// Relative time\n\t\tt.Time = time.Now().Add(-DecodeDuration(splits[1], splits[2]))\n\t\treturn nil\n\t}\n\t// Try integer\n\tif msecsRe.MatchString(data) {\n\t\ttimeStamp := ParseOpenTsdbTimeStamp(data)\n\t\tt.Time = time.Unix(timeStamp/1000, timeStamp%1000)\n\t\treturn nil\n\t}\n\t// Try the various time 
formats\n\tfor _, fmt := range timeFormats {\n\t\tdate, err := time.Parse(fmt, data)\n\t\tif err == nil {\n\t\t\t// Conversion succeeded\n\t\t\tt.Time = date\n\t\t\treturn nil\n\t\t}\n\t}\n\t// None matched\n\treturn errors.New(\"Invalid date: '\" + data + \"'\")\n}", "title": "" }, { "docid": "1048267b5745ad92982e356078d32ec2", "score": "0.48913226", "text": "func convertStringToInt(str string) (int, error) {\n\terr_code, err := strconv.Atoi(str)\n\tif err != nil {\n\t\treturn -1, err //returning -1 for undefined error code\n\t}\n\treturn err_code, nil\n}", "title": "" }, { "docid": "7ed53840350defa0eae0b61f02a61292", "score": "0.4883902", "text": "func FromString(s string) (CallFlag, error) {\n\tflags := strings.Split(s, \",\")\n\tif len(flags) == 0 {\n\t\treturn NoneFlag, errors.New(\"empty flags\")\n\t}\n\tif len(flags) == 1 {\n\t\tfor f, str := range flagString {\n\t\t\tif s == str {\n\t\t\t\treturn f, nil\n\t\t\t}\n\t\t}\n\t\treturn NoneFlag, errors.New(\"unknown flag\")\n\t}\n\n\tvar res CallFlag\n\n\tfor _, flag := range flags {\n\t\tvar knownFlag bool\n\n\t\tflag = strings.TrimSpace(flag)\n\t\tfor _, f := range basicFlags {\n\t\t\tif flag == flagString[f] {\n\t\t\t\tres |= f\n\t\t\t\tknownFlag = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !knownFlag {\n\t\t\treturn NoneFlag, errors.New(\"unknown/inappropriate flag\")\n\t\t}\n\t}\n\treturn res, nil\n}", "title": "" }, { "docid": "b6acefab96893b8baf140c5411e988de", "score": "0.48810148", "text": "func (stringToTimestamp StringToTimestamp) ParseTimestamp(value string) (time.Time, error) {\n\ttimestamp, err := stringToTimestamp.os.Parse(value)\n\tif err != nil {\n\t\treturn timestamp, err\n\t}\n\treturn timestamp, nil\n}", "title": "" }, { "docid": "707552cd4847e688f996207f38009020", "score": "0.48797137", "text": "func ResolveString(t v1.StringTransform, input any) (string, error) {\n\tswitch t.Type {\n\tcase v1.StringTransformTypeFormat:\n\t\tif t.Format == nil {\n\t\t\treturn \"\", errors.Errorf(errStringTransformTypeFormat, string(t.Type))\n\t\t}\n\t\treturn fmt.Sprintf(*t.Format, input), nil\n\tcase v1.StringTransformTypeConvert:\n\t\tif t.Convert == nil {\n\t\t\treturn \"\", errors.Errorf(errStringTransformTypeConvert, string(t.Type))\n\t\t}\n\t\treturn stringConvertTransform(t.Convert, input)\n\tcase v1.StringTransformTypeTrimPrefix, v1.StringTransformTypeTrimSuffix:\n\t\tif t.Trim == nil {\n\t\t\treturn \"\", errors.Errorf(errStringTransformTypeTrim, string(t.Type))\n\t\t}\n\t\treturn stringTrimTransform(input, t.Type, *t.Trim), nil\n\tcase v1.StringTransformTypeRegexp:\n\t\tif t.Regexp == nil {\n\t\t\treturn \"\", errors.Errorf(errStringTransformTypeRegexp, string(t.Type))\n\t\t}\n\t\treturn stringRegexpTransform(input, *t.Regexp)\n\tdefault:\n\t\treturn \"\", errors.Errorf(errStringTransformTypeFailed, string(t.Type))\n\t}\n}", "title": "" }, { "docid": "da3d499bd41ba19b4505b44a09a9e952", "score": "0.48712602", "text": "func stringToStruct(s string) *activityHandlerPayloadScenarioInputsValue {\n\tdecodedData := &activityHandlerPayloadScenarioInputsValue{}\n\n\terr := json.Unmarshal([]byte(s), decodedData)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\treturn decodedData\n}", "title": "" }, { "docid": "253e45b197e8e32cc80f8234ad084839", "score": "0.4870956", "text": "func Strconv() error {\n\t//strconv is a good way to convert to and from strings\n\ts := \"1234\"\n\t// we can specify the base (10) and precision\n\t// 64 bit\n\tres, err := strconv.ParseInt(s, 10, 64)\n\tif err != nil {\n\t\treturn 
err\n\t}\n\n\tfmt.Println(res)\n\n\t// lets try hex\n\tres, err = strconv.ParseInt(\"FF\", 16, 64)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Println(res)\n\n\t// we can do other useful things like:\n\tval, err := strconv.ParseBool(\"true\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Println(val)\n\n\treturn nil\n}", "title": "" }, { "docid": "968d33db699fe1592ea046ad751a3b5b", "score": "0.48674423", "text": "func (p *Parser) convertType(input gjson.Result, desiredType string, name string) (interface{}, error) {\n\tswitch inputType := input.Value().(type) {\n\tcase string:\n\t\tswitch desiredType {\n\t\tcase \"uint\":\n\t\t\tr, err := strconv.ParseUint(inputType, 10, 64)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"unable to convert field %q to type uint: %w\", name, err)\n\t\t\t}\n\t\t\treturn r, nil\n\t\tcase \"int\":\n\t\t\tr, err := strconv.ParseInt(inputType, 10, 64)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"unable to convert field %q to type int: %w\", name, err)\n\t\t\t}\n\t\t\treturn r, nil\n\t\tcase \"float\":\n\t\t\tr, err := strconv.ParseFloat(inputType, 64)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"unable to convert field %q to type float: %w\", name, err)\n\t\t\t}\n\t\t\treturn r, nil\n\t\tcase \"bool\":\n\t\t\tr, err := strconv.ParseBool(inputType)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"unable to convert field %q to type bool: %w\", name, err)\n\t\t\t}\n\t\t\treturn r, nil\n\t\t}\n\tcase bool:\n\t\tswitch desiredType {\n\t\tcase \"string\":\n\t\t\treturn strconv.FormatBool(inputType), nil\n\t\tcase \"int\":\n\t\t\tif inputType {\n\t\t\t\treturn int64(1), nil\n\t\t\t}\n\n\t\t\treturn int64(0), nil\n\t\tcase \"uint\":\n\t\t\tif inputType {\n\t\t\t\treturn uint64(1), nil\n\t\t\t}\n\n\t\t\treturn uint64(0), nil\n\t\t}\n\tcase float64:\n\t\tswitch desiredType {\n\t\tcase \"string\":\n\t\t\treturn fmt.Sprint(inputType), nil\n\t\tcase \"int\":\n\t\t\treturn input.Int(), nil\n\t\tcase \"uint\":\n\t\t\treturn input.Uint(), nil\n\t\tcase \"bool\":\n\t\t\tif inputType == 0 {\n\t\t\t\treturn false, nil\n\t\t\t} else if inputType == 1 {\n\t\t\t\treturn true, nil\n\t\t\t} else {\n\t\t\t\treturn nil, fmt.Errorf(\"unable to convert field %q to type bool\", name)\n\t\t\t}\n\t\t}\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unknown format '%T' for field %q\", inputType, name)\n\t}\n\n\treturn input.Value(), nil\n}", "title": "" }, { "docid": "6f2fd87ac35c1c24331a7f995236cf5c", "score": "0.4866569", "text": "func strToDate(parser *argparse.Parser, dateStr string, errorStr string, defaultToNow bool) *time.Time {\n\tvar dateTime time.Time\n\tvar err error\n\n\tif len(dateStr) > 0 {\n\t\t// Check to see if the date is a time only\n\t\tmatched, _ := regexp.MatchString(\"^[0-9]{1,2}:[0-9]{2}(:[0-9]{2})?([ ]*(am|pm|AM|PM)?)?$\", dateStr)\n\t\tif matched {\n\t\t\tdateStr = time.Now().Format(\"2006-01-02\") + \" \" + dateStr\n\t\t}\n\t\tdateTime, err = dateparse.ParseLocal(dateStr)\n\t\tif err != nil {\n\t\t\tinvalidArgs(parser, err, errorStr)\n\t\t} else {\n\t\t\tif dateTime.Year() == 0 {\n\t\t\t\tdateTime = dateTime.AddDate(time.Now().Year(), 0, 0)\n\t\t\t}\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn &dateTime\n\t}\n\tif defaultToNow {\n\t\tdateTime = time.Now()\n\t\treturn &dateTime\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "df258b0a6ef578f3fff57882fd33b1eb", "score": "0.48637426", "text": "func decodeValue(s string, v reflect.Value) error {\n\tswitch v.Elem().Kind() {\n\tcase 
reflect.String:\n\t\tv.Elem().SetString(s)\n\tcase reflect.Bool:\n\t\tb, err := strconv.ParseBool(s)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tv.Elem().SetBool(b)\n\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:\n\t\ti, err := strconv.ParseInt(s, 10, 64)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tv.Elem().SetInt(i)\n\tcase reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:\n\t\ti, err := strconv.ParseUint(s, 10, 64)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tv.Elem().SetUint(i)\n\tcase reflect.Float32, reflect.Float64:\n\t\tf, err := strconv.ParseFloat(s, 64)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tv.Elem().SetFloat(f)\n\tcase reflect.Complex64, reflect.Complex128:\n\t\tf, err := strconv.ParseComplex(s, 128)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tv.Elem().SetComplex(f)\n\tcase reflect.Map, reflect.Struct:\n\t\ti := v.Interface()\n\t\tswitch i.(type) {\n\t\tcase *time.Time:\n\t\t\tt, err := parseTime(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tv.Elem().Set(reflect.ValueOf(t))\n\t\tdefault:\n\t\t\terr := json.Unmarshal([]byte(s), i)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\tcase reflect.Chan, reflect.Func:\n\tdefault:\n\t\treturn fmt.Errorf(\"unsupported field kind: %s\", v.Elem().Kind().String())\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "b06ab393abd929b93393543849b51bb7", "score": "0.48612925", "text": "func parse(s string) time.Time {\n\tt, err := time.Parse(\"Mon Jan 2 15:04:05 PST 2006\", s)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn t\n}", "title": "" }, { "docid": "0b65d86790907242f27dadba8e673f1f", "score": "0.48569486", "text": "func parseValue(value string) interface{} {\n\tvalue = strings.TrimSpace(value)\n\t// try to parse time\n\ttime, err := time.Parse(\"2006-01-02T15:04:05\", value)\n\tif err == nil {\n\t\treturn time\n\t}\n\t// try to parse integer\n\ti, err := strconv.ParseInt(value, 10, 64)\n\tif err == nil {\n\t\treturn i\n\t}\n\t// if the value isn't time and integer, treat it as string\n\treturn strings.Trim(value, `\"'`)\n}", "title": "" }, { "docid": "2e64113f269b1a7494895630947cf1d0", "score": "0.4856617", "text": "func FormatFromString(s string) (Format, error) {\n\tswitch strings.ToLower(s) {\n\tcase \"json\":\n\t\treturn JSON, nil\n\tcase \"yaml\":\n\t\treturn YAML, nil\n\tcase \"text\":\n\t\treturn Text, nil\n\t// Keep update the `ValidFormats` list on adding new\n\n\tdefault:\n\t\treturn Unknown, ErrInvalidFormat\n\t}\n}", "title": "" }, { "docid": "dad7948ea5d7b5ae0b89a283b352e6ec", "score": "0.48547128", "text": "func parseBool(s string) (bool, error) {\n\tif s == \"\" {\n\t\treturn false, nil\n\t}\n\n\tresult, err := strconv.ParseBool(s)\n\tif err != nil {\n\t\treturn false, errors.Wrap(err, \"parseBool\")\n\t}\n\treturn result, nil\n}", "title": "" }, { "docid": "51cbb962688582756fdb5d47bfbea91c", "score": "0.48545057", "text": "func DefaultConvertor() Convertor {\n\tconvertor := &convertorMapping{}\n\n\tconvertor.\n\t\t//\n\t\t// int to ... convertors\n\t\t//\n\t\tRegisterConvertor(reflect.TypeOf(0), reflect.TypeOf(\"\"), func(in interface{}) (interface{}, error) {\n\t\t\treturn strconv.Itoa(in.(int)), nil\n\t\t}).\n\t\tRegisterConvertor(reflect.TypeOf(0), reflect.TypeOf(0.0), func(in interface{}) (interface{}, error) {\n\t\t\treturn float64(in.(int)), nil\n\t\t}).\n\t\t//\n\t\t// float to ... 
convertors\n\t\t//\n\t\tRegisterConvertor(reflect.TypeOf(0.0), reflect.TypeOf(\"\"), func(in interface{}) (interface{}, error) {\n\t\t\treturn fmt.Sprintf(\"%f\", in), nil\n\t\t}).\n\t\tRegisterConvertor(reflect.TypeOf(0.0), reflect.TypeOf(0), func(in interface{}) (interface{}, error) {\n\t\t\treturn int(math.Round(in.(float64))), nil\n\t\t}).\n\t\t//\n\t\t// string to ... convertors\n\t\t//\n\t\tRegisterConvertor(reflect.TypeOf(\"\"), reflect.TypeOf(0), func(in interface{}) (interface{}, error) {\n\t\t\ti, err := strconv.ParseInt(in.(string), 10, 64)\n\t\t\tif err != nil {\n\t\t\t\treturn 0, err\n\t\t\t}\n\t\t\treturn int(i), err\n\t\t}).\n\t\tRegisterConvertor(reflect.TypeOf(\"\"), reflect.TypeOf(0.0), func(in interface{}) (interface{}, error) {\n\t\t\tf, err := strconv.ParseFloat(in.(string), 64)\n\t\t\tif err != nil {\n\t\t\t\treturn 0.0, err\n\t\t\t}\n\t\t\treturn float64(f), err\n\t\t}).\n\t\tRegisterConvertor(reflect.TypeOf(\"\"), reflect.TypeOf(time.Time{}), func(in interface{}) (interface{}, error) {\n\t\t\tt, err := iso8601.ParseString(in.(string))\n\t\t\tif err != nil {\n\t\t\t\treturn time.Time{}, err\n\t\t\t}\n\t\t\treturn t, err\n\t\t}).\n\t\t//\n\t\t// time.Time to ... convertors\n\t\t//\n\t\tRegisterConvertor(reflect.TypeOf(time.Time{}), reflect.TypeOf(\"\"), func(in interface{}) (interface{}, error) {\n\t\t\treturn in.(time.Time).Format(time.RFC3339), nil\n\t\t})\n\n\treturn convertor\n}", "title": "" }, { "docid": "a3396b00d8fa8c7c74e6aafd2a3e8058", "score": "0.4854291", "text": "func parseTime(timeString string) time.Time {\n\tt, err := time.Parse(time.RFC3339, timeString)\n\tif err != nil {\n\t\tfmt.Printf(\"Parsing Time Erorr!!!!: %s\", err.Error())\n\t}\n\treturn t\n}", "title": "" }, { "docid": "ed4db6541c97598abe46a6d44552a93d", "score": "0.48435017", "text": "func FromString(value string, ptr interface{}) error {\n\tPtrPanic(ptr)\n\ttyp := reflect.TypeOf(ptr).Elem().String()\n\tfromString := FromStringMap[typ]\n\tif fromString == nil {\n\t\treturn errParse\n\t}\n\tval, err := fromString(value)\n\tif err != nil {\n\t\treturn numError(err)\n\t}\n\tSet(ptr, val)\n\treturn nil\n}", "title": "" }, { "docid": "5dbe3c3369ca3b6b559bcb52a8adc93e", "score": "0.48417386", "text": "func parseTime(v string) (t time.Time, e error) {\n\tif t, e = time.Parse(time.RFC3339, v); e == nil {\n\t\treturn\n\t}\n\treturn time.Parse(time.UnixDate, v)\n}", "title": "" }, { "docid": "c162ef1e96699575a33c99c60459e354", "score": "0.4832074", "text": "func parseTime(format string, timeStr string) time.Time {\n\tt, err := time.Parse(format, timeStr)\n\tcheck(err)\n\treturn t\n}", "title": "" } ]
b27741aba6f17eeb5b1c182e49dcac11
sortLists will loop through a given
[ { "docid": "c1db2e64503ded9106fd5ca3e7285002", "score": "0.6823342", "text": "func (b *BoardSorter) sortLists(lists []List) {\n\tfor _, l := range lists {\n\t\tb.sortList(l)\n\t}\n}", "title": "" } ]
[ { "docid": "2319ce0882dcef248c32faf28c3af98f", "score": "0.69486856", "text": "func (s *Shell) Sort(l []Comparable) []Comparable {\n\th := 1\n\tfor h < len(l)/3 {\n\t\th = 3*h + 1\n\t}\n\tfor ; h >= 1; h /= 3 {\n\t\tfor i := h; i < len(l); i++ {\n\t\t\tfor j := i - h; j >= 0 && s.less(l[j+h], l[j]); j -= h {\n\t\t\t\t//s.exchange(&l[j+h], &l[j])\n\t\t\t\tl[j], l[j+h] = l[j+h], l[j]\n\t\t\t}\n\t\t}\n\t}\n\treturn l\n}", "title": "" }, { "docid": "7556dba2004f6c485c2ae6563dc11c3a", "score": "0.6626652", "text": "func SortList(l *list.List, len int) {\n\tif len == 0 {\n\t\treturn\n\t}\n\tfor e := l.Front(); e != nil; e = e.Next() {\n\t\tif e.Next() != nil {\n\t\t\tif e.Value.(int) > e.Value.(int) {\n\t\t\t\tl.MoveAfter(e, e.Next())\n\t\t\t}\n\t\t}\n\t\tif e.Prev() != nil {\n\t\t\tif e.Value.(int) < e.Prev().Value.(int) {\n\t\t\t\tl.MoveBefore(e, e.Prev())\n\t\t\t}\n\t\t}\n\t}\n\tSortList(l, len-1)\n}", "title": "" }, { "docid": "480591e26a57fb83e75f721a8d1f689f", "score": "0.64532214", "text": "func sortListBasedonTFFile(in []string, d *schema.ResourceData, listName string) ([]string, error) {\n\tif attributeCount, ok := d.Get(listName + \".#\").(int); ok {\n\t\tfor i := 0; i < attributeCount; i++ {\n\t\t\tcurrAttributeId := d.Get(listName + \".\" + strconv.Itoa(i))\n\t\t\tfor j := 0; j < len(in); j++ {\n\t\t\t\tif currAttributeId == in[j] {\n\t\t\t\t\tin[i], in[j] = in[j], in[i]\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn in, nil\n\t}\n\treturn in, fmt.Errorf(\"Could not find list: %s\", listName)\n}", "title": "" }, { "docid": "3443b7c4586bdeb32aa1c30c30898919", "score": "0.6370894", "text": "func Fuzz_Sort(list []module.Version) {\n\tmodule.Sort(list)\n}", "title": "" }, { "docid": "e7d1b9f413114d25596edc82bb5687c0", "score": "0.6288218", "text": "func sortFunctionList(list []whisk.Action) {\n\tisLess := func(i, j int) bool {\n\t\treturn list[i].Name < list[j].Name\n\t}\n\tsort.Slice(list, isLess)\n}", "title": "" }, { "docid": "15997ed733ab36e3ca9020988f803ef1", "score": "0.62792945", "text": "func (list List_f64) ShellSort() List_f64 {\n h := 1\n\tfor h < len(list) {\n\t\th = 3 * h + 1\n\t}\n\tfor h >= 1 {\n\t\tfor i := h; i < len(list); i++ {\n\t\t\tfor j := i; j >= h && list[j] < list[j - h]; j = j - h {\n list.Swap(i, j)\n\t\t\t}\n\t\t}\n\t\th = h/3\n\t}\n return list\n}", "title": "" }, { "docid": "2491724161c5e89d57052a25dbf1f7d8", "score": "0.6240467", "text": "func SortChangelists(cls []Changelist) {\n\tsort.Slice(cls, func(i, j int) bool {\n\t\t// Returns true iff cls[i] is less than cls[j].\n\t\tif cls[i].Host < cls[j].Host {\n\t\t\treturn true\n\t\t}\n\t\tif cls[i].Host == cls[j].Host && cls[i].Change < cls[j].Change {\n\t\t\treturn true\n\t\t}\n\t\tif cls[i].Host == cls[j].Host && cls[i].Change == cls[j].Change && cls[i].Patchset < cls[j].Patchset {\n\t\t\treturn true\n\t\t}\n\t\treturn false\n\t})\n}", "title": "" }, { "docid": "8432d85722c0a45ef2811e2fb1986caa", "score": "0.6199125", "text": "func sortList(head *ListNode) *ListNode {\n\tif head == nil || head.Next == nil {\n\t\treturn head\n\t}\n\tvar dummyHeadOne *ListNode = &ListNode{}\n\tvar dummyHeadTwo *ListNode = &ListNode{}\n\tvar dummySortedHead *ListNode = &ListNode{}\n\tvar dummySortedLast *ListNode = dummySortedHead\n\tvar unvisitedNode *ListNode = head\n\n\tvar listLength, level uint = 0, 0\n\tfor unvisitedNode != nil && unvisitedNode.Next != nil {\n\t\tunvisitedNode = addNode(dummyHeadOne, unvisitedNode, 1<<level)\n\t\tunvisitedNode = addNode(dummyHeadTwo, unvisitedNode, 1<<level)\n\t\tnewHead, newTail := 
merge(dummyHeadOne.Next, dummyHeadTwo.Next)\n\t\tdummySortedLast.Next = newHead // this also modify dummySortedHead.Next\n\t\tdummySortedLast = newTail\n\t\tlistLength += 2\n\t}\n\tif unvisitedNode != nil {\n\t\tdummySortedLast.Next = unvisitedNode\n\t\tlistLength++\n\t}\n\tlevel++\n\n\tfor listLength > 1<<level {\n\t\tdummySortedLast = dummySortedHead\n\t\tunvisitedNode = dummySortedHead.Next\n\t\tfor unvisitedNode != nil {\n\t\t\tunvisitedNode = addNode(dummyHeadOne, unvisitedNode, 1<<level)\n\t\t\tunvisitedNode = addNode(dummyHeadTwo, unvisitedNode, 1<<level)\n\t\t\tnewHead, newTail := merge(dummyHeadOne.Next, dummyHeadTwo.Next)\n\t\t\tdummySortedLast.Next = newHead // this actually modify dummySortedHead.Next\n\t\t\tdummySortedLast = newTail\n\t\t}\n\t\tlevel++\n\t}\n\n\treturn dummySortedHead.Next\n}", "title": "" }, { "docid": "b32bbc3eef8199746f08c595157139e5", "score": "0.6110426", "text": "func (results Results) Sort() {\n\tfor i := range results {\n\t\tr := &results[i]\n\t\tr.Sort()\n\t}\n}", "title": "" }, { "docid": "1aef6aeee8ac44024813054d53eb792e", "score": "0.61086756", "text": "func merge_sortList(l1 *ListNode,l2 *ListNode) *ListNode{\n\tif l1 == nil{\n\t\treturn l2\n\t}\n\tif l2 == nil{\n\t\treturn l1\n\t}\n\tvar head *ListNode\n\tif l1.Val < l2.Val{\n\t\thead = l1\n\t\tl1 = l1.Next\n\t}else{\n\t\thead = l2\n\t\tl2 = l2.Next\n\t}\n\tvar visit *ListNode = head\n\tfor l1 != nil && l2 != nil{\n\t\tif l1.Val < l2.Val{\n\t\t\tvisit.Next = l1\n\t\t\tvisit = l1\n\t\t\tl1 = l1.Next\n\t\t}else{\n\t\t\tvisit.Next = l2\n\t\t\tvisit = l2\n\t\t\tl2 = l2.Next\n\t\t}\n\t}\n\tif l1 == nil{\n\t\tvisit.Next = l2\n\t}else if l2 == nil{\n\t\tvisit.Next = l1\n\t}\n\treturn head\n}", "title": "" }, { "docid": "edb67d31182aacfb0cc939b23779d1d7", "score": "0.60329145", "text": "func (ms *multiSorter) Sort(cvsslist []cvss) {\n\tms.cves = cvsslist\n\tsort.Sort(ms)\n}", "title": "" }, { "docid": "ddbdb356d60f93794d042b058ebf4de6", "score": "0.6019047", "text": "func sortBySizeThenName(list []backblaze.Transmitted) {\n\tsort.Slice(list, func(i, j int) bool {\n\t\tif list[i].Size == list[j].Size {\n\t\t\treturn list[i].FName < list[j].FName // FName lexicographical ascending\n\t\t}\n\t\treturn list[i].Size > list[j].Size // Size descending\n\t})\n\n}", "title": "" }, { "docid": "21034d7900204ef3dbdb73d2b05131f2", "score": "0.601233", "text": "func sortRuntimeList(runtimes []*node.Runtime) {\n\tsort.Slice(runtimes, func(i, j int) bool {\n\t\treturn bytes.Compare(runtimes[i].ID[:], runtimes[j].ID[:]) == -1\n\t})\n}", "title": "" }, { "docid": "1dc4b05520f4f0a5842a5bb6c5710b66", "score": "0.60069805", "text": "func (b bananas) sort() bananas {\n\tfor skupc, bunch := range b {\n\t\tsort.Slice(bunch, func(i, j int) bool {\n\t\t\treturn bunch[i].Quantity > bunch[j].Quantity\n\t\t})\n\t\tb[skupc] = bunch\n\t}\n\treturn b\n}", "title": "" }, { "docid": "571859c9c3cad27d8fb241bd69e68fe6", "score": "0.6003695", "text": "func (solver *Solver) sortLearnts(low, high int) {\n\tif low < high {\n\t\tp := solver.partition(low, high)\n\t\tsolver.sortLearnts(low, p-1)\n\t\tsolver.sortLearnts(p+1, high)\n\t}\n}", "title": "" }, { "docid": "39d0d5dbaa14a0d7128ee7e3db3f41aa", "score": "0.59976923", "text": "func SortList(head *ListNode) *ListNode {\n\treturn sortList2(head)\n}", "title": "" }, { "docid": "cf470881435af04db7f145a6c0aa367e", "score": "0.5978032", "text": "func sortResults(r []Result) []Result {\n\tsort.Slice(r, func(i, j int) bool {\n\t\tr1 := r[i]\n\t\tr2 := r[j]\n\n\t\t// if result is equal sort by 
smaShort\n\t\tif r1.result == r2.result {\n\t\t\t// if smaShort is equal sort by smaLong\n\t\t\tif r1.smaShort == r2.smaShort {\n\t\t\t\t// sort by smaLong\n\t\t\t\treturn r1.smaLong < r2.smaLong\n\t\t\t}\n\t\t\t// sort by smaShort\n\t\t\treturn r1.smaShort < r2.smaShort\n\t\t}\n\t\t// else sort by result\n\t\treturn r1.result > r2.result\n\t})\n\treturn r\n}", "title": "" }, { "docid": "29dea5c91253919760c692d27f7a3737", "score": "0.59461915", "text": "func mergeLists(a, b []int, begin, middle, end int) {\n\ti := begin\n\tj := middle\n\n\tfor k := begin; k < end; k++ {\n\t\tif i < middle && (j >= end || a[i] < a[j]) {\n\t\t\tb[k] = a[i]\n\t\t\ti++\n\t\t} else {\n\t\t\tb[k] = a[j]\n\t\t\tj++\n\t\t}\n\t}\n}", "title": "" }, { "docid": "f8dc2bcf3bc454165a28ed22f955b292", "score": "0.59426737", "text": "func sortParameters(p []spec.Parameter) {\n\tsort.Sort(byNameIn{p})\n}", "title": "" }, { "docid": "bfbc4846b873767fdd12b68d65ace0f3", "score": "0.59424955", "text": "func Sort(items []Audio) []Audio{\n\tvar num = len(items)\n \n if num == 1 {\n return items\n }\n \n middle := int(num / 2)\n var (\n left = make([]Audio, middle)\n right = make([]Audio, num-middle)\n )\n for i := 0; i < num; i++ {\n if i < middle {\n left[i] = items[i]\n } else {\n right[i-middle] = items[i]\n }\n }\n \n return merge(Sort(left), Sort(right))\n}", "title": "" }, { "docid": "32daaa584a0e1fdd1c5a85f9cc911fba", "score": "0.5931604", "text": "func SortP(slc []int) {\n\tSort(slc)\n\twg.Done()\n}", "title": "" }, { "docid": "48574a2fcede184f5a7fe054051321e9", "score": "0.59227914", "text": "func SortNodeList(nodes []*node.Node) {\n\tsort.Slice(nodes, func(i, j int) bool {\n\t\treturn bytes.Compare(nodes[i].ID[:], nodes[j].ID[:]) == -1\n\t})\n}", "title": "" }, { "docid": "3227ce13426ba3c825ed8da5de4d4a5a", "score": "0.59185344", "text": "func (b *BoardSorter) sortList(l List) {\n\tCards, err := b.Trello.getListCards(l.Id)\n\tif err != nil {\n\t\tlog.Printf(\"failed to get cards for list: %v\", l.Id)\n\t\treturn\n\t}\n\tb.sortCards(Cards)\n\tb.updateCards(Cards)\n}", "title": "" }, { "docid": "083b42f86aa9a1661a0387e4122becb3", "score": "0.5906942", "text": "func Sort(elements []int) {\n\tif len(elements) <= 1 {\n\t\treturn\n\t}\n\tmid := len(elements) / 2\n\tleft, right := elements[:mid], elements[mid:]\n\tSort(left)\n\tSort(right)\n\tmerge(left, right)\n}", "title": "" }, { "docid": "d54cce728fb3af1176f474e4b1e4a3b4", "score": "0.58819425", "text": "func walMergesort(tls *libc.TLS, aContent uintptr, aBuffer uintptr, aList uintptr, pnList uintptr) { /* sqlite3.c:61455:13: */\n\tbp := tls.Alloc(220)\n\tdefer tls.Free(220)\n\n\tvar nList int32 = *(*int32)(unsafe.Pointer(pnList)) // Size of input list\n\t*(*int32)(unsafe.Pointer(bp + 216 /* nMerge */)) = 0 // Number of elements in list aMerge\n\t*(*uintptr)(unsafe.Pointer(bp + 208 /* aMerge */)) = uintptr(0) // List to be merged\n\tvar iList int32 // Index into input list\n\tvar iSub U32 = U32(0) // Index into aSub array\n\t// var aSub [13]Sublist at bp, 208\n\t// Array of sub-lists\n\n\tlibc.Xmemset(tls, bp /* &aSub[0] */, 0, uint64(unsafe.Sizeof([13]Sublist{})))\n\n\tfor iList = 0; iList < nList; iList++ {\n\t\t*(*int32)(unsafe.Pointer(bp + 216 /* nMerge */)) = 1\n\t\t*(*uintptr)(unsafe.Pointer(bp + 208 /* aMerge */)) = (aList + uintptr(iList)*2)\n\t\tfor iSub = U32(0); (iList & (int32(1) << iSub)) != 0; iSub++ {\n\t\t\tvar p uintptr\n\n\t\t\tp = (bp /* &aSub */ + uintptr(iSub)*16)\n\n\t\t\twalMerge(tls, aContent, (*Sublist)(unsafe.Pointer(p)).FaList, 
(*Sublist)(unsafe.Pointer(p)).FnList, bp+208 /* &aMerge */, bp+216 /* &nMerge */, aBuffer)\n\t\t}\n\t\t(*Sublist)(unsafe.Pointer(bp /* &aSub */ + uintptr(iSub)*16)).FaList = *(*uintptr)(unsafe.Pointer(bp + 208 /* aMerge */))\n\t\t(*Sublist)(unsafe.Pointer(bp /* &aSub */ + uintptr(iSub)*16)).FnList = *(*int32)(unsafe.Pointer(bp + 216 /* nMerge */))\n\t}\n\n\tfor iSub++; iSub < (U32((int32(uint64(unsafe.Sizeof([13]Sublist{})) / uint64(unsafe.Sizeof(Sublist{})))))); iSub++ {\n\t\tif (nList & (int32(1) << iSub)) != 0 {\n\t\t\tvar p uintptr\n\n\t\t\tp = (bp /* &aSub */ + uintptr(iSub)*16)\n\n\t\t\twalMerge(tls, aContent, (*Sublist)(unsafe.Pointer(p)).FaList, (*Sublist)(unsafe.Pointer(p)).FnList, bp+208 /* &aMerge */, bp+216 /* &nMerge */, aBuffer)\n\t\t}\n\t}\n\n\t*(*int32)(unsafe.Pointer(pnList)) = *(*int32)(unsafe.Pointer(bp + 216 /* nMerge */))\n\n}", "title": "" }, { "docid": "52ed6ba6b1cf8d365f2ad50c0807a72e", "score": "0.5865976", "text": "func (node *List) SortBy(less func(i, j int) bool) error {\n\tif !node.IsList() {\n\t\treturn log.ERR_NO_SUPPORT\n\t}\n\n\tleft, right := 0, int(node.VLen()-1)\n\n\treturn node.quicksort(left, right, less)\n\n}", "title": "" }, { "docid": "70e35183f508c23b223c1346c26954a5", "score": "0.5865037", "text": "func (resultList *ResultList) Sort(target KademliaID){\n\tsortingTarget = target\n\tsort.Sort(byTargetDistance(resultList.List))\n}", "title": "" }, { "docid": "c9bec17075e53d37b2b43f7b6b15e380", "score": "0.58613336", "text": "func sortResultItems(items []framework.ResultItem) {\n\tsort.SliceStable(items, func(i, j int) bool {\n\t\tif fileLess(items, i, j) != 0 {\n\t\t\treturn fileLess(items, i, j) < 0\n\t\t}\n\t\tif severityLess(items, i, j) != 0 {\n\t\t\treturn severityLess(items, i, j) < 0\n\t\t}\n\t\treturn resultItemToString(items[i]) < resultItemToString(items[j])\n\t})\n}", "title": "" }, { "docid": "ba9151cba700358fdce505a8ca98aa53", "score": "0.58559483", "text": "func (a sorter) Swap(i, j int) {\n\tfor _, list := range a.frame.lists {\n\t\tlist[i], list[j] = list[j], list[i]\n\t}\n}", "title": "" }, { "docid": "c068f7165f48d13ae889d7b453fb6c21", "score": "0.5854542", "text": "func (c *compactor) sort(less reflectx.Func2x1) []beam.T {\n\tsort.Sort(sorter{data: c.unsorted, less: less})\n\th := sortListHeap{data: c.sorted, less: less}\n\theap.Init(&h)\n\tsorted := make([]beam.T, 0, c.size()-len(c.unsorted))\n\tfor h.Len() > 0 {\n\t\ts := heap.Pop(&h).([]beam.T)\n\t\tsorted = append(sorted, s[0])\n\t\tif len(s) > 1 {\n\t\t\theap.Push(&h, s[1:])\n\t\t}\n\t}\n\tc.sorted = [][]beam.T{mergeSorted(sorted, c.unsorted, func(a, b any) bool { return less.Call2x1(a, b).(bool) })}\n\tc.unsorted = nil\n\tif len(c.sorted[0]) == 0 {\n\t\tc.sorted = nil\n\t\treturn nil\n\t}\n\treturn c.sorted[0]\n}", "title": "" }, { "docid": "2c405f167422d29dd0977ea7089badfa", "score": "0.584921", "text": "func Sort(list List, f LessFn) {\n\tsort.Slice(list, f)\n}", "title": "" }, { "docid": "5a5baa3fc73f52250274383f181a0111", "score": "0.5846896", "text": "func Sort(numbers []int) {\n\tvar wg sync.WaitGroup\n\n\t// Creating wait group that waits of len(numbers) of go routines to finish\n\twg.Add(len(numbers))\n\n\tfor _, x := range numbers {\n\t\t// Spinning a Go routine\n\t\tgo sleepAndPrint(x, &wg)\n\t}\n\n\t// Waiting for all go routines to finish\n\twg.Wait()\n}", "title": "" }, { "docid": "729b43f7459abc5325c37bdf1b172664", "score": "0.58335084", "text": "func Sort(item []rune) {\n\tfor i := range item {\n\t\tj := i\n\t\tfor (j > 0) && (item[j] < item[j-1]) 
{\n\t\t\titem[j], item[j-1] = item[j-1], item[j]\n\t\t\tj--\n\t\t}\n\t}\n}", "title": "" }, { "docid": "acded52dbbecaa84ad52c404427ec0a7", "score": "0.5830247", "text": "func (s *State) sortLinksByDist() {\n\tsort.Slice(s.links, func(i, j int) bool { return s.links[i].distance < s.links[j].distance })\n}", "title": "" }, { "docid": "728b8787c209e3cc84677b68a97c02f3", "score": "0.581706", "text": "func sortStories(stories []item, orderedIDs []int) []item {\n\t// create a map from item.ID to item\n\tm := make(map[int]item)\n\tfor _, story := range stories {\n\t\tm[story.ID] = story\n\t}\n\n\t// orderedIDs determine the order of stories in the output slice (based on story.ID)\n\tret := make([]item, 0, len(orderedIDs))\n\tfor _, id := range orderedIDs {\n\t\titm, ok := m[id]\n\t\tif ok {\n\t\t\tret = append(ret, itm)\n\t\t}\n\t\tif len(ret) >= len(stories) {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn ret\n}", "title": "" }, { "docid": "09cb0f99a8581fb380f56a49458ca221", "score": "0.58016896", "text": "func sort(data []int) {\n\tlength := len(data)\n\tsortedData := make([]int, length)\n\tcopy(sortedData, data)\n\tsplit(data, sortedData, 0, length)\n\tcopy(data, sortedData)\n}", "title": "" }, { "docid": "2d200693889332cc4aab71d36979c58d", "score": "0.5796561", "text": "func vdbeSorterSort(tls *libc.TLS, pTask uintptr, pList uintptr) int32 { /* sqlite3.c:96147:12: */\n\tbp := tls.Alloc(512)\n\tdefer tls.Free(512)\n\n\tvar i int32\n\tvar p uintptr\n\tvar rc int32\n\t// var aSlot [64]uintptr at bp, 512\n\n\trc = vdbeSortAllocUnpacked(tls, pTask)\n\tif rc != SQLITE_OK {\n\t\treturn rc\n\t}\n\n\tp = (*SorterList)(unsafe.Pointer(pList)).FpList\n\t(*SortSubtask)(unsafe.Pointer(pTask)).FxCompare = vdbeSorterGetCompare(tls, (*SortSubtask)(unsafe.Pointer(pTask)).FpSorter)\n\tlibc.Xmemset(tls, bp /* &aSlot[0] */, 0, uint64(unsafe.Sizeof([64]uintptr{})))\n\n\tfor p != 0 {\n\t\tvar pNext uintptr\n\t\tif (*SorterList)(unsafe.Pointer(pList)).FaMemory != 0 {\n\t\t\tif p == (*SorterList)(unsafe.Pointer(pList)).FaMemory {\n\t\t\t\tpNext = uintptr(0)\n\t\t\t} else {\n\n\t\t\t\tpNext = ((*SorterList)(unsafe.Pointer(pList)).FaMemory + uintptr(*(*int32)(unsafe.Pointer(p + 8 /* &.u */))))\n\t\t\t}\n\t\t} else {\n\t\t\tpNext = *(*uintptr)(unsafe.Pointer(p + 8 /* &.u */))\n\t\t}\n\n\t\t*(*uintptr)(unsafe.Pointer(p + 8 /* &.u */)) = uintptr(0)\n\t\tfor i = 0; *(*uintptr)(unsafe.Pointer(bp /* &aSlot[0] */ + uintptr(i)*8)) != 0; i++ {\n\t\t\tp = vdbeSorterMerge(tls, pTask, p, *(*uintptr)(unsafe.Pointer(bp /* &aSlot[0] */ + uintptr(i)*8)))\n\t\t\t*(*uintptr)(unsafe.Pointer(bp /* &aSlot[0] */ + uintptr(i)*8)) = uintptr(0)\n\t\t}\n\t\t*(*uintptr)(unsafe.Pointer(bp /* &aSlot[0] */ + uintptr(i)*8)) = p\n\t\tp = pNext\n\t}\n\n\tp = uintptr(0)\n\tfor i = 0; i < (int32(uint64(unsafe.Sizeof([64]uintptr{})) / uint64(unsafe.Sizeof(uintptr(0))))); i++ {\n\t\tif *(*uintptr)(unsafe.Pointer(bp /* &aSlot[0] */ + uintptr(i)*8)) == uintptr(0) {\n\t\t\tcontinue\n\t\t}\n\t\tif p != 0 {\n\t\t\tp = vdbeSorterMerge(tls, pTask, p, *(*uintptr)(unsafe.Pointer(bp /* &aSlot[0] */ + uintptr(i)*8)))\n\t\t} else {\n\t\t\tp = *(*uintptr)(unsafe.Pointer(bp /* &aSlot[0] */ + uintptr(i)*8))\n\t\t}\n\t}\n\t(*SorterList)(unsafe.Pointer(pList)).FpList = p\n\n\treturn int32((*UnpackedRecord)(unsafe.Pointer((*SortSubtask)(unsafe.Pointer(pTask)).FpUnpacked)).FerrCode)\n}", "title": "" }, { "docid": "94676f9be4367807ba85c42432662d89", "score": "0.57723176", "text": "func Bubble_sort(list []int) {\n\t// find the length of list - n\n\tn := len(list)\n\tfor i := 0; i < n; 
i++ { //run the following for loop n amount of times, where n = the length of the list of integers\n\n\t\tfor j := 0; j < n-1; j++ { //run the following if statement n-1 amount of times\n\t\t\tif list[j] > list[j+1] { //if the item in index j is greater than the next item (j+1),\n\t\t\t\ttemp := list[j+1] //create a temporary variable that stores the value of that next item (j+1),\n\t\t\t\tlist[j+1] = list[j] //copy the value of j into j+1,\n\t\t\t\tlist[j] = temp //copy the value temp is holding into index j, effectively swapping the values within\n\t\t\t\t//increment j by 1, now we will compare what was j+1 and j+2, etcetera\n\t\t\t}\n\n\t\t}\n\t}\n}", "title": "" }, { "docid": "357155bba3470391efe5d45f7b6c076b", "score": "0.576651", "text": "func Sort(in []int) {\n\tfor index, num := range in {\n\t\tif index == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tfor i := index - 1; i >= 0; i -- {\n\t\t\tif in[i] > num {\n\t\t\t\tin[i + 1] = in[i]\n\t\t\t\tin[i] = num\n\t\t\t} else {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "8027831e59a8c55a6571466386677e46", "score": "0.5742445", "text": "func (l List) Sort(sorter func(param1 int, param2 int) bool) List {\n\tclone := append(l[:0:0], l...)\n\tif sorter == nil {\n\t\tsorter = func(i int, j int) bool {\n\t\t\treturn clone[i].(int) < clone[j].(int)\n\t\t}\n\t}\n\tsort.Slice(clone, sorter)\n\treturn clone\n}", "title": "" }, { "docid": "7050d4272e71685ef87576b96578422b", "score": "0.5739382", "text": "func sortByArrival(processes []myStruct) (sorted []myStruct) {\n\tlength := len(processes)\n\tsorted = make([]myStruct, length)\n\tsorted = processes\n\n\tfor i := length - 1; i > 0; i-- {\n\t\tfor j := 0; j < i; j++ {\n\t\t\tif(sorted[j].arrival > sorted[j+1].arrival) {\n\t\t\t\ttemp := sorted[j]\n\t\t\t\tsorted[j] = sorted[j+1]\n\t\t\t\tsorted[j+1] = temp\n\t\t\t}\n\t\t}\n\t}\n\treturn sorted\n}", "title": "" }, { "docid": "aaf30ea161fcade9d71a1992789ded10", "score": "0.5736939", "text": "func Sort(input []int, buffer []int) {\n\tfmt.Println(input)\n\tcopy(buffer, input)\n\tsplitMerge(buffer, 0, len(input), input)\n}", "title": "" }, { "docid": "930b10897f1b2683ba87922cd8489f75", "score": "0.5731099", "text": "func sort(a []int) []int {\n\t\tl := len(a)\n\t\th := l/2\n\n\t\tif h == 1 {\n\t\t\t\treturn merge([]int{a[0]}, []int{a[1]})\n\t\t}\n\n\t\tpa := a[0:h]\n\t\tpb := a[h:l]\n\n\t\tfmt.Printf(\"starting part sorting pa: %v pb: %v\\n\", pa, pb)\n\n\t\treturn merge(sort(pa), sort(pb))\n}", "title": "" }, { "docid": "89cd4fdf27177ac1aaa179c5ae8dcb97", "score": "0.57259125", "text": "func SortListing(listings *model.ResultExchangeData) {\n\tsort.SliceStable(listings.ResultData, func(a, b int) bool {\n\t\treturn listings.ResultData[a].Listing.RatioValue < listings.ResultData[b].Listing.RatioValue\n\t})\n\n\treturn\n}", "title": "" }, { "docid": "7861fc0f0131f02a1574926ed60e0f6e", "score": "0.5722442", "text": "func bigSorting(unsorted []string) []string {\n\tvar lst []Item\n\tfor _, k := range unsorted {\n\t\tvar item Item\n\t\titem.len = len(k)\n\t\titem.value = k\n\t\tlst = append(lst, item)\n\t}\n\n\tsortItem(lst)\n\tvar sorted []string\n\tfor _, k := range lst {\n\t\tsorted = append(sorted, k.value)\n\t}\n\n\treturn sorted\n\n}", "title": "" }, { "docid": "864005ef300f48e6e875f3b92723ed25", "score": "0.5720939", "text": "func (suite *MultiKeySorterTestSuite) TestSorting() {\n\tvar elementList []interface{}\n\telementList = append(elementList, elements{1, 1, 1, 1})\n\telementList = append(elementList, elements{1, 1, 1, 
4})\n\telementList = append(elementList, elements{2, 2, 2, 4})\n\telementList = append(elementList, elements{3, 3, 3, 2})\n\telementList = append(elementList, elements{3, 3, 3, 2})\n\n\tvalue1 := func(c1, c2 interface{}) bool {\n\t\treturn c1.(elements).value1 < c2.(elements).value1\n\t}\n\n\tvalue2 := func(c1, c2 interface{}) bool {\n\t\treturn c1.(elements).value2 < c2.(elements).value2\n\t}\n\n\tvalue3 := func(c1, c2 interface{}) bool {\n\t\treturn c1.(elements).value3 < c2.(elements).value3\n\t}\n\n\tvalue4 := func(c1, c2 interface{}) bool {\n\t\treturn c1.(elements).value4 < c2.(elements).value4\n\t}\n\n\tOrderedBy(value4, value1, value2, value3).Sort(elementList)\n\n\tsuite.EqualValues(elementList[0].(elements),\n\t\telements{1, 1, 1, 1})\n\tsuite.EqualValues(elementList[1].(elements),\n\t\telements{3, 3, 3, 2})\n\tsuite.EqualValues(elementList[2].(elements),\n\t\telements{3, 3, 3, 2})\n\tsuite.EqualValues(elementList[3].(elements),\n\t\telements{1, 1, 1, 4})\n\tsuite.EqualValues(elementList[4].(elements),\n\t\telements{2, 2, 2, 4})\n\n\tfor _, s := range elementList {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"values\": s.(elements),\n\t\t}).Info(\"Values\")\n\t}\n\n}", "title": "" }, { "docid": "da7f3a3b4bb05e460818bb3d09e5c32d", "score": "0.57125235", "text": "func mergeSorted(a, b []beam.T, less func(any, any) bool) []beam.T {\n\toutput := make([]beam.T, 0, len(a)+len(b))\n\tfor len(a) > 0 && len(b) > 0 {\n\t\tif less(a[0], b[0]) {\n\t\t\toutput = append(output, a[0])\n\t\t\ta = a[1:]\n\t\t} else {\n\t\t\toutput = append(output, b[0])\n\t\t\tb = b[1:]\n\t\t}\n\t}\n\tif len(a) > 0 {\n\t\toutput = append(output, a...)\n\t} else {\n\t\toutput = append(output, b...)\n\t}\n\treturn output\n}", "title": "" }, { "docid": "5f4b46b51f547d6a1673a53df26fd4f6", "score": "0.57091016", "text": "func sortWordCounts(wordCounts []WordCount) {\n\tsort.Slice(wordCounts, func(i, j int) bool {\n\t\twc1 := wordCounts[i]\n\t\twc2 := wordCounts[j]\n\t\tif wc1.Count == wc2.Count {\n\t\t\treturn wc1.Word < wc2.Word\n\t\t}\n\t\treturn wc1.Count > wc2.Count\n\t})\n}", "title": "" }, { "docid": "5f4b46b51f547d6a1673a53df26fd4f6", "score": "0.57091016", "text": "func sortWordCounts(wordCounts []WordCount) {\n\tsort.Slice(wordCounts, func(i, j int) bool {\n\t\twc1 := wordCounts[i]\n\t\twc2 := wordCounts[j]\n\t\tif wc1.Count == wc2.Count {\n\t\t\treturn wc1.Word < wc2.Word\n\t\t}\n\t\treturn wc1.Count > wc2.Count\n\t})\n}", "title": "" }, { "docid": "5f4b46b51f547d6a1673a53df26fd4f6", "score": "0.57091016", "text": "func sortWordCounts(wordCounts []WordCount) {\n\tsort.Slice(wordCounts, func(i, j int) bool {\n\t\twc1 := wordCounts[i]\n\t\twc2 := wordCounts[j]\n\t\tif wc1.Count == wc2.Count {\n\t\t\treturn wc1.Word < wc2.Word\n\t\t}\n\t\treturn wc1.Count > wc2.Count\n\t})\n}", "title": "" }, { "docid": "95d8e5a3e44cabde5991d996c26cf316", "score": "0.5701206", "text": "func ShellSort(data []int) []int {\n\tfor i := len(data) / 2; i > 0; i /= 2 {\n\t\tfor j := i; j < len(data); j++ {\n\t\t\tfor k := j - i; k >= 0; k -= i {\n\t\t\t\tif data[k+i] >= data[k] {\n\t\t\t\t\tbreak\n\t\t\t\t} else {\n\t\t\t\t\tdata[k], data[k+i] = data[k+i], data[k]\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn data\n}", "title": "" }, { "docid": "5b0617cbb3717431cb3d43c7443dd6ba", "score": "0.5700425", "text": "func (l *List) Sort(fn func(v1, v2 interface{}) bool) {\n\tif l.tail == nil {\n\t\treturn\n\t}\n\tfor {\n\t\tvar (\n\t\t\tprev *Node\n\t\t\tdryRun = true // indicates no changes were made during single loop\n\t\t)\n\t\t// loop over all 
nodes in list starting with HEAD\n\t\tfor i := l.tail; i.next != nil; {\n\t\t\t// check if we need to swap current with next node\n\t\t\tif fn(i.Data, i.next.Data) {\n\t\t\t\t// create temporary nodes\n\t\t\t\tvar (\n\t\t\t\t\ttmpCurr = &Node{}\n\t\t\t\t\ttmpNext = &Node{}\n\t\t\t\t)\n\n\t\t\t\ttmpCurr.Data = i.next.Data\n\t\t\t\ttmpCurr.next = tmpNext\n\t\t\t\ttmpNext.Data = i.Data\n\t\t\t\ttmpNext.next = i.next.next\n\n\t\t\t\tif prev == nil { // means we are at pos 0 (head)\n\t\t\t\t\tl.tail = tmpCurr\n\t\t\t\t} else {\n\t\t\t\t\tprev.next = tmpCurr\n\t\t\t\t}\n\n\t\t\t\tprev = tmpCurr\n\t\t\t\ti = tmpNext\n\n\t\t\t\tdryRun = false // we will loop over all nodes again\n\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tprev = i\n\t\t\ti = i.next\n\t\t}\n\n\t\t// no changes were made during this loop\n\t\t// this means we sorted everything we could\n\t\tif dryRun {\n\t\t\t// restore n.prev pointers\n\t\t\tfor n := l.tail; n != nil; n = n.next {\n\t\t\t\tif n.next != nil {\n\t\t\t\t\tn.next.prev = n\n\t\t\t\t}\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\t}\n}", "title": "" }, { "docid": "6317d6431e3520ed05c981dbc5a39c6a", "score": "0.57001066", "text": "func Sort(ss []string) {\n\tN := len(ss)\n\tcount := make([]int, R+1)\n\n\t// Compute frequency counts\n\tfor _, s := range ss {\n\t\tcount[int(s[0])+1]++\n\t}\n\n\t// Compute cumulates\n\tfor r := 0; r < R; r++ {\n\t\tcount[r+1] = count[r+1] + count[r]\n\t}\n\n\t// Move data in-place\n\ti, r, moved := 0, 0, 0\n\tfor r < R {\n\t\tif count[r] != 0 {\n\t\t\tr--\n\t\t\tbreak\n\t\t}\n\t\tr++\n\t}\n\tfor moved < N {\n\t\tkey := int(ss[i][0])\n\t\tfmt.Printf(\"i: %d, key: %x, \", i, key)\n\t\tif key == r {\n\t\t\ti++\n\t\t\tcount[r]++\n\t\t\tif count[r] == count[r+1] {\n\t\t\t\tr++\n\t\t\t}\n\t\t\tfmt.Printf(\"increase i to %d\\n\", i)\n\t\t} else {\n\t\t\tj := count[key]\n\t\t\tss[i], ss[j] = ss[j], ss[i]\n\t\t\tcount[key] = j + 1\n\t\t\tfmt.Printf(\"swap %d with %d\\n\", i, j)\n\t\t}\n\t\tmoved++\n\t\tfmt.Printf(\"ss: %v\\n\", ss)\n\t}\n}", "title": "" }, { "docid": "072f2e1ff45b3ab785029c3b3ea44a93", "score": "0.5693925", "text": "func (s Float32) SortedList(less func(i, j float32) bool) []float32 {\n\tdest := s.List()\n\tsort.Slice(dest, func(i, j int) bool {\n\t\treturn less(dest[i], dest[j])\n\t})\n\treturn dest\n}", "title": "" }, { "docid": "57d46da5fa5ffbee113a31d5ccb04d04", "score": "0.568443", "text": "func (l *NodeTypeList) Sort() {\n\tsort.Slice(l.NodeTypes, func(i, j int) bool {\n\t\tif l.NodeTypes[i].IsDeprecated != l.NodeTypes[j].IsDeprecated {\n\t\t\treturn !l.NodeTypes[i].IsDeprecated\n\t\t}\n\t\tif l.NodeTypes[i].NodeInstanceType != nil &&\n\t\t\tl.NodeTypes[j].NodeInstanceType != nil {\n\t\t\tif l.NodeTypes[i].NodeInstanceType.LocalDisks !=\n\t\t\t\tl.NodeTypes[j].NodeInstanceType.LocalDisks {\n\t\t\t\treturn l.NodeTypes[i].NodeInstanceType.LocalDisks <\n\t\t\t\t\tl.NodeTypes[j].NodeInstanceType.LocalDisks\n\t\t\t}\n\t\t\tif l.NodeTypes[i].NodeInstanceType.LocalDiskSizeGB !=\n\t\t\t\tl.NodeTypes[j].NodeInstanceType.LocalDiskSizeGB {\n\t\t\t\treturn l.NodeTypes[i].NodeInstanceType.LocalDiskSizeGB <\n\t\t\t\t\tl.NodeTypes[j].NodeInstanceType.LocalDiskSizeGB\n\t\t\t}\n\t\t}\n\t\tif l.NodeTypes[i].MemoryMB != l.NodeTypes[j].MemoryMB {\n\t\t\treturn l.NodeTypes[i].MemoryMB < l.NodeTypes[j].MemoryMB\n\t\t}\n\t\tif l.NodeTypes[i].NumCores != l.NodeTypes[j].NumCores {\n\t\t\treturn l.NodeTypes[i].NumCores < l.NodeTypes[j].NumCores\n\t\t}\n\t\tif l.NodeTypes[i].NumGPUs != l.NodeTypes[j].NumGPUs {\n\t\t\treturn l.NodeTypes[i].NumGPUs < 
l.NodeTypes[j].NumGPUs\n\t\t}\n\t\treturn l.NodeTypes[i].InstanceTypeID < l.NodeTypes[j].InstanceTypeID\n\t})\n}", "title": "" }, { "docid": "e2b0b9d02a6e443c2524f3d89e2269f3", "score": "0.5683703", "text": "func SortTasks(tasks []*Task) {\n\tsort.Slice(tasks, func(i1, i2 int) bool { return tasks[i1].lessThan(tasks[i2]) })\n}", "title": "" }, { "docid": "fbeb95e8563ae50eb933be70f65f7a10", "score": "0.5655335", "text": "func sort(l Less) Filter {\n\treturn func(i *Index) (*Index, error) {\n\t\tres := i.cp(i.objs)\n\t\tless := func(j, k int) bool { return l(res.objs[j], res.objs[k]) }\n\t\ts.SliceStable(res.objs, less)\n\t\tres.sorted = true\n\t\treturn res, nil\n\t}\n}", "title": "" }, { "docid": "727ca5ad19f0f52b7ae991456e2d7bc6", "score": "0.56521165", "text": "func (p doubleslice) Sort() { sort.Sort(p) }", "title": "" }, { "docid": "5b74cae678475c4822dbe583d997124b", "score": "0.5643734", "text": "func _AnimalsSortSource(data []Animal, lo, hi int) {\n\tif hi-lo <= 12 {\n\t\t// Do insertion sort.\n\t\tfor i := lo + 1; i < hi; i++ {\n\t\t\tfor j := i; j > lo && !(data[j-1].Name <= data[j].Name); j-- {\n\t\t\t\tdata[j], data[j-1] = data[j-1], data[j]\n\t\t\t}\n\t\t}\n\t\treturn\n\t}\n\t// Do quick sort.\n\tvar (\n\t\tp = lo\n\t\tx = data[lo]\n\t)\n\tfor i := lo + 1; i < hi; i++ {\n\t\tif data[i].Name <= x.Name {\n\t\t\tp++\n\t\t\tdata[p], data[i] = data[i], data[p]\n\t\t}\n\t}\n\tdata[p], data[lo] = data[lo], data[p]\n\n\tif lo < p {\n\t\t_AnimalsSortSource(data, lo, p)\n\t}\n\tif p+1 < hi {\n\t\t_AnimalsSortSource(data, p+1, hi)\n\t}\n}", "title": "" }, { "docid": "5c2a0e4c362a7b8fd2560077cafd6741", "score": "0.56355107", "text": "func sortSliceByZones(list interface{}, zones []Zone) {\n\tif !generic.HasField(list, \"Zone\") {\n\t\treturn\n\t}\n\n\tzoneMap := map[Zone]int{}\n\tfor i, zone := range zones {\n\t\tzoneMap[zone] = i\n\t}\n\tgeneric.SortSliceByField(list, \"Zone\", func(i interface{}, i2 interface{}) bool {\n\t\treturn zoneMap[i.(Zone)] < zoneMap[i2.(Zone)]\n\t})\n}", "title": "" }, { "docid": "417d98a014a84ec556961e80d72815cb", "score": "0.5621646", "text": "func sortByID(processes []myStruct) (sorted []myStruct) {\n\tlength := len(processes)\n\tsorted = make([]myStruct, length)\n\tsorted = processes\n\n\tfor i := length - 1; i > 0; i-- {\n\t\tfor j := 0; j < i; j++ {\n\t\t\tif(sorted[j].ID > sorted[j+1].ID) {\n\t\t\t\ttemp := sorted[j]\n\t\t\t\tsorted[j] = sorted[j+1]\n\t\t\t\tsorted[j+1] = temp\n\t\t\t}\n\t\t}\n\t}\n\treturn sorted\n}", "title": "" }, { "docid": "c2ef7d29413cc683c0b3e18f50eec6e3", "score": "0.56207716", "text": "func sort(s []int) {\n\tif len(s) > 0 {\n\t\tq := partition(s)\n\t\tsort(s[0:q])\n\t\tsort(s[q+1 : len(s)])\n\t}\n}", "title": "" }, { "docid": "b96192b4bed75ad27079d5e7eb1c0a51", "score": "0.5615039", "text": "func (snapshots *Snapshots) Sort() {\n\tsort.Slice(snapshots.List, func(i, k int) bool {\n\t\treturn snapshots.List[i].Version < snapshots.List[k].Version\n\t})\n}", "title": "" }, { "docid": "6a6ee7f0469f1f031cc10548536c6bcf", "score": "0.56119263", "text": "func (e *Elevator) ListSort() {\n\tif e.currentDirection == \"Down\" {\n\t\tsort.Slice(e.StopList, func(i, j int) bool {\n\t\t\treturn e.StopList[i] > e.StopList[j]\n\t\t})\n\t} else {\n\t\tsort.Slice(e.StopList, func(i, j int) bool {\n\t\t\treturn e.StopList[i] < e.StopList[j]\n\t\t})\n\t}\n}", "title": "" }, { "docid": "935ba418b96524e84022ccf8f2693f32", "score": "0.56100136", "text": "func customSort(p1 []Person) ([]Person, []Person) {\n\tfmt.Println(\"Custom sorting\")\n\tageDec := 
make([]Person, len(p1))\n\tcopy(ageDec, p1)\n\tageInc := make([]Person, len(p1))\n\tcopy(ageInc, p1)\n\n\tsort.Sort(ByAge(ageDec))\n\tfmt.Println(ageDec)\n\n\tsort.Sort(sort.Reverse(ByAge(ageInc)))\n\tfmt.Println(ageInc)\n\n\treturn ageDec, ageInc\n}", "title": "" }, { "docid": "b9285d2bc7761185481ad5037c5e8d57", "score": "0.5598821", "text": "func Sort(values []int) {\n\tvar root *tree\n\tfor _, v := range values {\n\t\troot = add(root, v)\n\t\tfmt.Printf(\"after add %d %v\", v, root)\n\t}\n\tappendValues(values[:0], root)\n}", "title": "" }, { "docid": "6a668733877b1315e6d429695a22cb49", "score": "0.5593547", "text": "func bubbleSort(suggestions []Suggestion) {\n\t// set up a bubble sort for now\n\tfor end := len(suggestions) - 1; end > 0; end-- {\n\t\tfor index := 0; index < end; index++ {\n\t\t\tif suggestions[index].Score < suggestions[index+1].Score {\n\t\t\t\tswap(suggestions, index, index+1)\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "2466f7a6e75076d18f2ba811713fb055", "score": "0.559328", "text": "func getSortedDisplays(allDisplays *map[DisplayTemplate]float64) *[]DisplayTemplate {\n\tvar displayStack []DisplayTemplate\n\tfor display := range *allDisplays {\n\t\tdisplayStack = append(displayStack, display)\n\t}\n\tsort.Sort(DisplayTemplateSlice(displayStack))\n\treturn &displayStack\n}", "title": "" }, { "docid": "d7111f1d15c294ebf9eb62ea100305bc", "score": "0.5592631", "text": "func getAllSort(s []int) [][]int {\n}", "title": "" }, { "docid": "428aa09fb4338ed83fd7e724127fd7d6", "score": "0.55789745", "text": "func (list AnimalsList) Sort(f AnimalLessThanFunc) AnimalsList {\n\tsort.Slice(list, func(i, j int) bool {\n\t\treturn f(list[i], list[j])\n\t})\n\n\treturn list\n}", "title": "" }, { "docid": "ed09919f1c7e4b1b6515e70999ae65b3", "score": "0.5569098", "text": "func sortedIntfs(intfs *ifstate.Interfaces, linkType topology.LinkType) []uint16 {\n\tvar result []uint16\n\tfor ifid, intf := range intfs.All() {\n\t\ttopoInfo := intf.TopoInfo()\n\t\tif topoInfo.LinkType != linkType {\n\t\t\tcontinue\n\t\t}\n\t\tresult = append(result, ifid)\n\t}\n\tsort.Slice(result, func(i, j int) bool { return result[i] < result[j] })\n\treturn result\n}", "title": "" }, { "docid": "6fa4e8599378388b8565b18912caa23c", "score": "0.5566335", "text": "func (jl JobList) Sort(sorter func(i, j int) bool) JobList {\n\tsort.Slice(jl[:], sorter)\n\n\treturn jl\n}", "title": "" }, { "docid": "12172d264a049565fee156263897657f", "score": "0.55663145", "text": "func (s StringChunksList) Sort() {\n\tsort.Sort(s)\n}", "title": "" }, { "docid": "71bd55239daef8b9921a7c5cdb7fc961", "score": "0.5562329", "text": "func mergeSort(l int, r int, slice reflect.Value) {\n\tif r != l {\n\t\tm := l + (r-l)/2\n\t\tmergeSort(l, m, slice)\n\t\tmergeSort(m+1, r, slice)\n\t\tmerge(l, m, r, slice)\n\t}\n}", "title": "" }, { "docid": "57c3fa3ab48b68b967ca1aa56c505844", "score": "0.55590576", "text": "func doSorting(name int, initAlgoritm sort.Algorithm, array []int, wp *sync.WaitGroup) {\n\tfmt.Printf(\"Thread %d sorting subarray: %v\\n\", name, array)\n\tvar tempAlgorithm = initAlgoritm.Init(array)\n\ttempAlgorithm.Sort()\n\twp.Done()\n}", "title": "" }, { "docid": "ed199dbc1e38ebb7b6d57411e968e823", "score": "0.5557417", "text": "func sortRules(rules []*types.TokenRule) {\n\tsort.Slice(rules, func(i, j int) bool {\n\t\tiAcct, jAcct := rules[i].AWSAccount, rules[j].AWSAccount\n\t\t// if accountID is the same, sort based on arn\n\t\tif iAcct == jAcct {\n\t\t\tarn1, arn2 := rules[i].AWSARN, rules[j].AWSARN\n\t\t\treturn arn1 
< arn2\n\t\t}\n\n\t\treturn iAcct < jAcct\n\t})\n}", "title": "" }, { "docid": "8c84818df325f83c225f44768ccdf5d2", "score": "0.5556833", "text": "func sortParams(params []Param) {\n\tsort.Slice(params, func(i, j int) bool {\n\t\ta, b := params[i], params[j]\n\t\taPath, bPath := a.Component.Path(), b.Component.Path()\n\t\tfor {\n\t\t\tswitch {\n\t\t\tcase len(aPath) == 0 && len(bPath) == 0:\n\t\t\t\treturn a.Name < b.Name\n\t\t\tcase len(aPath) == 0 && len(bPath) > 0:\n\t\t\t\treturn false\n\t\t\tcase len(aPath) > 0 && len(bPath) == 0:\n\t\t\t\treturn true\n\t\t\tcase aPath[0] != bPath[0]:\n\t\t\t\treturn aPath[0] < bPath[0]\n\t\t\tdefault:\n\t\t\t\taPath, bPath = aPath[1:], bPath[1:]\n\t\t\t}\n\t\t}\n\t})\n}", "title": "" }, { "docid": "166e482d46dce1503db013c9643d3beb", "score": "0.55513084", "text": "func main() {\n\tvalues := []int{10, 5, 100, 800, 700, 600, 2000, -3, -10000}\n\tSort(values)\n\tfmt.Println(values)\n}", "title": "" }, { "docid": "96cf734d2efe8adb3832ecad8af60882", "score": "0.55465645", "text": "func Sort(a []int) {\n\tlen := len(a)\n\th := 1\n\tfor h < (len / 3) {\n\t\th = h*3 + 1 // 1, 4, 13, 40, 121, ...\n\t}\n\tfor h >= 1 {\n\t\t// h-sort the array\n\t\tfor i := h; i < len; i++ {\n\t\t\t// Insert a[i] among a[i-h], a[i-h*2], a[i-h*3], etc.\n\t\t\tfor j := i; j >= h && less(a[j], a[j-h]); j -= h {\n\t\t\t\texch(a, j, j-h)\n\t\t\t}\n\t\t}\n\t\th = h / 3\n\t}\n}", "title": "" }, { "docid": "6ec454d1644a564f1fcb272317d905d0", "score": "0.5539933", "text": "func sortCmdList(vec []Cmd) {\n\tsort.Slice(vec, func(i, j int) bool {\n\t\treturn vec[i].Name < vec[j].Name\n\t})\n}", "title": "" }, { "docid": "6ca84f95cc38537a7478af0c1cb554f3", "score": "0.55383676", "text": "func cmpFunctionLists(wantFuncs []testFunction, haveFuncs []pkg.Function) error {\n\tif len(wantFuncs) != len(haveFuncs) {\n\t\treturn fmt.Errorf(\"incorrect number of functions (want %v, have %v)\", len(wantFuncs), len(haveFuncs))\n\t}\n\n\tfor i, wantFunc := range wantFuncs {\n\t\thaveFunc := haveFuncs[i]\n\n\t\t// Check that the function's name is correct.\n\t\tif wantFunc.name != haveFunc.Name() {\n\t\t\treturn fmt.Errorf(\"name mismatch (want %s, have %s)\", wantFunc.name, haveFunc.Name())\n\t\t}\n\n\t\t// Check that the function's comments are correct.\n\t\tif wantFunc.comments != haveFunc.Comments(99999) {\n\t\t\treturn fmt.Errorf(\"%s: comments mismatch\", haveFunc.Name())\n\t\t}\n\n\t\t// Check that the input parameters are correct.\n\t\tif err := cmpParameterLists(wantFunc.inputs, haveFunc.Inputs()); err != nil {\n\t\t\treturn fmt.Errorf(\"%s: inputs: %w\", haveFunc.Name(), err)\n\t\t}\n\n\t\t// Check that the output parameters are correct.\n\t\tif err := cmpParameterLists(wantFunc.outputs, haveFunc.Outputs()); err != nil {\n\t\t\treturn fmt.Errorf(\"%s: outputs: %w\", haveFunc.Name(), err)\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "6262579f6dbf82c29cee01f5f603b1d4", "score": "0.55322516", "text": "func (r *HeuristicAnalysisResult) Sort() {\n\tsort.Slice(r.Items, func(i, j int) bool {\n\t\treturn r.Items[i].Justification.GetScore() > r.Items[j].Justification.GetScore()\n\t})\n}", "title": "" }, { "docid": "234d12bf99b240fb75bde878d6cb82e5", "score": "0.5519559", "text": "func (l TaskGroupList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }", "title": "" }, { "docid": "5859193e0fc89055475baf7199f52b72", "score": "0.5519208", "text": "func (l ProcessList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }", "title": "" }, { "docid": "2ff741e766dfc52ee43375f3628d0286", "score": "0.55145854", 
"text": "func sort(data []int) {\n\tgoSort.Ints(data)\n}", "title": "" }, { "docid": "50e49b138f0649524942f5ca0b6dc2ba", "score": "0.5503755", "text": "func sortingBacking() {\n\titems := []string{\n\t\t\"pacman\", \"mario\", \"tetris\", \"doom\", \"galaga\", \"frogger\",\n\t\t\"asteroids\", \"simcity\", \"metroid\", \"defender\", \"rayman\",\n\t\t\"tempest\", \"ultima\",\n\t}\n\n\tfmt.Println(\"Original:\", items)\n\t// ADD YOUR CODE HERE\n\tmNum := len(items) / 2\n\tmid := items[mNum-1 : mNum+2]\n\tsort.Strings(items[5:8])\n\n\tfmt.Println(\"Mid:\", mid)\n\tfmt.Println()\n\tfmt.Println(\"Sorted :\", items)\n}", "title": "" }, { "docid": "9ab0ab78501ca4132536b2d9db467e97", "score": "0.54973805", "text": "func (l List) Sort(f LessFn) List {\n\tsort.Slice(l, f)\n\treturn l\n}", "title": "" }, { "docid": "85e7584c73bc92f486a5a9b19d079a88", "score": "0.5493435", "text": "func makeList(elems []int) *listNode {\n\tsort.Slice(elems, func(i, j int) bool {\n\t\treturn elems[i] < elems[j]\n\t})\n\n\tpre := &listNode{}\n\tq := pre\n\tfor _, v := range elems {\n\t\tq.next = &listNode{value: v}\n\t\tq = q.next\n\t}\n\n\treturn pre.next\n}", "title": "" }, { "docid": "da5f30f97354e24a7f69ace5d388148a", "score": "0.54933137", "text": "func bubbleSort(toSort []int) {\n\tfor i := 0; i < len(toSort); i++ {\n\t\tfor j := 0; j < len(toSort)-i-1; j++ {\n\t\t\tswap(toSort, j)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "6b850218ecee7aa269fe2f6a06ac4a0f", "score": "0.5492426", "text": "func MergeSort(list []Item) []Item {\n\tif len(list) < 2 {\n\t\treturn list\n\t}\n\tmiddle := len(list) / 2\n\n\tvar left, right []Item\n\n\twg := sync.WaitGroup{}\n\twg.Add(2)\n\n\tgo func() {\n\t\tleft = MergeSort(list[:middle])\n\t\twg.Done()\n\t}()\n\n\tgo func() {\n\t\tright = MergeSort(list[middle:])\n\t\twg.Done()\n\t}()\n\n\twg.Wait()\n\treturn merge(left, right)\n}", "title": "" }, { "docid": "a27a3ee7d8762ee0b519d1e759cee40d", "score": "0.5485694", "text": "func SortAsks(d []Item) []Item {\n\tsort.Sort(byOBPrice(d))\n\treturn d\n}", "title": "" }, { "docid": "8cc18bd4271b679521b3b153a77143bc", "score": "0.54821587", "text": "func ShellSort(v []int) {\n\tfor i := len(v) / 2; i > 2; i /= 2 {\n\t\tfor j := 0; j < i; j++ {\n\t\t\tincrInsertionSort(v[j:], len(v)-j, i)\n\t\t}\n\t}\n\tincrInsertionSort(v, len(v), 1)\n}", "title": "" }, { "docid": "5cce4c6a84996198c396f71b7751f1ca", "score": "0.54778713", "text": "func (ll *LinkedList[T]) Sort(c comparator.Comparator[T]) {\n\tal := ll.ToArrayList()\n\tal.Sort(c)\n\n\tit := al.Iterator()\n\ttemp := ll.first\n\n\tfor temp != nil && it.HasNext() {\n\t\ttemp.element, _ = it.Next()\n\t\ttemp = temp.next\n\t}\n}", "title": "" }, { "docid": "c5825c41f00dfaa629bd9b12600ec2c6", "score": "0.5475634", "text": "func ByKeySort(x []Elem) { sort.Sort(ByKey(x)) }", "title": "" }, { "docid": "43321671599eb2355083412c4b255e75", "score": "0.5474588", "text": "func (addrs byPriorityWeight) sort() {\n\tsort.Sort(addrs)\n\ti := 0\n\tfor j := 1; j < len(addrs); j++ {\n\t\tif addrs[i].Priority != addrs[j].Priority {\n\t\t\taddrs[i:j].shuffleByWeight()\n\t\t\ti = j\n\t\t}\n\t}\n\taddrs[i:].shuffleByWeight()\n}", "title": "" }, { "docid": "43321671599eb2355083412c4b255e75", "score": "0.5474588", "text": "func (addrs byPriorityWeight) sort() {\n\tsort.Sort(addrs)\n\ti := 0\n\tfor j := 1; j < len(addrs); j++ {\n\t\tif addrs[i].Priority != addrs[j].Priority {\n\t\t\taddrs[i:j].shuffleByWeight()\n\t\t\ti = j\n\t\t}\n\t}\n\taddrs[i:].shuffleByWeight()\n}", "title": "" }, { "docid": 
"187ac60bf2d7bb4ebb9cc3ddd86ddffd", "score": "0.5459689", "text": "func pcacheSortDirtyList(tls *libc.TLS, pIn uintptr) uintptr { /* sqlite3.c:50046:14: */\n\tbp := tls.Alloc(256)\n\tdefer tls.Free(256)\n\n\t// var a [32]uintptr at bp, 256\n\n\tvar p uintptr\n\tvar i int32\n\tlibc.Xmemset(tls, bp /* &a[0] */, 0, uint64(unsafe.Sizeof([32]uintptr{})))\n\tfor pIn != 0 {\n\t\tp = pIn\n\t\tpIn = (*PgHdr)(unsafe.Pointer(p)).FpDirty\n\t\t(*PgHdr)(unsafe.Pointer(p)).FpDirty = uintptr(0)\n\t\tfor i = 0; i < (N_SORT_BUCKET - 1); i++ {\n\t\t\tif *(*uintptr)(unsafe.Pointer(bp /* &a[0] */ + uintptr(i)*8)) == uintptr(0) {\n\t\t\t\t*(*uintptr)(unsafe.Pointer(bp /* &a[0] */ + uintptr(i)*8)) = p\n\t\t\t\tbreak\n\t\t\t} else {\n\t\t\t\tp = pcacheMergeDirtyList(tls, *(*uintptr)(unsafe.Pointer(bp /* &a[0] */ + uintptr(i)*8)), p)\n\t\t\t\t*(*uintptr)(unsafe.Pointer(bp /* &a[0] */ + uintptr(i)*8)) = uintptr(0)\n\t\t\t}\n\t\t}\n\t\tif i == (N_SORT_BUCKET - 1) {\n\t\t\t// To get here, there need to be 2^(N_SORT_BUCKET) elements in\n\t\t\t// the input list. But that is impossible.\n\t\t\t*(*uintptr)(unsafe.Pointer(bp /* &a[0] */ + uintptr(i)*8)) = pcacheMergeDirtyList(tls, *(*uintptr)(unsafe.Pointer(bp /* &a[0] */ + uintptr(i)*8)), p)\n\t\t}\n\t}\n\tp = *(*uintptr)(unsafe.Pointer(bp /* &a[0] */))\n\tfor i = 1; i < N_SORT_BUCKET; i++ {\n\t\tif *(*uintptr)(unsafe.Pointer(bp /* &a[0] */ + uintptr(i)*8)) == uintptr(0) {\n\t\t\tcontinue\n\t\t}\n\t\tif p != 0 {\n\t\t\tp = pcacheMergeDirtyList(tls, p, *(*uintptr)(unsafe.Pointer(bp /* &a[0] */ + uintptr(i)*8)))\n\t\t} else {\n\t\t\tp = *(*uintptr)(unsafe.Pointer(bp /* &a[0] */ + uintptr(i)*8))\n\t\t}\n\t}\n\treturn p\n}", "title": "" }, { "docid": "6667668cb7bc7d1fecc179b7357c565d", "score": "0.5457209", "text": "func sortByBurst(processes []myStruct) (sorted []myStruct) {\n\tlength := len(processes)\n\tsorted = make([]myStruct, length)\n\tsorted = processes\n\n\tfor i := length - 1; i > 0; i-- {\n\t\tfor j := 0; j < i; j++ {\n\t\t\tif(sorted[j].burst > sorted[j+1].burst) {\n\t\t\t\ttemp := sorted[j]\n\t\t\t\tsorted[j] = sorted[j+1]\n\t\t\t\tsorted[j+1] = temp\n\t\t\t}\n\t\t}\n\t}\n\treturn sorted\n}", "title": "" }, { "docid": "4ea240b1d1e3b0c296a0b2453ffcc534", "score": "0.5450501", "text": "func (t Targets) Sort() {\n\tsort.Slice(t, func(i, j int) bool {\n\t\tswitch strings.Compare(t[i].Job(), t[j].Job()) {\n\t\tcase -1:\n\t\t\treturn true\n\t\tcase 1:\n\t\t\treturn false\n\t\t}\n\t\tswitch strings.Compare(string(t[i].Labels[sourceLabelName]), string(t[j].Labels[sourceLabelName])) {\n\t\tcase -1:\n\t\t\treturn true\n\t\tcase 1:\n\t\t\treturn false\n\t\t}\n\t\treturn t[i].ScrapeURL < t[j].ScrapeURL\n\t})\n}", "title": "" }, { "docid": "2bc6c328730b5e2cf79fc81a0095fe61", "score": "0.5450111", "text": "func TestAddSort(t *testing.T) {\n\tt.Log(\"Given the need to test AddSort functionality.\")\n\t{\n\t\torgNodeData := []string{\"grape\", \"apple\", \"plum\", \"mango\", \"kiwi\"}\n\t\tt.Logf(\"\\tTest 0:\\tWhen adding %d nodes\", len(orgNodeData))\n\t\t{\n\t\t\tvar l list.List\n\n\t\t\tfor _, data := range orgNodeData {\n\t\t\t\tl.AddSort(data)\n\t\t\t}\n\t\t\tif l.Count != len(orgNodeData) {\n\t\t\t\tt.Logf(\"\\t%s\\tTest 0:\\tShould be able to add %d nodes.\", failed, len(orgNodeData))\n\t\t\t\tt.Fatalf(\"\\t\\tTest 0:\\tGot %d, Expected %d.\", l.Count, len(orgNodeData))\n\t\t\t}\n\t\t\tt.Logf(\"\\t%s\\tTest 0:\\tShould be able to add %d nodes.\", succeed, len(orgNodeData))\n\n\t\t\tvar nodeData string\n\t\t\tf := func(n *list.Node) error {\n\t\t\t\tnodeData += 
n.Data\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tif err := l.Operate(f); err != nil {\n\t\t\t\tt.Fatalf(\"\\t%s\\tTest 0:\\tShould be able to operate on the list : %v\", failed, err)\n\t\t\t}\n\t\t\tt.Logf(\"\\t%s\\tTest 0:\\tShould be able to operate on the list.\", succeed)\n\nsortedNodeData := \"applegrapekiwimangoplum\"\n\t\t\tif sortedNodeData != nodeData {\n\t\t\t\tt.Logf(\"\\t%s\\tTest 0:\\tShould be able to traverse over %d nodes in sort order.\", failed, len(orgNodeData))\n\t\t\t\tt.Fatalf(\"\\t\\tTest 0:\\tGot %s, Expected %s.\", nodeData, sortedNodeData)\n\t\t\t}\n\t\t\tt.Logf(\"\\t%s\\tTest 0:\\tShould be able to traverse over %d nodes in sort order.\", succeed, len(orgNodeData))\n\n\t\t\tnodeData = \"\"\n\t\t\tf = func(n *list.Node) error {\n\t\t\t\tnodeData += n.Data\n\t\t\t\treturn nil\n\t\t\t}\n\n\t\t\tif err := l.OperateReverse(f); err != nil {\n\t\t\t\tt.Fatalf(\"\\t%s\\tTest 0:\\tShould be able to operate reverse on the list : %v\", failed, err)\n\t\t\t}\n\t\t\tt.Logf(\"\\t%s\\tTest 0:\\tShould be able to operate reverse on the list.\", succeed)\n\t\t\tsortedNodeData = \"plummangokiwigrapeapple\"\n\t\t\tif sortedNodeData != nodeData {\n\t\t\t\tt.Logf(\"\\t%s\\tTest 0:\\tShould be able to traverse over %d nodes in reverse sort order.\", failed, len(orgNodeData))\n\t\t\t\tt.Fatalf(\"\\t\\tTest 0:\\tGot %s, Expected %s.\", nodeData, sortedNodeData)\n\t\t\t}\n\t\t\tt.Logf(\"\\t%s\\tTest 0:\\tShould be able to traverse over %d nodes in reverse sort order.\", succeed, len(orgNodeData))\n\t\t}\n\t}\n}", "title": "" } ]
d752fa6a6156242b3a1a3cfa2db124a9
Path is the path which is to be filtered on
[ { "docid": "554889dad2edc61bb73780b0cd1f6288", "score": "0.6861849", "text": "func (f FilterGroup) Path() string {\n\treturn f.path\n}", "title": "" } ]
[ { "docid": "dcd1b2b0894c373acbc8db1e3746f8c5", "score": "0.6744859", "text": "func (s *ClusterGetSettingsService) FilterPath(filterPath ...string) *ClusterGetSettingsService {\n\ts.filterPath = append(s.filterPath, filterPath...)\n\treturn s\n}", "title": "" }, { "docid": "85092a7623dcb42efb98e2d6187a35c8", "score": "0.66700447", "text": "func (s *IndicesStatsService) FilterPath(filterPath ...string) *IndicesStatsService {\n\ts.filterPath = filterPath\n\treturn s\n}", "title": "" }, { "docid": "117bf656a43291bad291ac9e1d634965", "score": "0.65802956", "text": "func (s *ClusterStateService) FilterPath(filterPath ...string) *ClusterStateService {\n\ts.filterPath = filterPath\n\treturn s\n}", "title": "" }, { "docid": "8c8c0934518654102cff6445ff6579e3", "score": "0.6562853", "text": "func (s *DeleteService) FilterPath(filterPath ...string) *DeleteService {\n\ts.filterPath = filterPath\n\treturn s\n}", "title": "" }, { "docid": "7817134d9c78b4e452c30c46da7079d1", "score": "0.64880526", "text": "func (s *BulkService) FilterPath(filterPath ...string) *BulkService {\n\ts.filterPath = filterPath\n\treturn s\n}", "title": "" }, { "docid": "274f2d569eab54ac202454c14270223a", "score": "0.62657076", "text": "func PathFilter(allowedMap map[string]Allowed) func(h http.Handler) http.Handler {\n\treturn func(nextHandler http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {\n\t\t\tif allowed, ok := allowedMap[req.URL.Path]; ok && allowed.isMethodAllowed(req.Method) {\n\t\t\t\tallowed.Handler(w, req)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tnextHandler.ServeHTTP(w, req)\n\t\t})\n\t}\n}", "title": "" }, { "docid": "4b0c0dfea37bd28878057ac5eae87f6f", "score": "0.62021655", "text": "func (rn *RootNode) isFiltered(path string) bool {\n\tif !rn.args.PlaintextNames {\n\t\treturn false\n\t}\n\t// gocryptfs.conf in the root directory is forbidden\n\tif path == configfile.ConfDefaultName {\n\t\ttlog.Info.Printf(\"The name /%s is reserved when -plaintextnames is used\\n\",\n\t\t\tconfigfile.ConfDefaultName)\n\t\treturn true\n\t}\n\t// Note: gocryptfs.diriv is NOT forbidden because diriv and plaintextnames\n\t// are exclusive\n\treturn false\n}", "title": "" }, { "docid": "5205f0ba272d3510e8cc4e5c61880250", "score": "0.61392945", "text": "func (o *CustconfAwsSignedS3PostV4) SetPathFilter(v string) {\n\to.PathFilter = &v\n}", "title": "" }, { "docid": "835a4a5f390a2a846fb79dd01f25e7cc", "score": "0.6119338", "text": "func (fs *FS) isFiltered(path string) bool {\n\tif !fs.args.PlaintextNames {\n\t\treturn false\n\t}\n\t// gocryptfs.conf in the root directory is forbidden\n\tif path == configfile.ConfDefaultName {\n\t\ttlog.Info.Printf(\"The name /%s is reserved when -plaintextnames is used\\n\",\n\t\t\tconfigfile.ConfDefaultName)\n\t\treturn true\n\t}\n\t// Note: gocryptfs.diriv is NOT forbidden because diriv and plaintextnames\n\t// are exclusive\n\treturn false\n}", "title": "" }, { "docid": "b9ff5f0ce524e91c2f89dd5c2a553ea4", "score": "0.6060316", "text": "func (o *CustconfAwsSignedS3PostV4) GetPathFilter() string {\n\tif o == nil || o.PathFilter == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.PathFilter\n}", "title": "" }, { "docid": "2710f3c4959b66dede153e93a47a7de8", "score": "0.6044304", "text": "func (o *CustconfOriginPullResumeDownload) SetPathFilter(v string) {\n\to.PathFilter = &v\n}", "title": "" }, { "docid": "43fd1cea327aaf999e00d69df12f7413", "score": "0.604011", "text": "func (o *CustconfOriginPullResumeDownload) GetPathFilter() string 
{\n\tif o == nil || o.PathFilter == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.PathFilter\n}", "title": "" }, { "docid": "4601e8892eab76100a4d2a35e54f5c23", "score": "0.59910005", "text": "func Filter(paths []*combinator.Path, policy Policy) []*combinator.Path {\n\treturn psToPaths(policy.Filter(pathsToPs(paths)))\n}", "title": "" }, { "docid": "dc515d401c8046aedca21aacdcdb2a5f", "score": "0.59414375", "text": "func (o *CustconfOriginPullResumeDownload) HasPathFilter() bool {\n\tif o != nil && o.PathFilter != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "044a5ee809c64e2207a69479fdb6b530", "score": "0.5866878", "text": "func FilterS3Path(path string) (string, string) {\n\tpath = strings.ReplaceAll(path, constants.S3Prefix, \"\")\n\tsplit := strings.Split(path, \"/\")\n\n\treturn split[0], strings.Join(split[1:], \"/\")\n}", "title": "" }, { "docid": "bb5eb2b8fd6375b6394652dd1fcf0e4b", "score": "0.5840976", "text": "func (o *CustconfRedirectMappings) GetPathFilter() string {\n\tif o == nil || o.PathFilter == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.PathFilter\n}", "title": "" }, { "docid": "7304cc87c313d8e44af1794e261689a4", "score": "0.5768928", "text": "func filterDir(source []byte, filter string) []string {\n\tvar result []string\n\tdirectories := strings.Split(string(source), \"\\n\")\n\tif filter == \"all\" {\n\t\treturn directories\n\t}\n\n\tfor _, dir := range directories {\n\t\tif strings.Contains(string(dir), filter) {\n\t\t\tresult = append(result, string(dir))\n\t\t}\n\t}\n\treturn result\n}", "title": "" }, { "docid": "c462d1bca6664a8bf792e250b4c37ca0", "score": "0.5732291", "text": "func FilteredPaths(paths []string, filter Filter) ([]string, error) {\n\treturn FilteredPathsFS(nil, paths, filter)\n}", "title": "" }, { "docid": "ffe60f16fad50eb367efcdafa9ebd09c", "score": "0.5707659", "text": "func (c Condition) Path() string {\n\treturn c.path\n}", "title": "" }, { "docid": "8d018e9310cafea581f2222dd58d7cb4", "score": "0.57043046", "text": "func (o *CustconfRedirectMappings) SetPathFilter(v string) {\n\to.PathFilter = &v\n}", "title": "" }, { "docid": "6e0df345b48bc7fee92319234c8d3f6d", "score": "0.56977737", "text": "func (d *Object) PathFilters() []PathFilter {\n\treturn []PathFilter{\n\t\t*NewPathFilter(d.path, d),\n\t}\n}", "title": "" }, { "docid": "255ec6285a11be2bcb3d798c15f10ba3", "score": "0.5692299", "text": "func (a *HTTPServer) Filter(path string, f Filter) {\n\ta.defaultHandler.filter(path, f)\n}", "title": "" }, { "docid": "d2b7c9e756a33bdb33887a406940e5b6", "score": "0.56825584", "text": "func (o *CustconfAwsSignedS3PostV4) HasPathFilter() bool {\n\tif o != nil && o.PathFilter != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "b3cd331cba7947df417b835da2037cbe", "score": "0.5607616", "text": "func Stream(path string, files chan<- string, filters ...func(path string) bool) error {\n\terr := filepath.\n\t\tWalk(path, func(path string, info os.FileInfo, err error) error {\n\t\t\tif info.IsDir() {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tfor _, filter := range filters {\n\t\t\t\tif !filter(path) {\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfiles <- path\n\n\t\t\treturn nil\n\t\t})\n\n\tclose(files)\n\treturn err\n}", "title": "" }, { "docid": "7bf71c27c1f6ecf66d484fd5f93d0ad6", "score": "0.56053585", "text": "func fileFilter(path string) bool {\n\t// Skips all files of the dst folder\n\t// TODO: closure of resizeFolder is not clean code\n\tif strings.HasPrefix(path, 
resizeFolder) {\n\t\treturn false\n\t}\n\text := filepath.Ext(path)\n\tif strings.EqualFold(ext, \".jpg\") ||\n\t\tstrings.EqualFold(ext, \".jpeg\") {\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "d41d09f2897c021588a6e54ced68f02e", "score": "0.5595976", "text": "func (f *jsiiProxy_FilterGroup) AndFilePathIsNot(pattern *string) FilterGroup {\n\tvar returns FilterGroup\n\n\t_jsii_.Invoke(\n\t\tf,\n\t\t\"andFilePathIsNot\",\n\t\t[]interface{}{pattern},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "title": "" }, { "docid": "56e342e917680074b98e666ecb7a819a", "score": "0.55831057", "text": "func (c *TreeWalker) filterFileByDate(path string) bool {\n\tif c.MaxChangeTime == 0 {\n\t\treturn true\n\t}\n\n\tif stat, error := os.Stat(path); error == nil {\n\t\treturn time.Since(stat.ModTime()) < c.MaxChangeTime\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "deda2aecc65300021fd1fe2a57d27930", "score": "0.5558097", "text": "func Filter(body []byte, paths []string) (result []byte, err error) {\n\tvar parsed *shared.Manifest\n\tif err = json.Unmarshal(body, &parsed); err != nil {\n\t\treturn nil, err\n\t}\n\tif parsed, err = parsed.FilterByPath(paths...); err != nil {\n\t\treturn nil, err\n\t}\n\tbody, err = json.Marshal(parsed)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn body, nil\n}", "title": "" }, { "docid": "063fabb5a6932ef8a904082ea9458816", "score": "0.55565625", "text": "func filterBlacklistedPaths(patch []jsonpatch.JsonPatchOperation) []jsonpatch.JsonPatchOperation {\n\tallowed := []jsonpatch.JsonPatchOperation{}\n\tfor _, po := range patch {\n\t\tif ok := allowedPath(po.Path); ok {\n\t\t\tallowed = append(allowed, po)\n\t\t}\n\t}\n\treturn allowed\n}", "title": "" }, { "docid": "ec696bf33739b0970af1917ab446a551", "score": "0.55497533", "text": "func (f *jsiiProxy_FilterGroup) AndFilePathIs(pattern *string) FilterGroup {\n\tvar returns FilterGroup\n\n\t_jsii_.Invoke(\n\t\tf,\n\t\t\"andFilePathIs\",\n\t\t[]interface{}{pattern},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "title": "" }, { "docid": "2f05cd6bd02b87f8223d107a570a5802", "score": "0.55328584", "text": "func (fw *FileSystemWatcher) filterByPattern(path string) bool {\n\tp := fw.findWatchPath(path)\n\tif p == nil {\n\t\treturn false\n\t}\n\n\t// If there was no filter pattern given, we allow it.\n\tif len(p.pattern) == 0 {\n\t\treturn true\n\t}\n\n\t// If this is a file that has been specifically added, we do not try any\n\t// filters and just allow it.\n\tif !p.isdir {\n\t\treturn true\n\t}\n\n\t// Run the filter on the filename only.\n\t_, path = filepath.Split(path)\n\tmatch, err := filepath.Match(p.pattern, path)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn false\n\t}\n\tif match {\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "82ca8644ad64f90a36dffc0516d2c592", "score": "0.5529069", "text": "func (f filterNodes) getByPath(p document.Path) *filterNode {\n\tfor _, fn := range f {\n\t\tif fn.path.IsEqual(p) {\n\t\t\treturn fn\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "e4eda8493038c0b5d170923f0163b975", "score": "0.55249584", "text": "func filterShortestPath(paths []string) string {\n\tshortestLength := len(paths[0])\n\tindex := 0\n\tfor i, path := range paths {\n\t\tif length := strings.Count(path, \"->\"); length < shortestLength {\n\t\t\tshortestLength = length\n\t\t\tindex = i\n\t\t}\n\t}\n\treturn paths[index]\n}", "title": "" }, { "docid": "c622c3a6f5da5b36e132944e55ac6f6f", "score": "0.5520407", "text": "func (t *TreeNode) Filter(q 
string, filter func(q, path string) bool) *TreeNode {\n\tspecs := t.Flatten()\n\tmatches := make([]NodeSpec, 0, len(specs))\n\tfor _, s := range specs {\n\t\tif filter(q, s.AsPath()+s.AsStatus()) {\n\t\t\tmatches = append(matches, s)\n\t\t}\n\t}\n\n\tif len(matches) == 0 {\n\t\treturn nil\n\t}\n\treturn Hydrate(matches)\n}", "title": "" }, { "docid": "c126250fb96050429377f6ea1c8a35e9", "score": "0.5513195", "text": "func (s *PathScanner) ExcludePath(path string) *PathScanner {\n\tabs, err := s.fs.AbsPath(path)\n\tif err != nil {\n\t\tlog.Println(\"scan: error finding absolute path. \", err)\n\t\t// return s // TODO: Fix this. For now, just exclude what was given.\n\t\tlog.Printf(\"scan: excluding path: %q\\n\", path)\n\t\ts.excl[path] = true\n\t}\n\tif !s.excl[abs] {\n\t\tlog.Printf(\"scan: excluding path: %q\\n\", abs)\n\t\ts.excl[abs] = true\n\t}\n\treturn s\n}", "title": "" }, { "docid": "ee2fc61559f9d0502ef1a5937b33b631", "score": "0.5480773", "text": "func (p list) Path() []string {\n\tif len(p) == 0 {\n\t\treturn nil\n\t}\n\treturn p[0].Path()\n}", "title": "" }, { "docid": "f7889f12a9dd1a65d20453d46446f198", "score": "0.54716617", "text": "func (iter *PathIterator) Path() string {\n\treturn iter.path\n}", "title": "" }, { "docid": "54eee65b1c2b7e06ece1e6f417951ba2", "score": "0.5439822", "text": "func (sb *SecurityBuilder) Path(paths ...string) *SecurityBuilder {\n\tsb.pathMatcher = web.Path(paths...)\n\tsb.paths = paths\n\treturn sb\n}", "title": "" }, { "docid": "5bfd3dc389b30560263e3096d71d1c23", "score": "0.5429588", "text": "func (fl fileLoader) Filtered(paths []string, filter Filter) (*Result, error) {\n\treturn all(fl.fsys, paths, filter, func(curr *Result, path string, depth int) error {\n\n\t\tvar (\n\t\t\tbs []byte\n\t\t\terr error\n\t\t)\n\t\tif fl.fsys != nil {\n\t\t\tbs, err = fs.ReadFile(fl.fsys, path)\n\t\t} else {\n\t\t\tbs, err = os.ReadFile(path)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tresult, err := loadKnownTypes(path, bs, fl.metrics, fl.opts)\n\t\tif err != nil {\n\t\t\tif !isUnrecognizedFile(err) {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif depth > 0 {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tresult, err = loadFileForAnyType(path, bs, fl.metrics, fl.opts)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\treturn curr.merge(path, result)\n\t})\n}", "title": "" }, { "docid": "0d1a9db92f5e2e37eaafa566f3fa18e6", "score": "0.54219735", "text": "func (f *faasPlugin) Path() string {\n\treturn \"\"\n}", "title": "" }, { "docid": "c6acd638869cfc84f94c1ab302d76690", "score": "0.5418955", "text": "func allowedPath(p string) bool {\n\tfor _, bp := range blacklistedPaths {\n\t\tif p == bp {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "312c171c989bd504d111fa6e8d52ddfc", "score": "0.5391657", "text": "func Filtered(paths []string, filter Filter) (*Result, error) {\n\treturn NewFileLoader().Filtered(paths, filter)\n}", "title": "" }, { "docid": "6d8e14e34d8b61bee14cb740f52a17eb", "score": "0.5389301", "text": "func (src *Source) Path() string {\n\treturn src.String()\n}", "title": "" }, { "docid": "c4bc3bacc16085b5fc3749301aee17d8", "score": "0.53831697", "text": "func (o SapEccResourceDatasetOutput) Path() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SapEccResourceDataset) interface{} { return v.Path }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "ad90eed5f62a58f61497183efcda41bf", "score": "0.5368369", "text": "func (o DynamicsAXResourceDatasetOutput) Path() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v 
DynamicsAXResourceDataset) interface{} { return v.Path }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "db39e1807c39bcf10e96dcdbfd8f0d31", "score": "0.5366563", "text": "func (index *index) isForbiddenPath(path string) bool {\n\ti := sort.SearchStrings(defaultIndex.files, path)\n\treturn i >= len(defaultIndex.files) || defaultIndex.files[i] != path\n}", "title": "" }, { "docid": "9c25030725b510cea39d0c4f469fb8ed", "score": "0.53529364", "text": "func (o WebTableDatasetOutput) Path() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v WebTableDataset) interface{} { return v.Path }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "279dde836668743bae396ae5a940b99d", "score": "0.53509605", "text": "func FilteredPathsFS(fsys fs.FS, paths []string, filter Filter) ([]string, error) {\n\tresult := []string{}\n\n\t_, err := all(fsys, paths, filter, func(_ *Result, path string, _ int) error {\n\t\tresult = append(result, path)\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result, nil\n}", "title": "" }, { "docid": "19f6695f09392c1b227d30c268074925", "score": "0.5346452", "text": "func WithFilterPath(filterPath []string) *Option {\n\treturn &Option{\n\t\tname: \"WithFilterPath\",\n\t\tapply: func(r *http.Request) {\n\t\t},\n\t}\n}", "title": "" }, { "docid": "e6ec009fe969b124e129886f000a09a7", "score": "0.53411853", "text": "func (o *CustconfRedirectMappings) HasPathFilter() bool {\n\tif o != nil && o.PathFilter != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "7c8c83b61463bffbce1868a424d0a91c", "score": "0.533695", "text": "func (fw *FileSystemWatcher) filterByOp(path string, op Op) bool {\n\tp := fw.findWatchPath(path)\n\tif p == nil {\n\t\treturn false\n\t}\n\t// This tests whether the given Op is included in the Op list\n\t// (e.g. 
match Create against Create|Write)\n\tif p.ops&op == op {\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "e5e4ed555639d24ea97546d580aa7105", "score": "0.532895", "text": "func (em *exactMatcher) filtersFromCache(path string) error {\n\tcPath := filepath.Join(path, canonicalStemFile)\n\tsizesPath := filepath.Join(path, sizesFile)\n\tcPathExists, err := gnsys.FileExists(cPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif cPathExists {\n\t\tif err := em.getFiltersFromCache(cPath, sizesPath); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6ea3d7da3e508a06247e6bb85d53b760", "score": "0.5322472", "text": "func (s *DirScanner) Path() string {\n\treturn s.path\n}", "title": "" }, { "docid": "24011817589f9dc831c1f11a9b205cc0", "score": "0.52906495", "text": "func (fi fileInfo) Path() string {\n\treturn fi.path\n}", "title": "" }, { "docid": "a97cd016e1a09478a81e7ad95c99a28e", "score": "0.5289334", "text": "func (s *source) Path(path string) (self Source) {\n\tself = s\n\n\tvar (\n\t\tparsed *url.URL\n\t\terr error\n\t)\n\n\tif parsed, err = url.Parse(path); err != nil {\n\t\tloggerError(s.isDebug(), \"cannot parse path: %#v\", path)\n\t\treturn\n\t}\n\n\ts.path = parsed.Path\n\treturn\n}", "title": "" }, { "docid": "2512e1f7cbcfd1a4635bc749a55fda2c", "score": "0.5285298", "text": "func (i *Index) Path() string { return i.path }", "title": "" }, { "docid": "721ca90ddabefc02f6104410e0dae0bd", "score": "0.5283612", "text": "func (ext *Extender) Filter(ctx *gocrawl.URLContext, isVisited bool) bool {\n\treturn !isVisited && exp.MatchString(ctx.NormalizedURL().String())\n}", "title": "" }, { "docid": "640a0a148f7e3cc68813f668df0a6b54", "score": "0.52792776", "text": "func (o *CustconfAwsSignedS3PostV4) GetPathFilterOk() (*string, bool) {\n\tif o == nil || o.PathFilter == nil {\n\t\treturn nil, false\n\t}\n\treturn o.PathFilter, true\n}", "title": "" }, { "docid": "0f387b6e3a1b4d20e52c37d1de3defd5", "score": "0.52698034", "text": "func (l Logger) Path(path string) Logger {\n\treturn Path(l.path + path)\n}", "title": "" }, { "docid": "8a6fa6f180b76e14e53f8384248d9022", "score": "0.5259944", "text": "func (f *File) Filter(name string, cache map[string]*regexp.Regexp) Section {\n\tname = filepath.ToSlash(name)\n\tresult := Section{}\n\tfor i := len(f.Sections) - 1; i >= 0; i-- {\n\t\tsection := f.Sections[i]\n\n\t\trx := cache[section.Name]\n\t\tif rx == nil {\n\t\t\trx = toRegexp(section.Name)\n\t\t\tif cache != nil {\n\t\t\t\tcache[section.Name] = rx\n\t\t\t}\n\t\t}\n\t\tif rx.MatchString(name) {\n\t\t\tresult.Add(section.Properties...)\n\t\t}\n\t}\n\treturn result\n}", "title": "" }, { "docid": "4355882453f05b43ec0136ad1f57ea06", "score": "0.52451175", "text": "func (o ODataResourceDatasetOutput) Path() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v ODataResourceDataset) interface{} { return v.Path }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "7b1a952c513db65b2a697bef3d282264", "score": "0.5242929", "text": "func (w *Walker) isExcluded(path string) bool {\n\tfor _, e := range w.pol.ExcludePfx {\n\t\tif strings.HasPrefix(path, e) {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "1110555b741520b283dda90099fd5cd9", "score": "0.5240078", "text": "func (h *HandlerRouter) findFilter(location Location, requestPath string, c *context.Context) {\n\tif r, ok := h.filters[location]; ok {\n\t\tfor _, filter := range r {\n\t\t\t// check method and path\n\t\t\tif filter.MatchPath(requestPath) 
{\n\t\t\t\tfilter.RunFilter(c)\n\t\t\t\tlogs.Trace(\"Execution handler filter path:%v\", filter.Pattern)\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "3171102274faf5676932b89b489c6f17", "score": "0.5235531", "text": "func (o *CustconfOriginPullResumeDownload) GetPathFilterOk() (*string, bool) {\n\tif o == nil || o.PathFilter == nil {\n\t\treturn nil, false\n\t}\n\treturn o.PathFilter, true\n}", "title": "" }, { "docid": "5d3246330d03f8f1d925f58d695e0d6b", "score": "0.5222465", "text": "func (f *IndexFile) Path() string { return f.path }", "title": "" }, { "docid": "fd8f786e5cb2113a51d0a2dd10f76c50", "score": "0.5220133", "text": "func (e *DevEngine) Path() string { return e.path }", "title": "" }, { "docid": "4dcc9dbb761774967b4ac60e90c94796", "score": "0.52108145", "text": "func (o WebTableDatasetResponseOutput) Path() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v WebTableDatasetResponse) interface{} { return v.Path }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "0515d7e79a909de8daa16b0dd2f1dc08", "score": "0.5197416", "text": "func (o ODataResourceDatasetResponseOutput) Path() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v ODataResourceDatasetResponse) interface{} { return v.Path }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "4b095762b0f4dcadd1fc374901571bca", "score": "0.5193591", "text": "func (this *DefaultExtender) Filter(ctx *URLContext, isVisited bool) bool {\n\treturn !isVisited\n}", "title": "" }, { "docid": "a654080149a95d2e60f00f5a69a7c0dc", "score": "0.5187439", "text": "func (o GetGreyTagRoutesRouteScRuleOutput) Path() pulumi.StringOutput {\n\treturn o.ApplyT(func(v GetGreyTagRoutesRouteScRule) string { return v.Path }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "2ca29f4dbf301796c65aa08b742c9acd", "score": "0.5172763", "text": "func (o DynamicsAXResourceDatasetResponseOutput) Path() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v DynamicsAXResourceDatasetResponse) interface{} { return v.Path }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "fe92eabe714de245fb03341eba63db45", "score": "0.5171581", "text": "func (c *Cache) Path() string {\n\treturn c.cfg.GetString(\"path\")\n}", "title": "" }, { "docid": "15c5ad808272d6eb4eb2690faf02dd0a", "score": "0.5167975", "text": "func (o SapEccResourceDatasetResponseOutput) Path() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SapEccResourceDatasetResponse) interface{} { return v.Path }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "e9b2a09876045a0f6dcaccba1882cc7a", "score": "0.5155667", "text": "func (bu *Buffalo) Path() string {\n\treturn bu.ctx.Request().URL.Path\n}", "title": "" }, { "docid": "6f68cdd984d97b0646e7da7f09b33e33", "score": "0.51544875", "text": "func (s *ScanOption) Evaluate(path string, info os.FileInfo) bool {\n\tpath = strings.TrimSpace(path)\n\tif len(path) == 0 {\n\t\treturn false\n\t}\n\n\tfor _, filter := range s.Filters {\n\t\tif filter(path, info) {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "f03a590bf2278b934678ebaee7390b6c", "score": "0.5153749", "text": "func (f *Filter) validate(path string) bool {\n\tparts := strings.Split(path, \"/\")\n\n\tif len(parts) != len(f.template) {\n\t\treturn false\n\t}\n\n\tfor k, v := range f.template {\n\t\tif !strings.HasPrefix(v, \":\") && v != parts[k] {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "510b10d88c003b4251e45abc9dc2bb5a", "score": "0.5149381", "text": "func matchingPath(template gomodel.PathTemplate) (string, []string, error) {\n\treturn extractPath(template, 
false)\n}", "title": "" }, { "docid": "e60210976344867eae6b5cd8e524ac26", "score": "0.5144722", "text": "func (o SapCloudForCustomerResourceDatasetOutput) Path() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SapCloudForCustomerResourceDataset) interface{} { return v.Path }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "f0f434601c8a891a47527dddefdd8146", "score": "0.5130179", "text": "func (o GroupOutput) Path() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Group) pulumi.StringOutput { return v.Path }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "e03e30bb77728f62c3874ae57fa0841e", "score": "0.510715", "text": "func MatchPath(path string) NodeFilter {\n\tfragments := strings.Split(path, \"/\")\n\tmatch := make([]string, 0)\n\tfor _, s := range fragments {\n\t\t// Remove empty components essentially clears leading and trailing \"/\"\n\t\t// and normalizes multiple \"//\" to single \"/\"\n\t\tif s != \"\" {\n\t\t\tmatch = append(match, s)\n\t\t}\n\t}\n\tvar name string\n\tif len(match) > 0 {\n\t\tname = match[len(match)-1]\n\t\tmatch = match[:len(match)-1]\n\t}\n\n\treturn func(n *Node) bool {\n\t\t// Edge case:\n\t\t// When path is \"\" or \"/\", match the root folder\n\t\tif len(match) == 0 && name == \"\" {\n\t\t\treturn n.isRoot()\n\t\t}\n\n\t\tif !strings.EqualFold(name, n.Name()) {\n\t\t\treturn false\n\t\t}\n\n\t\tp := n.Path()\n\t\tp = p[1:] // drop root element\n\t\tif len(p) != len(match) {\n\t\t\treturn false\n\t\t}\n\n\t\tfor i := 0; i < len(match); i++ {\n\t\t\tif !strings.EqualFold(p[i], match[i]) {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t\treturn true\n\t}\n}", "title": "" }, { "docid": "e893fc90cf4c32221e2164f7cab24824", "score": "0.5105816", "text": "func (archive *TarballCompressor) Path() string {\n\treturn archive.path\n}", "title": "" }, { "docid": "e8553a3f9f05ed40f32889ca55b8c31b", "score": "0.5102313", "text": "func (this *ExampleExtender) Filter(ctx *gocrawl.URLContext, isVisited bool) bool {\n\treturn !isVisited\n}", "title": "" }, { "docid": "9e32c4c7b15ee01b7c7acdfbd2bc93b5", "score": "0.50964963", "text": "func (v Specification) PathResource(path string) *ResourceDefinition {\n\tvar res *ResourceDefinition\n\tfor _, r := range v {\n\t\tif r.IsPathPrefix {\n\t\t\tif strings.HasPrefix(path, r.Path) {\n\t\t\t\tres = r\n\t\t\t\tbreak\n\t\t\t}\n\t\t} else if r.Path == path {\n\t\t\tres = r\n\t\t\tbreak\n\t\t}\n\t}\n\treturn res\n}", "title": "" }, { "docid": "772e7cc41ea4f6cbe62191781ccacc8b", "score": "0.5092716", "text": "func cleanPath(s string) string {\n\treturn s\n}", "title": "" }, { "docid": "772e7cc41ea4f6cbe62191781ccacc8b", "score": "0.5092716", "text": "func cleanPath(s string) string {\n\treturn s\n}", "title": "" }, { "docid": "98e4ff0678b1d9fb1eaa4d331c9e019a", "score": "0.5088116", "text": "func (s *ShowStreamData) Path() string {\n\treturn \"/stream/:media/show\"\n}", "title": "" }, { "docid": "c1c4f94a066a46d0907b8f053740f9d3", "score": "0.50853235", "text": "func (meta *FileMetadata) Path() string {\n\treturn meta.path\n}", "title": "" }, { "docid": "5c48dae884467dca837a59ff6db6da17", "score": "0.50776476", "text": "func (o HTTPGetActionResponseOutput) Path() pulumi.StringOutput {\n\treturn o.ApplyT(func(v HTTPGetActionResponse) string { return v.Path }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "12f95b25cf6e4f2ca1a0a306c55ac19b", "score": "0.5077527", "text": "func (p *aeBackupInformationKindFilesQueryProperty) Filter(op string, value interface{}) *aeBackupInformationKindFilesQueryBuilder {\n\tswitch op {\n\tcase 
\"<=\":\n\t\tp.LessThanOrEqual(value)\n\tcase \">=\":\n\t\tp.GreaterThanOrEqual(value)\n\tcase \"<\":\n\t\tp.LessThan(value)\n\tcase \">\":\n\t\tp.GreaterThan(value)\n\tcase \"=\":\n\t\tp.Equal(value)\n\tdefault:\n\t\tp.bldr.q = p.bldr.q.Filter(p.name+\" \"+op, value) // error raised by native query\n\t}\n\tif p.bldr.plugin != nil {\n\t\tp.bldr.plugin.Filter(p.name, op, value)\n\t}\n\treturn p.bldr\n}", "title": "" }, { "docid": "18116f52b5743b1033e9e5f7fb43b7cf", "score": "0.5070336", "text": "func (c *collector) handlePath(path string, info os.FileInfo, err error) error {\n\tif err != nil {\n\t\treturn err\n\t}\n\tif info.IsDir() {\n\t\t// Ignore hidden directories (.git, .cache, etc)\n\t\tif len(path) > 1 && path[0] == '.' ||\n\t\t\t// Staging code is symlinked from vendor/k8s.io, and uses import\n\t\t\t// paths as if it were inside of vendor/. It fails typechecking\n\t\t\t// inside of staging/, but works when typechecked as part of vendor/.\n\t\t\tpath == \"staging\" ||\n\t\t\t// OS-specific vendor code tends to be imported by OS-specific\n\t\t\t// packages. We recursively typecheck imported vendored packages for\n\t\t\t// each OS, but don't typecheck everything for every OS.\n\t\t\tpath == \"vendor\" ||\n\t\t\tpath == \"_output\" ||\n\t\t\t// This is a weird one. /testdata/ is *mostly* ignored by Go,\n\t\t\t// and this translates to kubernetes/vendor not working.\n\t\t\t// edit/record.go doesn't compile without gopkg.in/yaml.v2\n\t\t\t// in $GOSRC/$GOROOT (both typecheck and the shell script).\n\t\t\tpath == \"pkg/kubectl/cmd/testdata/edit\" {\n\t\t\treturn filepath.SkipDir\n\t\t}\n\t\tif c.regex.MatchString(path) {\n\t\t\tc.dirs = append(c.dirs, path)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "95390b471199ca7d03791ff065872ea9", "score": "0.50643885", "text": "func (t *Tester) Path(path string) string {\n\t// add root slash\n\tpath = \"/\" + strings.Trim(path, \"/\")\n\n\t// add prefix if available\n\tif t.Prefix != \"\" {\n\t\tpath = \"/\" + t.Prefix + path\n\t}\n\n\treturn path\n}", "title": "" }, { "docid": "b6d0c34a48148f9f2419c02321ad69fe", "score": "0.5063991", "text": "func (ref FileViews) Path() binpath.Text {\n\treturn binpath.Text{RootBucket, FileBucket, ref.file.String(), ViewBucket}\n}", "title": "" }, { "docid": "a3d439d6e6807c374b69dd3a56770875", "score": "0.50439656", "text": "func (r *Revip) Path(v Config, path string) error {\n\tg, err := gabs.Consume(structs.Map(r.config))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif !g.ExistsP(path) {\n\t\treturn &ErrPathNotFound{Path: path}\n\t}\n\n\tp := g.Path(path).Data()\n\n\terr = mapstructure.WeakDecode(p, v)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "934ee49f0ed3c6b4145762948f3a4cd5", "score": "0.503863", "text": "func (p *parts) Path() string {\n\treturn p.path\n}", "title": "" }, { "docid": "dc940cc412d5bcd986496642f7456acf", "score": "0.50341517", "text": "func (fs RootMappingFs) Filter(f func(m RootMapping) bool) *RootMappingFs {\n\tfs.filter = f\n\treturn &fs\n}", "title": "" }, { "docid": "14529557c7009afb5bf8a2c0ae237dd2", "score": "0.5024742", "text": "func (ev *EventCustom) Path() string {\n\treturn ev.path\n}", "title": "" }, { "docid": "927adcc390a8c7abd5dd43f149b8875a", "score": "0.50225765", "text": "func (i *imageProvider) SearchPath(path string) (bool, error) {\n\tconn, err := i.openConnection()\n\tif err != nil {\n\t\ti.config.Logger.Debug().Err(err).Msg(\"failed to open connection\")\n\t\treturn false, err\n\t}\n\tdefer 
i.closeWithLog(conn)\n\trow, err := conn.Query(\"select 1 from images where path = ?\", path)\n\tif err != nil {\n\t\ti.config.Logger.Debug().Err(err).Msg(\"failed to select from images\")\n\t\treturn false, err\n\t}\n\tdefer i.closeWithLog(row)\n\treturn row.Next(), nil\n}", "title": "" }, { "docid": "7392666142efdc9f2c376f1230a8bfad", "score": "0.50205845", "text": "func (o GreyTagRouteScRuleOutput) Path() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v GreyTagRouteScRule) *string { return v.Path }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "311c87cd2aa984a610ce10f0dc182391", "score": "0.5019942", "text": "func (c *Cache) Path(id interface{}) (string, error) {\n\tid, err := toIntID(id)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\trow := c.db.QueryRow(\"SELECT path FROM oc_filecache WHERE fileid = ?\", id)\n\tvar path string\n\terr = row.Scan(&path)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn path, nil\n}", "title": "" }, { "docid": "d5b5a4dc1df2abaefff4fe1ff8e64536", "score": "0.50169605", "text": "func SourceFilter(source string) FsFilter {\n\treturn func(f *Filesystem) bool {\n\t\treturn f != nil && f.source == source\n\t}\n}", "title": "" } ]
d81f64389fdf76315a5b52759b28e292
Handler for GET request on url: "login"
[ { "docid": "ea8b64d6e09c71be34f9e931fe5f9b37", "score": "0.6411768", "text": "func GetLogin(c *fiber.Ctx) error {\n\n\treturn c.SendFile(sv.HtmlLogin)\n}", "title": "" } ]
[ { "docid": "8344e4f086cb15afcf27b9a0fe5099f2", "score": "0.74032766", "text": "func LoginGET(w http.ResponseWriter, r *http.Request) {\n\t// Get session\n\tsess := session.Instance(r)\n\n\t// Display the view\n\tv := view.New(r)\n\tv.Name = \"login/login\"\n\tv.Vars[\"token\"] = csrfbanana.Token(w, r, sess)\n\n\t// Refill any form fields\n\tview.Repopulate([]string{\"email\"}, r.Form, v.Vars)\n\tv.Render(w)\n}", "title": "" }, { "docid": "dabb7e15413df5f886bd606f9d3d7272", "score": "0.72792155", "text": "func LoginHandler(rw http.ResponseWriter, r *http.Request) {\n\n}", "title": "" }, { "docid": "96771fb1d883e6b65861edfb35096b54", "score": "0.70586723", "text": "func LoginGet(w http.ResponseWriter, r *http.Request) {\n\t// Get session\n//\tsess := session.Instance(r)\n\n\t// Display the view\n\tsess := session.Instance(r)\n\tv := view.New(r)\n\tv.Name = \"login\"\n\tif sess.Values[\"authenticated\"]==1{\n\t\tv.Data[\"Username\"] = sess.Values[\"username\"]\n\t}else{\n\t\t\tv.Data[\"Username\"] = \"guest\"\n\t}\n\t// Refill any form fields\n\t//view.Repopulate([]string{\"email\"}, r.Form, v.Data)\n\tv.RenderTemplate(w)\n}", "title": "" }, { "docid": "4afe83975bc8b48c74e5cc3662d2a652", "score": "0.6952192", "text": "func getLogin(rw http.ResponseWriter, req *http.Request) {\n localPrefix := fmt.Sprintf(\"getLogin-%s:\", context.Get(req, RequestLogIdKey))\n if glog.V(2) {\n glog.Infof(\"%s handling begins: %s (%d bytes)\", localPrefix, req.URL, req.ContentLength)\n }\n\n loginResp := context.Get(req, CredentialsKey).(*LoginResponse)\n rw.Header().Add(\"Content-Type\", \"application/json\")\n\n // encode response to json and write the response\n enc := json.NewEncoder(rw)\n err := enc.Encode(loginResp)\n if err != nil {\n glog.Errorf(\"%s Failed to encode response into json\", localPrefix)\n }\n if glog.V(2) {\n glog.Infof(\"%s handling ends with result message: %s\", localPrefix, loginResp.ValidationResult.Message)\n }\n}", "title": "" }, { "docid": "b9618f3e94fbc2985b4ff2ff9a364121", "score": "0.687506", "text": "func (a *Auth) handleLoginGet(w http.ResponseWriter, r *http.Request) {\n\n\t// If user is already authenticated then redirect to the loginSuccessURL.\n\tif a.IsAuthenticated(r) {\n\t\thttp.Redirect(w, r, a.loginSuccessURL, http.StatusFound)\n\t}\n\n\ttpl := template.Must(template.New(\"\").Parse(\"<html>{{.}}</html>\"))\n\ttpl.Execute(w, template.HTML(a.LoginFormHTML(r)))\n}", "title": "" }, { "docid": "398c329bedbed2ba0c9cad37e8707b33", "score": "0.6838951", "text": "func GetLogin(w http.ResponseWriter, r *http.Request) {\n\n\tif config.BuildDebug {\n\t\tfmt.Println(`==> GET: ` + r.URL.Path)\n\t}\n\n\tfs := http.FileServer(http.Dir(\"frontend/login/dist\"))\n\thttp.StripPrefix(\"/login\", fs).ServeHTTP(w, r)\n\n}", "title": "" }, { "docid": "811b81581e53d89076e8a5f22cf4eb35", "score": "0.678148", "text": "func getLogin(c *gin.Context) {\n\tc.Status(http.StatusOK)\n}", "title": "" }, { "docid": "8b12adfa458d7dc5177d468b8b2134d9", "score": "0.6775915", "text": "func loginHandler(w http.ResponseWriter, r *http.Request) {\n\n\tsession, err := store.Get(r, \"session-name\")\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t}\n\n\t//name := session.Values[\"name\"]\n\t//if name == nil || name == (User{}) {\n\t//\tlog.Println(\"User null\")\n\t//\thttp.Redirect(w, r, \"/home\", http.StatusSeeOther)\n\t//\treturn\n\t//}\n\n\tif r.Method == \"GET\" {\n\t\tlog.Println(\"GET login page\")\n\t\t//flash := session.Flashes()\n\t\t//t, _ := 
template.ParseFiles(\"pages/login.html\")\n\t\ttemplates.ExecuteTemplate(w, \"login\", nil)\n\t}\n\n\tif r.Method == \"POST\" {\n\t\terr = r.ParseForm()\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t}\n\t\temail := r.Form[\"email\"][0]\n\t\tpassword := r.Form[\"password\"][0]\n\n\t\tuser, auth := AuthUser(email, password)\n\t\tif auth {\n\t\t\tsession.Values[\"name\"] = user\n\t\t\terr = session.Save(r, w)\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(err)\n\t\t\t\thttp.Redirect(w, r, \"/login\", http.StatusSeeOther)\n\t\t\t} else {\n\t\t\t\thttp.Redirect(w, r, \"/home\", http.StatusFound)\n\t\t\t}\n\t\t} else {\n\t\t\tlog.Printf(\"Login auth failed: %s\\n\", email)\n\t\t\tsession.AddFlash(\"error\", \"Username or password incorrect.\")\n\t\t\tsession.Save(r, w)\n\t\t\thttp.Redirect(w, r, \"/login\", http.StatusSeeOther)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "c499774e8d2dd3cddc45edb2c88ed03c", "score": "0.6748661", "text": "func LoginHandler(w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(\"r.Method = \", r.Method)\n\tfmt.Println(\"r.URL = \", r.URL)\n\tfmt.Println(\"r.Header = \", r.Header)\n\tfmt.Println(\"r.Body = \", r.Body)\n\n\tr.ParseForm()\n\n\t// 第一种方式\n\t// username := request.Form[\"username\"][0]\n\t// password := request.Form[\"password\"][0]\n\n\t// 第二种方式\n\tusername := r.Form.Get(\"username\")\n\tpassword := r.Form.Get(\"password\")\n\n\tfmt.Printf(\"POST form-urlencoded: username=%s, password=%s\\n\", username, password)\n\n\tkey := \"Diary:User:\" + username + \"-\" + password\n\tredis.Set(key, []byte(\"online\"))\n\n\tt, _ := template.ParseFiles(\"template/content.html\")\n\n\tt.Execute(w, \"\")\n}", "title": "" }, { "docid": "9b01ec5597b131ac65db3514bce49be3", "score": "0.6706118", "text": "func loginHandler(w http.ResponseWriter, r *http.Request) {\n\tif err := runTemplate(\"login\", w, nil); err != nil {\n\t\tglog.Error(err)\n\t\thttp.Error(w, \"Failed to fetch login page\", http.StatusInternalServerError)\n\t}\n}", "title": "" }, { "docid": "64f6eee2752a6730361268106602921d", "score": "0.66942525", "text": "func loginAPIHandler(w http.ResponseWriter, r *http.Request) {\n\t//Process the query parameters\n\tr.ParseForm()\n\t//Ensure the proper parameters were sent\n\tif len(r.Form[\"username\"]) == 0 || len(r.Form[\"password\"]) == 0 {\n\t\tw.WriteHeader(http.StatusNotFound)\n\t\tfmt.Fprint(w, \"Record Not Found\")\n\t\treturn\n\t}\n\tif len(r.Form[\"persistent\"]) == 0 {\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\tfmt.Fprint(w, \"Persistence setting not set\")\n\t\treturn\n\t}\n\tvar isPersistent = r.Form[\"persistent\"][0] == \"true\"\n\ttoken, err := handleLoginAttempt(r.Form[\"username\"][0], r.Form[\"password\"][0], isPersistent)\n\tif err != nil {\n\t\tfmt.Fprint(w, err.Error())\n\t\treturn\n\t}\n\tjsonBytes, _ := json.Marshal(token)\n\tfmt.Fprintf(w, string(jsonBytes))\n}", "title": "" }, { "docid": "6e3804214088b3bce79f5d0277a2938b", "score": "0.666602", "text": "func loginhandler(w http.ResponseWriter,r *http.Request){\n\temail:=r.FormValue(\"email\")\n\tpassword:=r.FormValue(\"password\")\n\t\n\t//check in db for user authentication\n\tstr:=db.Checkuser(email,password)\n\t\n\t//if user doesn't exist\n\tif str==\"register\"{\n\t\thttp.Redirect(w,r,\"/register\",301)\n\t}else if str==\"proceed\"{\n\t\t//if user exits, save cookie and redirect to feed\n\t\tck.Createcookie(w,email,password)\n\t\thttp.Redirect(w,r,\"/feed\",301)\n\t}else if str==\"wrong 
credentials\"{\n\t\thttp.Redirect(w,r,\"/\",301)\n\t}else{\n\t\t//some error has been countered\n\t\tlog.Println(str)\n\t\tfmt.Fprintln(w,\"Some error has occured. Please try again later\")\n\t}\n}", "title": "" }, { "docid": "eb98670539b32cce978fd125bd5190b1", "score": "0.66212046", "text": "func mercadoLivreAuthLoginHandler(w http.ResponseWriter, req *http.Request, _ httprouter.Params, session *SessionData) {\n\t// url := sdk.GetAuthURL(mercadoLivreAPPID, sdk.AuthURLMLA, mercadoLivreRedirectURL)\n\turl := sdk.GetAuthURL(mercadoLivreAPPID, sdk.AuthURLMLB, mercadoLivreRedirectURL)\n\tlog.Printf(\"url: %v\", url)\n\thttp.Redirect(w, req, url, http.StatusSeeOther)\n}", "title": "" }, { "docid": "839c071d2347ad56ca7e1fa14cadb563", "score": "0.6620413", "text": "func LoginHandler(res http.ResponseWriter, req *http.Request) {\n\n\tsession, err := store.Get(req, \"test\")\n\tif err != nil {\n\t\thttp.Error(res, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tdec := json.NewDecoder(req.Body)\n\tvar details types.LoginDetails\n\terr = dec.Decode(details)\n\n\tif err == nil {\n\n\t\tusername, password := details.Username, details.Password\n\t\tif dbio.CheckValidLoginCredentials(username, password) {\n\t\t\tsession.Values[\"username\"] = username\n\t\t\tsession.Values[\"password\"] = password\n\t\t\tsession.Save(req, res)\n\t\t\tres.Write([]byte(\"Logged in\"))\n\t\t} else {\n\t\t\tres.Write([]byte(\"Not logged in\"))\n\t\t}\n\n\t} else {\n\t\tlog.Panic(err)\n\t\thttp.Error(res, err.Error(), http.StatusBadRequest)\n\t}\n\n}", "title": "" }, { "docid": "d1f51d23206b308f84dd4d1aff835342", "score": "0.66147524", "text": "func loginHandler(w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(\"Received one login request\")\n\t// Decode a user from request(POST)\n\tdecoder := json.NewDecoder(r.Body)\n\tvar u User\n\tif err := decoder.Decode(&u); err != nil {\n\t\tpanic(err)\n\t\treturn\n\t}\n\t// Make sure the user's credential is correct\n\tif checkUser(u.Username, u.Password) {\n\t\t// Create a new token object to store\n\t\ttoken := jwt.New(jwt.SigningMethodHS256)\n\t\t// Convert it into a map for lookup\n\t\tclaims := token.Claims.(jwt.MapClaims)\n\t\t/*\n\t\t\tSet token claims\n\t\t\tStore username and expiration into it.\n\t\t*/\n\t\tclaims[\"username\"] = u.Username\n\t\tclaims[\"exp\"] = time.Now().Add(time.Hour * 24).Unix()\n\n\t\t/* Sign the token with our secret */\n\t\ttokenString, _ := token.SignedString(mySigningKey)\n\n\t\t/* Finally, write the token to the browser window */\n\t\tw.Write([]byte(tokenString))\n\t} else {\n\t\tfmt.Println(\"Invalid password or username.\")\n\t\thttp.Error(w, \"Invalid password or username\", http.StatusForbidden) //403\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"text/plain\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n}", "title": "" }, { "docid": "26a1d128f3767b1fde40a72eef4d364c", "score": "0.66076255", "text": "func loginHandler(response http.ResponseWriter, request *http.Request) {\n\tvar creds Credentials\n\n\terr := json.NewDecoder(request.Body).Decode(&creds)\n\tif err != nil {\n\t\thttp.Error(response, err.Error(), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tfmt.Println(\"Received a login request from\", creds.Username, \"with password:\", creds.Password)\n\tresponse.Header().Set(\"Content-Type\", \"application/json\")\n\n\tloginResult, _ := json.Marshal(LoginResult{Success: true, Message: fmt.Sprintf(\"Hello %s\", creds.Username)})\n\n\tfmt.Fprintf(response, string(loginResult))\n}", "title": "" }, { "docid": 
"3ba9d51335720370028b1497c0e9a6ec", "score": "0.66002196", "text": "func login(res http.ResponseWriter, req *http.Request){\n\t// Check if they're already logged in.\n\t_ ,err := req.Cookie(\"session-info\")\n\t// If they aren't then let them through.\n\tif err!=nil{\n\t\tif(req.Method == \"POST\"){\n\t\t\t// From AJAX if everything is fine push the login request forward.(second authentication handled within.)\n\t\t\tloginRequest(res,req)\n\t\t} else { \n\t\t\t// Set the content type to html , this was to fix a bug where the content header defaulted to text/plain\n\t\t\tres.Header().Set(\"Content-Type\",\"text/html\")\n\t\t\t// Execute our template\n\t\t\tloginT.Execute(res, nil)\n\t\t}\t\n\t} else{\n\t\t//If they're already logged in redirect them to the main page.\n\t\thttp.Redirect(res, req, \"/\", http.StatusFound)\n\t}\n}", "title": "" }, { "docid": "3ba9d51335720370028b1497c0e9a6ec", "score": "0.66002196", "text": "func login(res http.ResponseWriter, req *http.Request){\n\t// Check if they're already logged in.\n\t_ ,err := req.Cookie(\"session-info\")\n\t// If they aren't then let them through.\n\tif err!=nil{\n\t\tif(req.Method == \"POST\"){\n\t\t\t// From AJAX if everything is fine push the login request forward.(second authentication handled within.)\n\t\t\tloginRequest(res,req)\n\t\t} else { \n\t\t\t// Set the content type to html , this was to fix a bug where the content header defaulted to text/plain\n\t\t\tres.Header().Set(\"Content-Type\",\"text/html\")\n\t\t\t// Execute our template\n\t\t\tloginT.Execute(res, nil)\n\t\t}\t\n\t} else{\n\t\t//If they're already logged in redirect them to the main page.\n\t\thttp.Redirect(res, req, \"/\", http.StatusFound)\n\t}\n}", "title": "" }, { "docid": "4c2a308b7eb21609c72405cea897c914", "score": "0.6582", "text": "func loginHandler(res http.ResponseWriter, req *http.Request, _ httprouter.Params) {\n\tif alreadyLoggedIn(res, req) {\n\t\thttp.Redirect(res, req, hostURI+\"/account\", http.StatusSeeOther)\n\t\treturn\n\t}\n\tusername := req.FormValue(\"username\")\n\tpassword := req.FormValue(\"password\")\n\n\tresponse := tryAuth(username, password, \"login\")\n\tif response.Status != \"OK\" {\n\t\tInternalServerError(res, req, authMessage(res, response.Status, \"login\", \"error\"))\n\t\treturn\n\t}\n\tcookie := &http.Cookie{\n\t\tName: \"session\",\n\t\tValue: response.Data[\"sessionID\"].(string),\n\t\tPath: \"/\",\n\t\tHttpOnly: true,\n\t\tExpires: time.Now().Add(time.Hour * 420),\n\t}\n\taddress := response.Data[\"address\"].(string)\n\terr := sessionSetKeys(cookie.Value, username, address)\n\tif err != nil {\n\t\thttp.Error(res, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\thttp.SetCookie(res, cookie)\n\thttp.Redirect(res, req, hostURI+\"/account\", http.StatusSeeOther)\n}", "title": "" }, { "docid": "8c544b268443d25ba2f2a6fe4ada3ffd", "score": "0.65808594", "text": "func LoginHandler(w http.ResponseWriter, r *http.Request){\n\n\tname := r.FormValue(\"username\")\n\tpassword := r.FormValue(\"password\")\n\n\tredirectTarget := \"/\"\n\tmanager := filemanager.Manager{}\n\tif err := manager.Load(\"./users/userInfo.json\", &userList); err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\n\tif len(name) !=0 && len(password)!=0{\n\t\tif userList.MatchPassword(name, password) == true {\n\t\t\tredirectTarget = \"/home\"\n\t\t\tSetCookie(name, w)\n\t\t\temail := userList.GetEmail(name)\n\t\t\tuser := users.User{}.MakeUser(name, password, email)\n\t\t\tAddOnlineUser(user)\n\t\t}\n\t}\n\thttp.Redirect(w, r, redirectTarget, 302)\n}", 
"title": "" }, { "docid": "9e2f8c29cbb46301ec6820a838daaf00", "score": "0.65513104", "text": "func Login(res http.ResponseWriter, req *http.Request) {\n\tfmt.Println(\"method:\", req.Method) //get request method\n\tif req.Method == \"GET\" {\n\t\tt, _ := template.ParseFiles(\"task3/login.gtpl\")\n\t\tt.Execute(res, nil)\n\n\t} else {\n\t\treq.ParseForm()\n\t\t// logic part of log in\n\t\tname := req.Form[\"username\"]\n\t\tpass := req.Form[\"password\"]\n\t\tfmt.Println(\"username:\", name)\n\t\tfmt.Println(\"password:\", pass)\n\n\t\t//fmt.Fprintf(res, \"hello %v\\n\", name)\n\t\t//-----------------------------------------------\n\t\ttmpl, err := template.ParseFiles(\"task3/template.html\")\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n \n\t\ttodo:=\"todo list for user\"+fmt.Sprint(name)\n\t\t\n\t\tdata := TodoPageData{\n\t\t\tPageTitle: todo,\n\t\t\tTodos: []Todo{\n\t\t\t\t{Title: \"Task 1\", Done: false},\n\t\t\t\t{Title: \"Task 2\", Done: true},\n\t\t\t\t{Title: \"Task 3\", Done: true},\n\t\t\t},\n\t\t}\n\t\ttmpl.Execute(res, data)\n\n\t}\n}", "title": "" }, { "docid": "f8ebd61ef7c0daca906e66a6f4937b77", "score": "0.65411395", "text": "func login(req *web.Request) {\n\tcallback := req.URL.Scheme + \"://\" + req.URL.Host + \"/callback\"\n\ttemporaryCredentials, err := oauthClient.RequestTemporaryCredentials(callback)\n\tif err != nil {\n\t\treq.Error(web.StatusInternalServerError, err)\n\t\treturn\n\t}\n\treq.Redirect(oauthClient.AuthorizationURL(temporaryCredentials), false,\n\t\tweb.HeaderSetCookie, credentialsCookie(\"tmp\", temporaryCredentials, 0))\n}", "title": "" }, { "docid": "032220ad1ebdab68742eb588c3416d41", "score": "0.6532552", "text": "func (h *Handler) Login(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\t// Decode request\n\tvar request struct {\n\t\tUsername string `json:\"username\"`\n\t\tPassword string `json:\"password\"`\n\t}\n\n\terr := json.NewDecoder(r.Body).Decode(&request)\n\tcheckError(err)\n\n\t// Login using the authenticator\n\tsession, user, err := h.auth.Login(request.Username, request.Password)\n\tcheckError(err)\n\n\t// Send login result\n\tloginResult := map[string]interface{}{}\n\tloginResult[\"session\"] = session\n\tif user.ID != 0 && user.Username != \"\" {\n\t\tloginResult[\"user\"] = user\n\t}\n\n\tw.Header().Add(\"Content-Encoding\", \"gzip\")\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\terr = encodeGzippedJSON(w, &loginResult)\n\tcheckError(err)\n}", "title": "" }, { "docid": "2801f93155b9ef411e266e85e91e4926", "score": "0.6508073", "text": "func (s *WebServer) handleLogin(w http.ResponseWriter, r *http.Request) {\n\tcArgs := commonArgs(r, \"Login | Decred DEX\")\n\tif cArgs.UserInfo.Authed {\n\t\thttp.Redirect(w, r, marketsRoute, http.StatusSeeOther)\n\t\treturn\n\t}\n\ts.sendTemplate(w, \"login\", cArgs)\n}", "title": "" }, { "docid": "39e239011bca1269bb610499ff0ae3b2", "score": "0.6503826", "text": "func Login(w http.ResponseWriter, r *http.Request) {\n\t//fmt.Println(\"in login\")\n\n\tvar username, password string\n\n\terr := r.ParseForm()\n\tusername = r.Form.Get(\"username\")\n\tpassword = r.Form.Get(\"password\")\n\n\t//fmt.Println(\"err=\", err)\n\n\tif len(username) == 0 || len(password) == 0 {\n\t\tdecoder := json.NewDecoder(r.Body)\n\t\tvar data models.LoginRequest\n\t\terr := decoder.Decode(&data)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tusername = data.Username\n\t\tpassword = data.Password\n\t}\n\n\t//err := r.ParseForm()\n\t//username := r.Form.Get(\"username\")\n\t//password 
:= r.Form.Get(\"password\")\n\n\tlog.Println(\"Login:\", username)\n\n\t//user := &models.LoginRequest{}\n\t//body, err := ioutil.ReadAll(r.Body)\n\t//fmt.Println(\"Bodyd:\", string(body))\n\n\t//err := json.NewDecoder(r.Body).Decode(user)\n\t//user := models.LoginRequest{\n\t//if err := json.NewDecoder(r.Body).Decode(&user); err != nil {\n\tif err != nil {\n\t\tvar resp = map[string]interface{}{\"status\": false, \"message\": \"Invalid request\"}\n\t\tjson.NewEncoder(w).Encode(resp)\n\t\treturn\n\t}\n\n\t//fmt.Println(\"got\", user.Username+\" \"+user.Password)\n\t//resp := FindOne(user.Username, user.Password)\n\tresp := FindOne(username, password)\n\tjson.NewEncoder(w).Encode(resp)\n}", "title": "" }, { "docid": "9218ae740d652b736d688fdeb6c66ffa", "score": "0.6503585", "text": "func apiLoginHandler(w http.ResponseWriter, r *http.Request) {\n\tusername := r.FormValue(\"u\")\n\tpassword := r.FormValue(\"p\")\n\tresponseJson := invalidUserResponse(\"Invalid login.\")\n\tif username != \"\" && password != \"\" {\n\t\tu, err := users.Login(username, password)\n\t\tif err == nil {\n\t\t\tresponseJson = validUserResponse(u)\n\t\t}\n\t}\n\tw.Header().Add(\"content-type\", \"application/json\")\n\tfmt.Fprintf(w, \"%s\", responseJson)\n}", "title": "" }, { "docid": "f55de33b0c036def4f30bc7c5604c899", "score": "0.6444917", "text": "func (l *loginController) GetLogin(w http.ResponseWriter, r *http.Request) {\n\tdata := utils.GetViewData(r)\n\tutils.GetPlainTemplate(l.ViewBasePath, \"views/auth/login.html\").Execute(w, data)\n}", "title": "" }, { "docid": "5d3a08806dc669fca70b201df2cd1322", "score": "0.64338654", "text": "func LoginHandler(w http.ResponseWriter, req *http.Request) {\n\tif req.Method != http.MethodPost {\n\t\thttp.NotFound(w, req)\n\t\treturn\n\t}\n\tw.Header().Set(\"content-type\", \"application/json\")\n\tencoder := json.NewEncoder(w)\n\tvar body interface{}\n\tjson.NewDecoder(req.Body).Decode(&body)\n\tuser, password, err := validateLoginBody(body)\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\tencoder.Encode(err)\n\t}\n\tform := loginForm{}\n\tjson.NewDecoder(req.Body).Decode(&form)\n\ttoken, err := userservice.Login(user, password)\n\tif err != nil {\n\t\tswitch err.(type) {\n\t\tcase *autherror.AuthError:\n\t\t\t{\n\t\t\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\t\t\treturn\n\t\t\t}\n\t\tdefault:\n\t\t\t{\n\t\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\tencoder.Encode(authResponse{User: user, Token: token})\n\n}", "title": "" }, { "docid": "59adc89fa632555c5b5fb09d96ec5e45", "score": "0.64253396", "text": "func login(w http.ResponseWriter, r *http.Request) {\n\tif userStillLoggedIn(r) {\n\t\thttp.Redirect(w, r, \"/home\", http.StatusSeeOther)\n\t\treturn\n\t}\n\tif r.Method == http.MethodPost {\n\n\t\tvar loginUser User\n\t\tloginUser.UserName = r.FormValue(\"username\")\n\t\tloginUser.Password = r.FormValue(\"password\")\n\t\tif userNameExists(loginUser.UserName) {\n\n\t\t\tif validatePass(loginUser.Password, loginUser.UserName) {\n\n\t\t\t\t//create new session id\n\t\t\t\tsessionid := newSessionid()\n\t\t\t\t//Add session id to the user in the database\n\t\t\t\taddSessionToUser(loginUser, sessionid)\n\t\t\t\t//set the clients session cookie\n\t\t\t\tsessionCookie := &http.Cookie{\n\t\t\t\t\tName: \"session\",\n\t\t\t\t\tValue: sessionid,\n\t\t\t\t}\n\t\t\t\thttp.SetCookie(w, sessionCookie)\n\t\t\t\t//Create username cookie\n\t\t\t\tusernameCookie := &http.Cookie{\n\t\t\t\t\tName: 
\"username\",\n\t\t\t\t\tValue: loginUser.UserName,\n\t\t\t\t}\n\t\t\t\t//Set a cookie to username\n\t\t\t\thttp.SetCookie(w, usernameCookie)\n\t\t\t\t//Send the home page to user\n\t\t\t\tlog.Println(\"successfully logged in\") // for testing\n\t\t\t\thttp.Redirect(w, r, \"/\", http.StatusSeeOther)\n\t\t\t} else {\n\t\t\t\thttp.Error(w, \"username and/or password does not match\", http.StatusForbidden)\n\t\t\t\treturn\n\t\t\t}\n\t\t} else {\n\t\t\thttp.Error(w, \"username and/or password does not match\", http.StatusForbidden)\n\t\t\treturn\n\t\t}\n\t}\n\terr := tpl.ExecuteTemplate(w, \"login.gohtml\", nil)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "title": "" }, { "docid": "736f2d51c2fff59feefb928b16062645", "score": "0.64198864", "text": "func (rx *Remix) Login(w http.ResponseWriter, r *http.Request) {\n\tvar (\n\t\tdata = render.NewTemplateData()\n\t\tflash = NewFlash()\n\t\tloginPath = \"auth/login\"\n\t\tok bool\n\t\tss *sessions.Session\n\t)\n\n\tif ss, ok = rx.isInSession(r); ok {\n\t\thttp.Redirect(w, r, rx.cfg.LoginRedirect, http.StatusFound)\n\t\treturn\n\t}\n\tif r.Method == \"GET\" {\n\t\tfd := flash.Get(ss)\n\t\tif fd != nil {\n\t\t\tdata.Add(\"flash\", fd.Data)\n\t\t}\n\t\trx.rendr.HTML(w, http.StatusOK, loginPath, data)\n\t\treturn\n\t}\n\tif r.Method == \"POST\" {\n\t\tform := ComposeLoginForm()(r)\n\t\tif !form.IsValid() {\n\t\t\tdata.Add(\"errors\", form.Errors())\n\t\t\trx.rendr.HTML(w, http.StatusOK, loginPath, data)\n\t\t\treturn\n\t\t}\n\n\t\tlform := form.GetModel().(loginForm)\n\t\tuser, err := GetUser(setDB(rx.db, rx.cfg.AccountsDB), rx.cfg.AccountsBucket, lform.Email)\n\t\tif err != nil {\n\t\t\tdata.Add(\"error\", \"email au namba ya siri sio sahihi, tafadhali jaribu tena\")\n\t\t\trx.rendr.HTML(w, http.StatusOK, loginPath, data)\n\t\t\treturn\n\t\t}\n\t\tif err = verifyPass(user.Password(), lform.Password); err != nil {\n\t\t\tdata.Add(\"error\", \"email au namba ya siri sio sahihi, tafadhali jaribu tena\")\n\t\t\trx.rendr.HTML(w, http.StatusOK, loginPath, data)\n\t\t\treturn\n\t\t}\n\t\tss, err = rx.sess.New(r, rx.cfg.SessionName)\n\t\tif err != nil {\n\t\t\t//log this\n\t\t}\n\t\tss.Values[\"user\"] = user.EmailAddress\n\t\tss.Values[\"isAuthorized\"] = true\n\t\terr = ss.Save(r, w)\n\t\tif err != nil {\n\t\t\trx.rendr.HTML(w, http.StatusInternalServerError, \"500\", data)\n\t\t\treturn\n\t\t}\n\t\thttp.Redirect(w, r, rx.cfg.LoginRedirect, http.StatusFound)\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "a5ca6ded1e4e787e47a06a7c5df3101a", "score": "0.64087695", "text": "func Login(w http.ResponseWriter, req *http.Request) {\n\n\tcontentType := req.Header.Get(\"Content-Type\")\n\n\tif !strings.Contains(contentType, \"application/json\") {\n\t\terrors.HandleError(errors.GenerateCustomError(\"Content-Type is not application/json\"))\n\t}\n\n\tbody, err := ioutil.ReadAll(req.Body)\n\terrors.HandleError(errors.ConvertCustomError(err))\n\n\tvar userdata restapi.RESTUser\n\terr = json.Unmarshal(body, &userdata)\n\terrors.HandleError(errors.ConvertCustomError(err))\n\n\tfl, _ := CheckLoginPassword(userdata.Login, userdata.Password)\n\n\tif fl {\n\t\tw.WriteHeader(http.StatusAccepted)\n\t\tfmt.Fprintf(w, \"%s\", \"Authorized\")\n\t} else {\n\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\tfmt.Fprintf(w, \"%s\", \"Login or password incorrect\")\n\t}\n}", "title": "" }, { "docid": "2290fb22d50d75e59430f1eeac8e4a6b", "score": "0.6403993", "text": "func LoginHandler(w http.ResponseWriter, r *http.Request) {\n\tr.ParseForm()\n\tusername := 
r.FormValue(\"username\")\n\tpassword := r.FormValue(\"password\")\n\tresult := db.QueryRow(\"select password from users where username=$1\", username)\n\tvar obtainedPassword string\n\terr := result.Scan(&obtainedPassword)\n\tif err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\t\tw.Write([]byte(\"<script>alert('No user exist!')</script>\"))\n\t\t\treturn\n\t\t}\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\treturn\n\t}\n\tif obtainedPassword != password {\n\t\tw.Write([]byte(\"<script>alert('Login Failed!')</script>\"))\n\t\tw.WriteHeader(http.StatusUnauthorized)\n\t} else {\n\t\tloggedin_user = username\n\t\thttp.Redirect(w, r, \"/\", 302)\n\t}\n\n}", "title": "" }, { "docid": "76741e2592f188f2d40a9086faf8f167", "score": "0.64033824", "text": "func (conn Connector) Login(w http.ResponseWriter, r *http.Request) {\n\n\tif r.Method == \"GET\" {\n\t\tform := `<form action=\"/login\" method=\"post\">\n\t\t\t\t\t<input name=\"username\" type=\"text\">\n\t\t\t\t\t<input name=\"password\" type=\"password\">\n\t\t\t\t\t\n\t\t\t\t\t<button type=\"submit\">login</button>\n\t\t\t\t</form>`\n\n\t\tw.Header().Add(\"Content-Type\", \"text/html\")\n\t\tfmt.Fprint(w, form)\n\t\treturn\n\t}\n\n\tif r.Method == \"POST\" {\n\t\tr.ParseForm()\n\n\t\tusername := r.FormValue(\"username\")\n\t\tpassword := r.FormValue(\"password\")\n\n\t\tvar user structs.User\n\n\t\t// find by username\n\t\terr := conn.Mongo.C(\"users\").Find(bson.M{\"username\": username}).One(&user)\n\n\t\tif err != nil {\n\t\t\thttp.Redirect(w, r, \"/login?msg=error\", 302)\n\t\t\treturn\n\t\t}\n\n\t\t// make sure username is equal with what you got from database\n\t\tif user.Username == username {\n\t\t\t// password check\n\t\t\thashed := helpers.GetMD5Hash(password)\n\t\t\tif user.Password == hashed {\n\t\t\t\t// todo: use cookie & session\n\t\t\t\thttp.Redirect(w, r, \"/?msg=success login\", 302)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\thttp.Redirect(w, r, \"/login?msg=failed\", 302)\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "dffbc6680b7bab8b87ce66efc773c6c0", "score": "0.6390065", "text": "func Login(w http.ResponseWriter, r *http.Request) {\n\temail, err := DecodeLogin(r.Body)\n\tif err != nil {\n\t\tBadRequest(w, fmt.Sprintf(\"Bad request from client: %v\", err))\n\t\treturn\n\t}\n\n\tuser, err := movr.GetUser(email)\n\tif user == nil {\n\t\tNotFound(w, fmt.Sprintf(\"User Email %v Not Found.\", email))\n\t\treturn\n\t}\n\n\tif err != nil {\n\t\tInternalServerError(w, fmt.Sprintf(\"Error getting data: %v.\", err))\n\t\treturn\n\t}\n\n\tStatusOK(w, LoginResponse{IsAuthenticated: user != nil})\n}", "title": "" }, { "docid": "6319faa9fb911c2aa2d3bdb6519599f2", "score": "0.63739985", "text": "func indexHandler(w http.ResponseWriter, r *http.Request) {\n switch r.Method {\n case http.MethodGet:\n session, err := store.Get(r, \"uid\")\n if err != nil {\n http.Error(w, http.StatusText(500), 500)\n }\n if val, ok := session.Values[\"uid\"].(string); ok {\n log.Println(\"User ID cookie: \", val)\n switch val {\n case \"\": \n http.Redirect(w, r, \"/login\", http.StatusFound)\n default:\n // Serve index page (demo)\n render(w, \"templates/index.html\", nil)\n }\n } else {\n http.Redirect(w, r, \"/login\", http.StatusFound)\n }\n default:\n // Unsupported method\n http.Error(w, http.StatusText(405), 405)\n }\n}", "title": "" }, { "docid": "892d075dd905347229ef08797c19ab88", "score": "0.63617104", "text": "func loginHandler(w http.ResponseWriter, r *http.Request) {\n\t// Validate that this 
is a POST request\n\tif r.Method != http.MethodPost {\n\t\tlog.Error(\"ERROR - This path only handles a POST request\")\n\t\thttp.Error(w, \"This needs to be a POST request to accept the form submission for login\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\temail := r.FormValue(\"email\")\n\tpassword := r.FormValue(\"password\")\n\tuser, ok := emailToDBUserMap[email]\n\tif !ok {\n\t\tlog.Error(\"ERROR - Could not find user with email provided during login\")\n\t\thttp.Redirect(w, r, \"/?msg=\"+url.QueryEscape(\"Could not find user with email provided\"), http.StatusSeeOther)\n\t\treturn\n\t}\n\tif err := comparePassword(user.HashedPassword, password); err != nil {\n\t\tlog.Error(\"ERROR - Password did not match what is stored in the DB\")\n\t\thttp.Redirect(w, r, \"/?msg=\"+url.QueryEscape(\"incorrect password during login\"), http.StatusSeeOther)\n\t\treturn\n\t}\n\n\tif err := createSession(user.ID, w); err != nil {\n\t\tlog.Error(\"Failed to create session for authenticated user\", err)\n\t\tmsg := \"Failed to create session for authenticated user\"\n\t\thttp.Redirect(w, r, \"/?msg=\"+url.QueryEscape(msg), http.StatusSeeOther)\n\t\treturn\n\t}\n\n\tmsg := fmt.Sprintf(\"Successfully logged in user - %s %s\", user.FirstName, user.LastName)\n\thttp.Redirect(w, r, \"/?msg=\"+url.QueryEscape(msg), http.StatusSeeOther)\n}", "title": "" }, { "docid": "76cc03c93aacb94e677d5cb8b96c8dfe", "score": "0.63554734", "text": "func (r RouteHandler) handleLogin(c echo.Context) error {\n\tusername := c.FormValue(\"username\")\n\tpassword := c.FormValue(\"password\")\n\n\t// TODO: grpc auth goes here\n\tloginReq := &userproto.LoginRequest{\n\t\tUsername: username,\n\t\tPassword: password,\n\t}\n\n\tloginResp, err := r.u.Login(context.Background(), loginReq)\n\tif err != nil {\n\t\treturn echo.NewHTTPError(http.StatusForbidden, \"Login failed.\")\n\t}\n\n\treturn setCookie(c, loginResp.Jwt)\n}", "title": "" }, { "docid": "b32f212304ef9a9159d5444e1de3db4d", "score": "0.6351849", "text": "func Login(templateDir string, store sessions.Store) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tif r.Method == \"GET\" {\n\t\t\tn := context.Get(r, \"next\")\n\t\t\tif n == nil {\n\t\t\t\tFetch(templateDir, \"login.html\").Execute(w, \"\", false, []string{\"\", \"/login\"})\n\t\t\t} else {\n\t\t\t\tnext := n.(string)\n\t\t\t\tnext64 := base64.StdEncoding.EncodeToString([]byte(next))\n\t\t\t\tFetch(templateDir, \"login.html\").Execute(w, \"\", false, []string{\"\", fmt.Sprintf(\"/login?next=%s\", string(next64))})\n\t\t\t}\n\t\t\treturn\n\t\t}\n\n\t\tr.ParseForm()\n\t\tnext := r.FormValue(\"next\")\n\n\t\t// Attempt login, taking the user back to the login page with an error message if failed\n\t\tu := r.FormValue(\"username\")\n\t\tp := r.FormValue(\"password\")\n\t\tok, err := database.CheckCredentials(u, p)\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t}\n\n\t\tif !ok {\n\t\t\tif next == \"\" {\n\t\t\t\tFetch(templateDir, \"login.html\").Execute(w, \"\", false, []string{\"Invalid username or password.\", \"/login\"})\n\t\t\t} else {\n\t\t\t\tFetch(templateDir, \"login.html\").Execute(w, \"\", false, []string{\"Invalid username of password.\", fmt.Sprintf(\"/login?next=%s\", next)})\n\t\t\t}\n\t\t\treturn\n\t\t}\n\n\t\t// Set session information\n\t\tsession, _ := store.Get(r, \"LoginState\")\n\t\tsession.Values[\"status\"] = \"loggedin\"\n\t\tsession.Values[\"user\"] = u\n\t\tsession.Values[\"admin\"], err = database.IsAdmin(u)\n\t\tif err != nil 
{\n\t\t\tlog.Println(err)\n\t\t}\n\n\t\terr = session.Save(r, w)\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t}\n\n\t\t// Redirect appropriately\n\t\tif next == \"\" {\n\t\t\thttp.Redirect(w, r, \"/\", 302)\n\t\t} else {\n\t\t\tn, err := base64.StdEncoding.DecodeString(next)\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatalf(\"Decoding next paramter: %s\", err.Error())\n\t\t\t}\n\t\t\thttp.Redirect(w, r, string(n), 302)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "84cd70c0b7dc70dd732d30cbe8d9fefe", "score": "0.63477033", "text": "func loginHandler(w http.ResponseWriter, r *http.Request) {\n\tsegs := strings.Split(r.URL.Path, \"/\")\n\taction := segs[2]\n\tprovider := segs[3]\n\tswitch action {\n\tcase \"login\":\n\n\t\tprovider, err := gomniauth.Provider(provider)\n\t\tif err != nil {\n\t\t\tlog.Fatalln(\"Error when trying to get provider\", provider, \"-\", err)\n\t\t}\n\n\t\tloginURL, err := provider.GetBeginAuthURL(nil, nil)\n\t\tif err != nil {\n\t\t\tlog.Fatalln(\"Error when trying to GetBeginAuthURL for\", provider, \"-\", err)\n\t\t}\n\n\t\tw.Header().Set(\"Location\", loginURL)\n\t\tw.WriteHeader(http.StatusTemporaryRedirect)\n\n\tcase \"callback\":\n\n\t\tprovider, err := gomniauth.Provider(provider)\n\t\tif err != nil {\n\t\t\tlog.Fatalln(\"Error when trying to get provider\", provider, \"-\", err)\n\t\t}\n\n\t\t// get the credentials\n\t\tcreds, err := provider.CompleteAuth(objx.MustFromURLQuery(r.URL.RawQuery))\n\t\tif err != nil {\n\t\t\tlog.Fatalln(\"Error when trying to complete auth for\", provider, \"-\", err)\n\t\t}\n\n\t\tuser, err := provider.GetUser(creds)\n\t\tif err != nil {\n\t\t\tlog.Fatalln(\"Error when trying to get user from\", provider, \"-\", err)\n\t\t}\n\t\tchatUser := &chatUser{User: user}\n\n\t\tm := md5.New()\n\t\tio.WriteString(m, strings.ToLower(user.Email()))\n\t\tchatUser.uniqueID = fmt.Sprintf(\"%x\", m.Sum(nil))\n\n\t\tavatarURL, err := avatars.GetAvatarURL(chatUser)\n\t\tif err != nil {\n\t\t\tlog.Fatalln(\"Error when trying to GetAvatarURL\", \"-\", err)\n\t\t}\n\n\t\tauthCookieValue := objx.New(map[string]interface{}{\n\t\t\t\"userid\": chatUser.uniqueID,\n\t\t\t\"name\": user.Name(),\n\t\t\t\"avatar_url\": avatarURL,\n\t\t}).MustBase64()\n\n\t\thttp.SetCookie(w, &http.Cookie{\n\t\t\tName: \"auth\",\n\t\t\tValue: authCookieValue,\n\t\t\tPath: \"/\"})\n\n\t\tw.Header().Set(\"Location\", \"/chat\")\n\t\tw.WriteHeader(http.StatusTemporaryRedirect)\n\n\tdefault:\n\t\tw.Write([]byte(fmt.Sprintf(\"Auth action %s not supported\", action)))\n\t\tw.WriteHeader(http.StatusNotFound)\n\t}\n}", "title": "" }, { "docid": "a336f6ca2765b57760962774dcd17008", "score": "0.6346323", "text": "func (s *Server) GetLogin(c *gin.Context) {\n\tc.HTML(http.StatusOK, \"login.tmpl\", gin.H{})\n\treturn\n}", "title": "" }, { "docid": "405c1bb3975ffe42a6a5ac5aaece23cb", "score": "0.6341411", "text": "func Login(res http.ResponseWriter, req *http.Request) {\n\tisLoggedIn, _ := alreadyLoggedIn(req)\n\tif isLoggedIn {\n\t\ttoIndexPage(res,req)\n\t\treturn\n\t}\n\tif req.Method == http.MethodPost {\n\t\tusername := req.FormValue(\"username\")\n\t\tpassword := req.FormValue(\"password\")\n\t\tok := authenticateUser(username, password)\n\t\tif ok {\n\t\t\tstartSession(res, req, username)\n\t\t\ttoIndexPage(res,req)\n\t\t\treturn\n\t\t} \n\t\ttpl.ExecuteTemplate(res, \"login.gohtml\", \"Not the right username/password.\")\n\t\treturn\n\t}\n\ttpl.ExecuteTemplate(res, \"login.gohtml\", \"\")\n}", "title": "" }, { "docid": "707728f093440b54aa16abb3d54d0d20", "score": "0.6332437", "text": 
"func (h LoginPageHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {\n\n\tif r.Method == http.MethodPost {\n\t\tlp := &LoginPage{URLQuery: r.URL.RawQuery}\n\t\tlp.Email = r.FormValue(\"email\")\n\t\tlp.Password = r.FormValue(\"password\")\n\n\t\tuserID, err := h.ur.LoginUser(lp.Email, lp.Password)\n\t\tif err == yoradb.ErrLoginFailed {\n\n\t\t\tlp.ErrorMessage = \"Не угадали email и пароль\"\n\n\t\t\tw.WriteHeader(http.StatusOK)\n\t\t\th.template.Execute(w, lp)\n\t\t\treturn\n\n\t\t} else if err != nil {\n\t\t\tlog.Printf(\"Error during user login: %v\\n\", err)\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\n\t\tsession, ok := SessionFromContext(r.Context())\n\t\tif !ok {\n\t\t\tlog.Printf(\"Error during user login: cannot read session.\\n\")\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\n\t\tsession.UserID.Int64 = userID\n\t\terr = h.sr.UpdateSession(session)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error during user login: %v\\n\", err)\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\n\t\tredirectURL := r.URL.Query().Get(\"return\")\n\t\tif redirectURL == \"\" {\n\t\t\tredirectURL = \"/\"\n\t\t}\n\n\t\thttp.Redirect(w, r, redirectURL, http.StatusSeeOther)\n\n\t} else if r.Method == http.MethodGet {\n\t\tlp := &LoginPage{URLQuery: r.URL.RawQuery}\n\t\tlp.Email = r.FormValue(\"email\")\n\n\t\tw.WriteHeader(http.StatusOK)\n\t\th.template.Execute(w, lp)\n\t} else {\n\t\tw.WriteHeader(http.StatusMethodNotAllowed)\n\t}\n}", "title": "" }, { "docid": "f59c1fe616d7b17cb9b0fdbf6e01ae24", "score": "0.6332331", "text": "func (sw *SplunkHandler) login() (string, error) {\n\tsplunkURL, _ := url.Parse(sw.url.String())\n\tsplunkURL.Path = splunkURL.Path + \"services/auth/login\"\n\n\tvalues := &url.Values{}\n\tvalues.Set(\"username\", sw.username)\n\tvalues.Set(\"password\", sw.password)\n\tvalues.Set(\"output_mode\", \"json\")\n\n\tresp, err := sw.client.PostForm(splunkURL.String(), *values)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode == 401 {\n\t\treturn \"\", fmt.Errorf(\"unauthorized\")\n\t}\n\n\tbuf, _ := ioutil.ReadAll(resp.Body)\n\n\trespData := struct{ SessionKey string }{}\n\tif err := json.Unmarshal(buf, &respData); err != nil {\n\t\treturn \"\", err\n\t}\n\n\tif respData.SessionKey == \"\" {\n\t\treturn \"\", fmt.Errorf(\"failed to obtain sessionKey in response\")\n\t}\n\n\treturn respData.SessionKey, nil\n}", "title": "" }, { "docid": "fb56bd79e1b4c531e27d0a591b9384c4", "score": "0.63298994", "text": "func LoginHandler(w http.ResponseWriter, r *http.Request) {\n\tvar exec = true\n\n\tif r.Method == http.MethodPost {\n\t\terr := r.ParseForm()\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\texec = false\n\t\t}\n\n\t\tusername := r.FormValue(\"username\")\n\t\tpassword := r.FormValue(\"password\")\n\n\t\tpasswordByte := []byte(password)\n\t\thash := sha256.Sum256(passwordByte)\n\t\tencryptedPassword := base32.StdEncoding.EncodeToString(hash[:])\n\t\t\n\t\tdb, err := sql.Open(\"sqlite3\", \"database.db\")\n\t\tif err != nil {\n\t\t\tlog.Println(err.Error())\n\t\t\thttp.Redirect(w, r, \"\", http.StatusPermanentRedirect)\n\t\t} else {\n\n\t\t\tdefer db.Close()\n\n\t\t\tsqlStmt := fmt.Sprintf(\"SELECT username FROM users WHERE username = '%s'\", username)\n\t\t\trows := db.QueryRow(sqlStmt)\n\t\t\tvar checkUsername string\n\t\t\terr = rows.Scan(&checkUsername)\n\t\t\tif err != nil {\n\t\t\t\tif err == 
sql.ErrNoRows {\n\t\t\t\t\tquery := fmt.Sprintf(\"insert into users(username,password) values('%s', '%s')\", username, encryptedPassword)\n\t\t\t\t\t_, err = db.Exec(query)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tlog.Println(err.Error())\n\t\t\t\t\t\thttp.Redirect(w, r, \"\", http.StatusPermanentRedirect)\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tlog.Println(err.Error())\n\t\t\t\t\thttp.Redirect(w, r, \"\", http.StatusPermanentRedirect)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\texec = false\n\t\t\t\t_, err = fmt.Fprintf(w, \"User already exists. Try another name.\")\n\t\t\t}\n\t\t}\n\t}\n\n\tif exec == true {\n\t\terr := templates.ExecuteTemplate(w, \"login\", nil)\n\t\tif err != nil {\n\t\t\tlog.Println(err.Error())\n\t\t\thttp.Error(w, \"Could not serve page\", http.StatusNotFound)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "98ccf6bd2e01ef505030071f2db611c1", "score": "0.6304312", "text": "func (c *client) Login(res http.ResponseWriter, req *http.Request) (*model.User, error) {\n\treturn nil, nil\n}", "title": "" }, { "docid": "fe458f3be2675d96b5d4749fdcb0f851", "score": "0.6302551", "text": "func (c *Controller) Login(user string, pass string) (err error) {\n\tlogin := unifiLogin{\n\t\tUser: user,\n\t\tPass: pass,\n\t}\n\tjson, err := json.Marshal(&login)\n\tif err != nil {return err}\n\trsp, err := c.httpClient.Post(c.url + `/api/login`,\"application/json\",bytes.NewBuffer(json))\n\tif err != nil { return err }\n\tif rsp.StatusCode == http.StatusOK {\n\t \treturn err\n\t}\n\tbody, err := ioutil.ReadAll(rsp.Body)\n\treturn fmt.Errorf(\"req: %+v, body %s\", rsp, body)\n}", "title": "" }, { "docid": "2afafe1cfe0bd3e7fcabeaeb2a54ac23", "score": "0.6298896", "text": "func doLogin(w http.ResponseWriter, r *http.Request) {\n\n var user auth.User\n if err := parseJsonBody(w, r, &user); err != nil {\n return\n }\n\n accessToken, err := auth.Login(&user)\n if err != nil {\n jsonErrorResponse(w, http.StatusForbidden, err.Error())\n return\n }\n\n jsonResponse(w, http.StatusOK, &LoginResponse{AccessToken: accessToken})\n}", "title": "" }, { "docid": "6a455d48df881c5a07a9efe8a6c10b7f", "score": "0.62960565", "text": "func Login(res http.ResponseWriter, req *http.Request) {\n\n\tif AlreadyLoggedIn(req) {\n\t\thttp.Redirect(res, req, \"/menu\", http.StatusSeeOther)\n\t\treturn\n\t}\n\n\t// process form submission\n\tif req.Method == http.MethodPost {\n\n\t\tusername := strings.ToLower(req.FormValue(\"username\"))\n\t\terr1 := validate.Var(username, \"required,min=3,max=30,alphanum\")\n\t\tif err1 != nil {\n\t\t\thttp.Error(res, \"Username and/or password do not match\", http.StatusForbidden)\n\t\t\tlog.Warning.Println(\"Attempt to login with invalid username.-\", err1)\n\t\t\treturn\n\t\t}\n\n\t\tpassword := req.FormValue(\"password\")\n\t\terr2 := validate.Var(password, \"required,min=6,max=20,alphanum\")\n\t\tif err2 != nil {\n\t\t\thttp.Error(res, \"Username and/or password do not match\", http.StatusForbidden)\n\t\t\tlog.Warning.Println(\"Attempt to login with invalid password.-\", err2)\n\t\t\treturn\n\t\t}\n\n\t\tusername = Policy.Sanitize(username)\n\t\tpassword = Policy.Sanitize(password)\n\n\t\t// check if user exist with username\n\t\tmyUser, ok := MapUsers[username]\n\t\tif !ok {\n\t\t\thttp.Error(res, \"Username and/or password do not match\", http.StatusUnauthorized)\n\t\t\tlog.Warning.Println(\"Failed login attempt.\")\n\t\t\treturn\n\t\t}\n\t\t// Matching of password entered\n\t\terr := bcrypt.CompareHashAndPassword(myUser.Password, []byte(password))\n\t\tif err != nil 
{\n\t\t\thttp.Error(res, \"Username and/or password do not match\", http.StatusForbidden)\n\t\t\tlog.Warning.Println(\"Failed login attempt.\")\n\t\t\treturn\n\t\t}\n\t\t//check if there is any existing current login, if yes, log out current browser and delete session\n\t\tif multiLogin(username) {\n\t\t\thttp.Error(res, \"Multiple login not allowed, please log out from other device. If you do not log in the account, please contact the adminstrator\", http.StatusUnauthorized)\n\t\t\tlog.Warning.Println(\"Multi login attempt is detected\")\n\t\t\treturn\n\t\t}\n\n\t\t// create session\n\t\tid := uuid.NewV4()\n\t\tmyCookie := &http.Cookie{\n\t\t\tName: \"myCookie\",\n\t\t\tValue: id.String(),\n\t\t\tExpires: time.Now().Add(30 * time.Minute),\n\t\t\tHttpOnly: true,\n\t\t\tPath: \"/\",\n\t\t\tDomain: \"127.0.0.1\",\n\t\t\tSecure: true,\n\t\t}\n\t\thttp.SetCookie(res, myCookie)\n\t\tMapSessions[myCookie.Value] = username\n\t\thttp.Redirect(res, req, \"/\", http.StatusSeeOther)\n\t\treturn\n\t}\n\n\terr := tpl.ExecuteTemplate(res, \"login.gohtml\", nil)\n\tif err != nil {\n\t\tlog.Fatal.Fatalln(err)\n\t}\n}", "title": "" }, { "docid": "9264f4d2a9fec8e62bfb0bc166f418ce", "score": "0.628421", "text": "func (m *Repository) Login(w http.ResponseWriter, r *http.Request) {\n\n\t//Perform some busness logic and pass data to template\n\tstringMap := make(map[string]string)\n\tstringMap[\"testKey\"] = \"Sent from handler\"\n\n\trender.Template(w, r, \"login.page.html\", &models.TemplateData{\n\t\tForm: forms.New(nil),\n\t})\n}", "title": "" }, { "docid": "9ce7547979fc8f17b558996ba837d302", "score": "0.6273425", "text": "func (controller *loginController) get(w http.ResponseWriter, req *http.Request) {\n\tvar messages []string\n\tvm := viewmodels.GetLoginViewModel(messages, controller.appName)\n\tvm.CsrfField = csrf.TemplateField(req)\n\tif err := controller.template.Execute(w, vm); err != nil {\n\t\thttp.Error(w, err.Error(), 500)\n\t}\n}", "title": "" }, { "docid": "909554a7a5a3c892a2df1d92b318fb45", "score": "0.62701404", "text": "func loginHandler(w http.ResponseWriter, r *http.Request) {\n switch r.Method {\n case http.MethodGet:\n // Serve login page (demo)\n render(w, \"templates/login.html\", nil)\n case http.MethodPost:\n // Attempt to login with given credentials\n // Parse form values\n err := r.ParseForm()\n if err != nil {\n http.Error(w, http.StatusText(403), 403)\n }\n username := r.PostFormValue(\"username\")\n password := r.PostFormValue(\"password\")\n\n // Retrieve the matching user from database\n row := db.QueryRow(\"SELECT uid, username, hash FROM users WHERE username = $1\", username)\n user := new(User)\n err = row.Scan(&user.UID, &user.Username, &user.Hash)\n if err == sql.ErrNoRows {\n http.NotFound(w, r)\n return\n } else if err != nil {\n http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n return\n }\n\n // Compare existing hash with given credentials\n valid := crypto.CompareHash(password, user.Hash)\n if !valid {\n log.Printf(\"Failed login attempt: username=%s, password=%s\", username, password)\n return\n }\n log.Printf(\"Successful login attempt: username=%s, password=%s\", username, password)\n // fmt.Fprintf(w, \"Successfully logged in as user %s\\n\", username)\n session, err := store.Get(r, \"uid\")\n if err != nil {\n http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n return\n }\n\n session.Values[\"uid\"] = user.UID\n err = session.Save(r, w)\n if err != nil {\n http.Error(w, 
http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n }\n\n http.Redirect(w, r, \"/\", 302)\n default:\n // Unsupported method\n http.Error(w, http.StatusText(405), 405)\n }\n}", "title": "" }, { "docid": "cc734e8457829939057e049e9a70b6fb", "score": "0.6267246", "text": "func (state *RuntimeState) Checklogin(w http.ResponseWriter, r *http.Request) {\n\tif r.Method != postMethod {\n\t\thttp.Error(w, \"you are not authorized\", http.StatusMethodNotAllowed)\n\t\treturn\n\t}\n\t_, err := checkCSRF(w, r)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\thttp.Error(w, fmt.Sprint(err), http.StatusUnauthorized)\n\t\treturn\n\t}\n\tif r.Method != postMethod {\n\t\thttp.Error(w, \"Unaurthorized\", http.StatusMethodNotAllowed)\n\t\treturn\n\t}\n\terr = r.ParseForm()\n\tif err != nil {\n\t\tlog.Println(\"Error while Parsing Form\")\n\t\thttp.Error(w, fmt.Sprint(err), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tusername := r.PostFormValue(\"username\")\n\tpassword := r.PostFormValue(\"password\")\n\tcheckpasswd, err := state.ValidateuserPassword(username, password)\n\tif !checkpasswd || err != nil {\n\t\tlog.Println(\"Password is wrong\")\n\t\thttp.Error(w, \"password is wrong\", http.StatusUnauthorized)\n\t\treturn\n\t}\n\trandomString, err := randomStringGeneration()\n\tif err != nil {\n\t\tlog.Println(err)\n\t\thttp.Error(w, \"cannot generate random string\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\texpires := time.Now().Add(time.Hour * cookieExpirationHours)\n\n\tusercookie := http.Cookie{Name: cookieName, Value: randomString, Path: indexPath, Expires: expires, HttpOnly: true}\n\n\thttp.SetCookie(w, &usercookie)\n\n\tCookieinfo := cookieInfo{username, usercookie.Expires}\n\n\tstate.cookiemutex.Lock()\n\tstate.authcookies[usercookie.Value] = Cookieinfo\n\tstate.cookiemutex.Unlock()\n\n\thttp.Redirect(w, r, indexPath, http.StatusFound)\n}", "title": "" }, { "docid": "cc034acb8b2ccac1b05df7f125761649", "score": "0.62665445", "text": "func loginHandler(w http.ResponseWriter, r *http.Request) {\n\tsegs := strings.Split(r.URL.Path, \"/\")\n\taction := segs[2]\n\tprovider := segs[3]\n\tswitch action {\n\tcase \"login\":\n\t\tlog.Println(\"TODO handle login for\", provider)\n\t\tloginUrl := conf.AuthCodeURL(\"\", oauth2.AccessTypeOffline)\n\t\tw.Header().Set(\"Location\", loginUrl)\n\t\tw.WriteHeader(http.StatusTemporaryRedirect)\n\tcase \"callback\":\n\t\tcode := r.URL.Query().Get(\"code\")\n\t\tif code == \"\" {\n\t\t\thttp.Error(w, \"authorization code not found\", http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t\ttoken, err := conf.Exchange(ctx, code)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\tclient := conf.Client(ctx, token)\n\t\tresponse, err := client.Get(\"https://api.github.com/user\")\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\tdefer response.Body.Close()\n\t\t//fmt.Fprintf(w, \"User Info : %s\\n\", userInfo)\n\t\tvar user User\n\t\tif err := json.NewDecoder(response.Body).Decode(&user); err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\t// set the user id field\n\t\t// GitHub OAuth users need not have a public email visible, but let's worry\n\t\t// about that later.\n\t\tuser.UserId = getUserId(user)\n\t\tdata, err := json.Marshal(user)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), 
http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\thttp.SetCookie(w, &http.Cookie{Name: \"auth\", Value: base64.URLEncoding.EncodeToString(data), Path: \"/\"})\n\t\tw.Header().Set(\"Location\", \"/chat\")\n\t\tw.WriteHeader(http.StatusTemporaryRedirect)\n\tdefault:\n\t\tw.WriteHeader(http.StatusNotFound)\n\t\tfmt.Fprintf(w, \"auth action %s not supported\", action)\n\t}\n}", "title": "" }, { "docid": "5086932bd864c5fe2dca16419b27a2a1", "score": "0.62637556", "text": "func Login(username string, password string) (int, string, string) {\n\tfmt.Println()\n\turl := \"http://localhost:8080/login\"\n\n\t//Creates a User struct to parse it to JSON and send as Post-data.\n\tu := User{Username: username, Password: password}\n\tjsonstring, _ := json.Marshal(u)\n\t//Creates a new request which sends the JSON data as buffer.\n\treq, err := http.NewRequest(\"GET\", url, bytes.NewBuffer(jsonstring))\n\t//req.Header.Set(\"X-Custom-Header\", \"myvalue\")\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\n\t//http.Client has a 10 second timeout before aborting an attempted connection\n\tclient := &http.Client{\n\t\tTimeout: 10 * time.Second,\n\t}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\treturn 0, \"\", errors.New(\"Unable to create http client\").Error()\n\t}\n\tbuf := new(bytes.Buffer)\n\tio.Copy(buf, resp.Body)\n\tresp.Body.Close()\n\tif resp.StatusCode == 200 {\n\t\treturn 200, buf.String(), \"\"\n\t}\n\treturn resp.StatusCode, \"\", errors.New(\"Please enter a valid username or password\").Error()\n}", "title": "" }, { "docid": "487a6bfa80fb9be95e17a35b27ef906e", "score": "0.6240599", "text": "func (this *Login) ServeHTTP(res http.ResponseWriter, req *http.Request) { //Serves the web page\n\tonskip := 0;\n\t//Awaits for a method form from the user\n\tif req.Method == \"GET\" { // Get form\n\t} else { //post form\n\t\treq.ParseForm() //Parse the form input\n\t\treq.ParseMultipartForm(0) //Parse Mulitpart forms - (This is used specifically for the file uploading in the home page)\n\t\tonskip =1\n\t\t// Sign In post request from the login html page\n\t\tif len(req.Form[\"SignIn\"]) >= 1{ //if a login is intiated\n\t\t\tfmt.Println(\"Login\")\n\t\t\tsession, err :=mgo.Dial(\"localhost\") // All data on this user is pulled from Mongo and then parsed into the html home template\n\t\t\tif err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t\tdefer session.Close()\n\t\t\tsession.SetMode(mgo.Monotonic, true)\n\t\t\tc := session.DB(\"Sonja\").C(\"Conditions\")\n\t\t\tvar result Person\n\t err = c.Find(nil).All(&result.Conditions)\n\t\t\tc = session.DB(\"Sonja\").C(\"Encounters\")\n\t\t\terr = c.Find(nil).All(&result.Encounters)\n\t\t\tc = session.DB(\"Sonja\").C(\"Connections\")\n\t\t\terr = c.Find(nil).One(&result.Connections)\n\t if err != nil {\n\t panic(err)\n\t }\n\t\t\ttmpl, err := template.ParseFiles(\"Home_Template.html\") // Parse the html file\n\t\t\tif err == nil{\n\t\t\t \ttmpl.Execute(res, result) // execute the response with the struct for the pipelines\n\t\t\t}\n\t\t}\n\t\t}\n\t\t// If no request method is made, the login html page will be serviced\n\tif onskip == 0{\n\t\ttmpl, err := template.ParseFiles(\"Login_Template.html\") // grabs the template file\n\t\tif err != nil{\n\t\t\tpanic(err)\n\t\t}\n\t\ttmpl.Execute(res, \"\")\n\t}\n}", "title": "" }, { "docid": "d113622c47d8d0a0ab9eb385b637477e", "score": "0.6238694", "text": "func Login(env *handler.Env, w http.ResponseWriter, r *http.Request) error {\n\tdat, _ := ioutil.ReadAll(r.Body) // Read the body of the POST 
request\n\t// Unmarshall this into a map\n\tvar params map[string]string\n\tjson.Unmarshal(dat, &params)\n\n\tcredentials := GetCredentials(env, params[\"Username\"], params[\"Password\"])\n\n\tout, _ := json.MarshalIndent(&credentials, \"\", \" \")\n\tfmt.Fprintf(w, string(out))\n\n\treturn nil\n}", "title": "" }, { "docid": "fac6270e474c35c3edcbb6e9505990c8", "score": "0.6225602", "text": "func login() http.Handler {\r\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\r\n\t\tb := struct {\r\n\t\t\tTitle template.HTML\r\n\t\t\tBusinessName string\r\n\t\t\tSlogan string\r\n\t\t}{\r\n\t\t\tTitle: template.HTML(\"Meeting Appointment Application\"),\r\n\t\t\tBusinessName: \",\",\r\n\t\t\tSlogan: \"\",\r\n\t\t}\r\n\t\terr := login_templates.ExecuteTemplate(w, \"base\", &b)\r\n\t\tif err != nil {\r\n\t\t\thttp.Error(w, fmt.Sprintf(\"index: couldn't parse template: %v\", err), http.StatusInternalServerError)\r\n\t\t\treturn\r\n\t\t}\r\n\t\tw.WriteHeader(http.StatusOK)\r\n\t})\r\n}", "title": "" }, { "docid": "1a17d6b1a99831b718a91bceb61d7bf7", "score": "0.62218034", "text": "func (a *App) LoginHandler(w http.ResponseWriter, r *http.Request) {\n\t// Fetch user based on username\n\t// Hash request password from the body\n\t// Compare password\n\t// If valid user and good password generate the token\n\tsigningKey := []byte(os.Getenv(\"JWT_SECRET\"))\n\n\t// create claims\n\tclaims := &jwt.StandardClaims{\n\t\tExpiresAt: 15000,\n\t\tIssuer: \"test\",\n\t}\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)\n\tsignedString, err := token.SignedString(signingKey)\n\tif err != nil {\n\t\tutils.RespondWithError(w, http.StatusInternalServerError, err.Error())\n\t}\n\tutils.RespondWithJSON(w, http.StatusOK, signedString)\n}", "title": "" }, { "docid": "2029a8e27aca82cddbf38fb9bcfdaaa3", "score": "0.62196743", "text": "func (h *Handler) Login(w http.ResponseWriter, r *http.Request) {\n\tstaffNo := r.FormValue(\"staff_no\")\n\tpassword := r.FormValue(\"password\")\n\n\tu, err := h.DB.GetUserByStaffNo(staffNo)\n\tif err != nil {\n\t\tredirect(w, r, \"/login\", \"Login failed. Username or password invalid\")\n\t\treturn\n\t}\n\n\tif !u.Registered {\n\t\tredirect(w, r, \"/login\", \"User is not registered. Check your email or with your administrator\")\n\t\treturn\n\t}\n\n\terr = bcrypt.CompareHashAndPassword([]byte(*u.Password), []byte(password))\n\tif err != nil {\n\t\tredirect(w, r, \"/login\", \"Login failed. 
Username or password invalid\")\n\t\treturn\n\t}\n\n\ttoken, err := auth.GenerateToken(u.ID, h.AuthSecret)\n\tif err != nil {\n\t\tredirect(w, r, \"/login\", \"Token generation failed\")\n\t\treturn\n\t}\n\thttp.SetCookie(w, &http.Cookie{Name: \"jwt\", Value: token})\n\n\tredirect(w, r, \"/\", \"\")\n}", "title": "" }, { "docid": "679191b15c23d9f5e9aa461bebc2ef37", "score": "0.620302", "text": "func Login(w http.ResponseWriter, req *http.Request) {\n\tlog.Printf(\"Login request\")\n\n\tres := api.Response{}\n\tloginParam := req.FormValue(\"login\")\n\tpasswordParam := req.FormValue(\"password\")\n\tuser, err := api.VerifyUserPassword(loginParam, passwordParam)\n\tif err != nil {\n\t\tres.Error = err.Error()\n\t\tjson.NewEncoder(w).Encode(&res)\n\t\treturn\n\t}\n\n\t// Get a new JWT Token if the user is validated.\n\t// The second parameter MUST be false since we didn't checked the 2FA yet.\n\ttoken, err := crypto.GetJWTToken(user, false)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tres.Error = \"Error getting a new token\"\n\t\tjson.NewEncoder(w).Encode(&res)\n\t\treturn\n\t}\n\n\tres.Data = token\n\tjson.NewEncoder(w).Encode(&res)\n}", "title": "" }, { "docid": "3812a6adb145a171dcb3938b577f5f65", "score": "0.61835873", "text": "func login(w http.ResponseWriter, r *http.Request) {\n\tgenerateHTML(w, nil, \"layout\", \"public.navbar\", \"login\")\n}", "title": "" }, { "docid": "9449255ae4eb7eedb382dba18ed2fca4", "score": "0.61805314", "text": "func UserLogin(w http.ResponseWriter, req *http.Request){\r\n //params := mux.Vars(req)\r\n //var person Patient\r\n fmt.Println(\"method: \", req.Method)//get request Method\r\n if req.Method == \"GET\" {\r\n t, _ := template.ParseFiles(\"login.html\")\r\n t.Execute(w, nil)\r\n } else {\r\n req.ParseForm()\r\n fmt.Println(\"User ID: \", req.Form[\"User ID\"])\r\n fmt.Println(\"Password: \", req.Form[\"Password\"])\r\n }\r\n\r\n\r\n _ = json.NewDecoder(req.Body).Decode(&Patients)\r\n\r\n}", "title": "" }, { "docid": "c58c6d8e76893ec0c29c2670ae7fdef6", "score": "0.6174021", "text": "func (s *Server) login(w http.ResponseWriter, r *http.Request) {\n\tu, p, ok := r.BasicAuth()\n\tif !ok {\n\t\tWriteJSONError(w, http.StatusBadRequest, \"missing credentials\")\n\t\treturn\n\t}\n\n\tuser, err := s.db.CheckCredentials(u, p)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tWriteJSONError(w, http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\n\tcookie, err := s.newEncodedCookie(\"nflpickem\", user)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tWriteJSONError(w, http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\n\thttp.SetCookie(w, cookie)\n\n\tWriteJSONSuccess(w, \"successfully logged in\")\n}", "title": "" }, { "docid": "e4f343278ff779c90f2436b03375c4d4", "score": "0.61721677", "text": "func (h *Handler) ServeLogin(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\t// If there is active session, go to index\n\terr := h.auth.AuthenticateUser(r)\n\tif err == nil {\n\t\tredirectPage(w, r, \"/\")\n\t\treturn\n\t}\n\n\t// If it's first run move to register page\n\tif h.isFirstRun() {\n\t\tredirectPage(w, r, \"/register\")\n\t\treturn\n\t}\n\n\t// Serve login file\n\tcurrentEtag := r.Header.Get(\"If-None-Match\")\n\terr = serveAssets(w, \"login.html\", currentEtag)\n\tcheckError(err)\n}", "title": "" }, { "docid": "80735dcf9ca4b0b3f6370356785ff56b", "score": "0.6170011", "text": "func (c Controller) Login(w http.ResponseWriter, r *http.Request) {\n\tif session.AlreadyLoggedIn(w, r) {\n\t\thttp.Redirect(w, r, \"/\", 
http.StatusSeeOther)\n\t\treturn\n\t}\n\n\tvar user userLogin\n\n\tjson.NewDecoder(r.Body).Decode(&user)\n\n\tif r.Method == http.MethodPost {\n\t\tun := user.userN\n\t\tp := user.pass\n\n\t\t//Check for username\n\t\tu, ok := session.Users[un]\n\t\tif !ok {\n\t\t\thttp.Error(w, \"Username and/or password do not match\", http.StatusForbidden)\n\t\t\treturn\n\t\t}\n\n\t\t//do entered match stored\n\t\terr := bcrypt.CompareHashAndPassword(u.Password, []byte(p))\n\t\tif err != nil {\n\t\t\thttp.Error(w, \"Username and/or password does not match\", http.StatusForbidden)\n\t\t\treturn\n\t\t}\n\n\t\t//create session\n\t\tsID, _ := uuid.NewV4()\n\t\tck := &http.Cookie{\n\t\t\tName: \"session\",\n\t\t\tValue: sID.String(),\n\t\t}\n\n\t\tck.MaxAge = session.SessionLength\n\t\thttp.SetCookie(w, ck)\n\t\tsession.Sessions[ck.Value] = models.Session{un, time.Now()}\n\t\thttp.Redirect(w, r, \"/\", http.StatusSeeOther)\n\t\treturn\n\n\t\tc.tpl.ExecuteTemplate(w, \"login.gohtml\", nil)\n\t}\n\n\tsession.ShowSessions()\n\n}", "title": "" }, { "docid": "76d6d1a4f50bb74b3a152bc3051df2d5", "score": "0.61685055", "text": "func HandleLoginRequest(ctx *gin.Context) {\n\tuser, err := bindDataFromRequest(ctx)\n\tif err != nil {\n\t\tctx.AbortWithStatusJSON(500, err)\n\t\treturn\n\t}\n\t//get user\n\tbl, storedUser, err := models.GetUser(user.Username)\n\tif bl == false {\n\t\tif err != nil {\n\t\t\tctx.AbortWithStatusJSON(500, err)\n\t\t\treturn\n\t\t}\n\t\tctx.AbortWithStatusJSON(500, \"Username or password not correct\")\n\t\treturn\n\t}\n\t//compare password with stored password\n\tcompare, err := services.VerifyPassword(user.Password, storedUser.Password)\n\tif err != nil {\n\t\tctx.AbortWithStatusJSON(500, \"Username or password not correct\")\n\t\treturn\n\t}\n\tif compare == false{\n\t\tctx.AbortWithStatusJSON(500, \"Username or password not correct\")\n\t\treturn\n\t} \n\tctx.AbortWithStatusJSON(200, \"Login Success\")\n\treturn\n}", "title": "" }, { "docid": "57a98d6c7d5d0a6e90fa4ff13b78c109", "score": "0.61667824", "text": "func Login(res http.ResponseWriter, req *http.Request, ps httprouter.Params) {\n\tres.Header().Set(\"Content-Type\", \"application/json\")\n\n\t// #extraction\n\textracted_login, err := extractors.Extracts_login(req)\n\tif err != nil {\n\t\t// If extraction failed.\n\t\tres.WriteHeader(http.StatusUnauthorized)\n\t\tjson.NewEncoder(res).Encode(tools.Data_error{Error: err.Error()})\n\t\treturn\n\t}\n\n\t// #repository [log the api user and get his session token]\n\tapi_user, session_token, err := repositories.Login_api_users(extracted_login.Pseudo, extracted_login.Password)\n\tif err != nil {\n\t\t// If repository failed.\n\t\tres.WriteHeader(http.StatusInternalServerError)\n\t\tjson.NewEncoder(res).Encode(tools.Data_error{Error: \"internal server error\"})\n\t\treturn\n\t} else if api_user == nil {\n\t\t// If pseudo or/and password wrong.\n\t\ttools.Remove_session_cookie(res) // remove the potential old token from the client\n\t\tres.WriteHeader(http.StatusUnauthorized)\n\t\tjson.NewEncoder(res).Encode(tools.Data_error{Error: \"bad pseudo or/and password\"})\n\t\treturn\n\t} else if api_user.Blocked {\n\t\t// If user is blocked.\n\t\ttools.Remove_session_cookie(res) // remove the potential old token from the client\n\t\tres.WriteHeader(http.StatusUnauthorized)\n\t\tjson.NewEncoder(res).Encode(tools.Data_error{Error: \"your account is blocked\"})\n\t\treturn\n\t}\n\n\t// $ success response $\n\ttools.Set_session_cookie(session_token, res) // saves the token at the client\n}", "title": 
"" }, { "docid": "ace1825f3624a88b0d51d53722515b3a", "score": "0.61640024", "text": "func PostLogin(w http.ResponseWriter, req *http.Request, session *sessions.Session) {\n\n\tbuf := new(bytes.Buffer)\n\tbuf.ReadFrom(req.Body)\n\tresp, statusCode := sendServiceRequest(\"POST\", config.UserService, \"/auth/login\", buf.Bytes(), \"\")\n\tif statusCode >= 400 {\n\t\tvar rError *responseError\n\t\tjson.Unmarshal(resp, &rError)\n\t\thttp.Error(w, rError.Description, statusCode)\n\t\treturn\n\t}\n\n\tvar requestData map[string]interface{}\n\tjson.Unmarshal(resp, &requestData)\n\n\tsession.Values[\"id\"] = requestData[\"id\"].(string)\n\tsession.Values[\"isVerified\"] = requestData[\"isVerified\"].(bool)\n\tsession.Save(req, w)\n\n\tw.Write([]byte(`{\"redirectURL\":\"/home\"}`))\n}", "title": "" }, { "docid": "42e7309055bc0c18bfb04c222c876c24", "score": "0.61624384", "text": "func Login(w http.ResponseWriter, r *http.Request) {\n\tswitch r.Method {\n\n\t//handle POST method\n\tcase http.MethodPost:\n\n\t\t//store inputted data into creds struct\n\t\tdecoder := json.NewDecoder(r.Body)\n\t\tuser := creds{}\n\t\tdecodeErr := decoder.Decode(&user)\n\n\t\t//handle err\n\t\tif decodeErr != nil {\n\t\t\tw.WriteHeader(500)\n\t\t\tw.Write([]byte(\"Internal Server Error\"))\n\t\t\treturn\n\t\t}\n\n\t\t//access user creds struct instance\n\t\tusername := user.Username\n\t\tenteredPassword := user.Password\n\n\t\t//find user in the db by username\n\t\tdbUser, dbErr := userOps.FindUserByUsername(username)\n\n\t\t//handle err\n\t\tif dbErr != nil {\n\t\t\tw.WriteHeader(401)\n\t\t\tw.Write([]byte(\"Invalid credentials\"))\n\t\t\treturn\n\t\t}\n\n\t\t//encrypt entered password and compare with password on the db\n\t\tresult := bcrypt.CompareHashAndPassword([]byte(dbUser.Password), []byte(enteredPassword))\n\n\t\t//non zero response indicates no error\n\t\tif result == nil {\n\t\t\t//generate token and set cookie\n\t\t\tuserToken, genErr := GenerateToken(username)\n\t\t\tc := http.Cookie{Name: \"Token\", Value: userToken}\n\t\t\thttp.SetCookie(w, &c)\n\n\t\t\t//handle err\n\t\t\tif genErr != nil {\n\t\t\t\tw.WriteHeader(500)\n\t\t\t\tw.Write([]byte(\"Internal Server Error\"))\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\t//Success, 200 response\n\t\t\tw.WriteHeader(200)\n\t\t\tw.Write([]byte(\"Login successful\"))\n\n\t\t}\n\n\t//ignore all other request methods\n\tdefault:\n\t\tw.WriteHeader(400)\n\t\tw.Write([]byte(\"Bad request\"))\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "55dbd324a8fb7f214f9c65a0ddf59c06", "score": "0.6151948", "text": "func (a *AdminHandler) ServeLogin(w http.ResponseWriter, r *http.Request) {\n\tif r.Method == \"POST\" {\n\t\tip := RealRemoteIP(r)\n\t\terr := a.validate(w, r)\n\t\tif err != nil {\n\t\t\t// Failed authentication\n\t\t\tlog.Printf(\"Failed authentication by %s: %s\", ip, err)\n\t\t\thttp.Redirect(w, r, \"/admin/login?err=1\", 302)\n\t\t\treturn\n\t\t}\n\n\t\t// Successful authentication\n\t\tlog.Printf(\"Successful authentication by %s\", ip)\n\t\thttp.Redirect(w, r, \"/\", 302)\n\t\treturn\n\t}\n\n\tclaims, _ := a.verify(w, r)\n\tdata := &AdminPageData{Title: \"Zerodrop Login\", Claims: claims, Config: a.App.Config}\n\tloginTmpl := a.Templates.Lookup(\"login.tmpl\")\n\terr := loginTmpl.ExecuteTemplate(w, \"login\", data)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n}", "title": "" }, { "docid": "f31405a4e784841722d45b587a77d560", "score": "0.61490643", "text": "func Login(username, password string) (string, error) {\n\trequestBody, err := 
json.Marshal(map[string]string{\n\t\t\"username\": username,\n\t\t\"password\": password,\n\t})\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\ttimeout := time.Duration(5 * time.Second)\n\tclient := http.Client{\n\t\tTimeout: timeout,\n\t}\n\tbody := bytes.NewBuffer(requestBody)\n\trequest, err := http.NewRequest(http.MethodPost, authURL, body)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\trequest.Header.Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\tresp, err := client.Do(request)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tdefer resp.Body.Close()\n\td, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\tlog.Fatalf(\"ioutil.ReadAll() failed with '%s'\\n\", err)\n\t}\n\n\tvar res map[string]string\n\tjson.Unmarshal(d, &res)\n\n\tif resp.StatusCode != http.StatusOK {\n\t\tfmt.Println(res)\n\t\treturn \"\", errors.New(\"login attempt failed with status code :\" + strconv.Itoa(resp.StatusCode))\n\t}\n\n\treturn res[\"token\"], nil\n}", "title": "" }, { "docid": "bf6fc47d004a88f45a5c904c4d200067", "score": "0.61415094", "text": "func (o *openwrtClient) login() error {\n\tclient := &http.Client{\n\t\t// block redirect\n\t\tCheckRedirect: func(req *http.Request, via []*http.Request) error {\n\t\t\treturn http.ErrUseLastResponse\n\t\t},\n\t}\n\n\t// login\n\tlogin_info := \"luci_username=\" + o.User + \"&luci_password=\" + o.Password\n\tvar req_body = []byte(login_info)\n\treq, _ := http.NewRequest(\"POST\", o.getBaseURL(), bytes.NewBuffer(req_body))\n\treq.Header.Add(\"Content-Type\", \"application/x-www-form-urlencoded\")\n\n\tresp, err := client.Do(req)\n\tif resp != nil {\n\t\tdefer resp.Body.Close()\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t} else if resp.StatusCode != 302 {\n\t\t// fail to auth\n\t\treturn &OpenwrtError{Code: resp.StatusCode, Message: \"Unauthorized\"}\n\t} else {\n\t\t// get token\n\t\tres_cookie := resp.Header[\"Set-Cookie\"][0]\n\t\tres_cookies := strings.Split(res_cookie, \";\")\n\t\tfor _, cookie := range res_cookies {\n\t\t\tcookie := strings.TrimSpace(cookie)\n\t\t\tindex := strings.Index(cookie, \"=\")\n\t\t\tvar key = cookie\n\t\t\tvar value = \"\"\n\t\t\tif index != -1 {\n\t\t\t\tkey = cookie[:index]\n\t\t\t\tvalue = cookie[index+1:]\n\t\t\t}\n\n\t\t\tif key == \"sysauth\" {\n\t\t\t\to.token = value\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "dd03bd8ed67418af3cfb5daffbc4e364", "score": "0.613895", "text": "func discordLoginURLHandler(w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(\"Received request for Discord Login URL\")\n\ttype loginURL struct {\n\t\tURL string `json:\"URL\"`\n\t}\n\tvar err error\n\tif err != nil {\n\t\thttp.Error(w, \"Failed to generate state. 
Something went wrong \"+\n\t\t\t\"or something is vulnerable.\", http.StatusInternalServerError)\n\t}\n\tcallback := \"http://\" + config.Hostname + config.HTTPPort + config.AuthRedirectHandler\n\turl := config.AuthURL + config.DiscordKey + \"&redirect_url=\" + callback +\n\t\t\"&response_type=token\" + \"&scope=\" + strings.Join(config.Scopes, \"+\")\n\n\turlJSON, err := json.Marshal(loginURL{url})\n\tif err == nil {\n\t\tw.Write(urlJSON)\n\t} else {\n\t\tw.WriteHeader(500)\n\t\tw.Write([]byte(err.Error()))\n\t}\n}", "title": "" }, { "docid": "2e31a1e52951664654f78c03e21769ee", "score": "0.61360943", "text": "func isLogin(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tsession, err := store.Get(r, \"sid\")\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error retrieving session: %v\", err)\n\t\t}\n\n\t\tif userID, found := session.Values[\"userID\"]; found && userID != \"\" {\n\n\t\t\tnext.ServeHTTP(w, r)\n\t\t} else {\n\t\t\thttp.Error(w, \"Forbidden\", http.StatusForbidden)\n\t\t}\n\t})\n}", "title": "" }, { "docid": "11fcc192e9213ed97f3f9356beece94a", "score": "0.6135099", "text": "func HandleLogin(c *gin.Context) {\n\tcUsername := c.PostForm(\"username\")\n\tcPassword := c.PostForm(\"password\")\n\tnowSec := time.Now().Unix()\n\texpireSec := int64(3600 * 24)\n\tvar dbUsername, dbPassword string\n\tdb, err := db.OpenMySQL()\n\tif err != nil {\n\t\tlog.Printf(\"fail to open db in login: %v\", err)\n\t\tc.String(http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\tdefer db.Close()\n\t// TODO: prevent SQL injection (done)\n\tfmt.Printf(\"%s is logging in\\n\", cUsername)\n\tpasswdRow, err := db.Query(\"select * from passwd where username=?\", cUsername)\n\tif err != nil {\n\t\tlog.Printf(\"fail to query: %v\", err)\n\t\tc.String(http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\tdefer passwdRow.Close()\n\tfor passwdRow.Next() {\n\t\terr = passwdRow.Scan(&dbUsername, &dbPassword)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"fail to scan row: %v\", err)\n\t\t\tc.String(http.StatusInternalServerError, err.Error())\n\t\t\treturn\n\t\t}\n\t\t// TODO: prevent sql injection & check username with token?\n\t}\n\terr = bcrypt.CompareHashAndPassword([]byte(dbPassword), []byte(cPassword))\n\tif err != nil {\n\t\tfmt.Printf(\"fail to compare hash and password in login: %v\", err)\n\t\tc.String(http.StatusUnauthorized, err.Error())\n\t\treturn\n\t}\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{\n\t\t\"iss\": \"Ryze/general\",\n\t\t\"exp\": nowSec + expireSec,\n\t\t\"iat\": nowSec,\n\t\t\"usr\": cUsername,\n\t})\n\tmySecret := []byte(\"my-secret\")\n\ttokenString, err := token.SignedString(mySecret)\n\tif err == nil {\n\t\tc.JSON(http.StatusOK, gin.H{\n\t\t\t\"username\": cUsername,\n\t\t\t\"token_type\": \"JWT\",\n\t\t\t\"access_token\": tokenString,\n\t\t\t\"token_exp\": nowSec + expireSec,\n\t\t})\n\t} else {\n\t\tfmt.Printf(\"fail to generate token: %v\", err)\n\t\tc.String(http.StatusBadRequest, err.Error())\n\t}\n}", "title": "" }, { "docid": "dfcd2438380ec3579c49818b66ca7bb9", "score": "0.6134065", "text": "func loginByGuestHandler(ctx iris.Context) {\n\tusername := ctx.PostValue(\"username\")\n\n\tuser, err := account.Lookup(username)\n\tif err != nil {\n\t\tctx.JSON(iris.Map{\n\t\t\t\"status\": \"failed\",\n\t\t\t\"error_code\": \"error_internal_error\"})\n\t\treturn\n\t}\n\n\tif user == nil {\n\t\tctx.JSON(iris.Map{\n\t\t\t\"status\": \"failed\",\n\t\t\t\"error_code\": 
\"error_user_not_found\"})\n\t\treturn\n\t}\n\n\tif user.Category != account.ACCOUNT_GUEST {\n\t\tctx.JSON(iris.Map{\n\t\t\t\"status\": \"failed\",\n\t\t\t\"error_code\": \"error_password_invalid\"})\n\t\treturn\n\t}\n\n\tdispathAndRsp(ctx, user)\n}", "title": "" }, { "docid": "b41a89d5faf1f775010b1214955130f4", "score": "0.61330706", "text": "func LoginSpotifyControler(w http.ResponseWriter, r *http.Request) {\n\tses, errSes := session.GetSessionSpotify(r)\n\tif errSes != nil {\n\t\tlog.Println(\"SESSION FAILURE : \", errSes)\n\t\thttp.Error(w, \"Server Error\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\ttok := session.GetToken(ses)\n\tif tok != nil {\n\t\thttp.Redirect(w, r, fmt.Sprintf(\"%s/timer\", config.GetConfig().Angular), http.StatusTemporaryRedirect)\n\t\treturn\n\t}\n\n\tstate := session.SetSRandomState(ses)\n\n\terrSave := ses.Save(r, w)\n\tif errSave != nil {\n\t\tlog.Println(\"SESSION FAILURE : \", errSave)\n\t\thttp.Error(w, \"Server Error\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\thttp.Redirect(w, r, spotify.GetAuthURL(state), http.StatusTemporaryRedirect)\n}", "title": "" }, { "docid": "b95d7b1dddc0896df1439504797247c4", "score": "0.61321104", "text": "func login(w http.ResponseWriter, r *http.Request){\n\t//Get value from cookie store with same name\n\tsession, _ := store.Get(r,\"session-name\")\n\t//Set authenticated to true\n\tsession.Values[\"authenticated\"]=true\n\t//if error occured show message dialog error else allright\n\tif err = sessions.Save(r, w); err != nil{\n\t\tlog.Fatalf(\"Error saving session: %v\", err)\n\t}\n\tfmt.Fprintln(w, \"You have successfully logged in.\")\n}", "title": "" }, { "docid": "109515cec850074ddc0450d3b3626fe8", "score": "0.6120609", "text": "func loginAction(c *gin.Context) {\n\tresult := util.NewResult()\n\tdefer c.JSON(http.StatusOK, result)\n\n\targ := map[string]interface{}{}\n\tif err := c.BindJSON(&arg); nil != err {\n\t\tresult.Code = -1\n\t\tresult.Msg = \"parses login request failed\"\n\n\t\treturn\n\t}\n\n\tname := arg[\"name\"].(string)\n\tpassword := arg[\"password\"].(string)\n\n\tuser := service.User.GetUserByName(name)\n\tif nil == user {\n\t\tresult.Code = -1\n\t\tresult.Msg = \"user not found\"\n\n\t\treturn\n\t}\n\n\tcrypt := sha512_crypt.New()\n\tinputHash, _ := crypt.Generate([]byte(password), []byte(user.Password))\n\tif inputHash != user.Password {\n\t\tresult.Code = -1\n\t\tresult.Msg = \"wrong password\"\n\n\t\treturn\n\t}\n\n\townBlog := service.User.GetOwnBlog(user.ID)\n\tsession := &util.SessionData{\n\t\tUID: user.ID,\n\t\tUName: user.Name,\n\t\tUB3Key: user.B3Key,\n\t\tUAvatar: user.AvatarURL,\n\t\tURole: ownBlog.UserRole,\n\t\tBID: ownBlog.ID,\n\t\tBURL: ownBlog.URL,\n\t}\n\tif err := session.Save(c); nil != err {\n\t\tresult.Code = -1\n\t\tresult.Msg = \"saves session failed: \" + err.Error()\n\t}\n}", "title": "" }, { "docid": "0a7c8de9938d3d5bc01623427e508b47", "score": "0.611787", "text": "func loginhandler(w http.ResponseWriter, r *http.Request, db *bolt.DB) {\n\n c := getcall(r)\n\n if rscore.Set.Umax < 1 {\n rscore.Sendstatus(rscore.C_NOSU, \"No such user\", w)\n return\n }\n\n if c.User != rscore.Cleanstring(c.User, rscore.RXUSER) {\n rscore.Sendstatus(rscore.C_UICH,\n \"Username includes illegal characters\", w)\n return\n }\n\n uindex := rsdb.Ruindex(db, rscore.Set)\n if !rscore.Findstrinslice(c.User, uindex.Names) {\n rscore.Sendstatus(rscore.C_NOSU, \"No such user\", w)\n return\n }\n\n u := rsdb.Ruser(db, c.User)\n\n if rscore.Valuser(u, []byte(c.Pass)) {\n u.Skey = 
rscore.Randstr(rscore.SKEYLEN)\n rsdb.Wruser(db, u)\n }\n\n rsuser.Senduser(u, r, w, rscore.Set)\n}", "title": "" }, { "docid": "43c9d53338109a7f0d68edf5956c662a", "score": "0.61100477", "text": "func (uc UserController) Login(w http.ResponseWriter, r *http.Request, p httprouter.Params) {\n\ttmpl, err := template.ParseFiles(\"templates/login.html\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\terr = tmpl.Execute(w, r.Header.Get(\"Referer\"))\n}", "title": "" }, { "docid": "7c1fffa6c25f9f7b0e514f66a01d79ae", "score": "0.6098248", "text": "func Login(res http.ResponseWriter, req *http.Request) {\n\tif AlreadyLoggedIn(req) {\n\t\thttp.Redirect(res, req, \"/\", http.StatusSeeOther)\n\t\treturn\n\t}\n\n\t// process form submission\n\tif req.Method == http.MethodPost {\n\t\tusername := req.FormValue(\"username\")\n\t\tpassword := req.FormValue(\"password\")\n\t\t// check if user exist with username\n\t\tmyUser, ok := MapUsers[username]\n\t\tif !ok {\n\t\t\thttp.Error(res, \"Username and/or password do not match\", http.StatusUnauthorized)\n\t\t\tlog.Warning(\"Invalid login attempt - user does not exist.\")\n\t\t\treturn\n\t\t}\n\n\t\t// Matching of password entered\n\t\terr := bcrypt.CompareHashAndPassword(myUser.Password, []byte(password))\n\t\tif err != nil { //passwords do not match, mismatch will be logged\n\t\t\thttp.Error(res, \"Username and/or password do not match\", http.StatusForbidden)\n\t\t\terrorString := \"Failed authentication attempt (password mismatch) by \" + myUser.Username\n\t\t\tlog.Warning(errorString)\n\t\t\treturn\n\t\t}\n\n\t\tif multiLogin(username) == true {\n\t\t\thttp.Error(res, \"Multiple login attempt detected, please log out from other device before proceeding.\", http.StatusUnauthorized)\n\t\t\tlog.Warning(\"Multiple session login attempted by username:\", username)\n\t\t\treturn\n\t\t}\n\n\t\t// create session\n\t\tid := uuid.NewV4()\n\t\texpirytime := time.Now().Add(30 * time.Minute)\n\n\t\tmyCookie := &http.Cookie{\n\t\t\tName: \"myCookie\",\n\t\t\tValue: id.String(),\n\t\t\tExpires: expirytime,\n\t\t\tHttpOnly: true,\n\t\t\tPath: \"/\",\n\t\t\tDomain: \"127.0.0.1\",\n\t\t\tSecure: true,\n\t\t}\n\n\t\thttp.SetCookie(res, myCookie)\n\t\tMapSessions[myCookie.Value] = username\n\t\thttp.Redirect(res, req, \"/\", http.StatusTemporaryRedirect)\n\t\treturn\n\t}\n\n\ttpl.ExecuteTemplate(res, \"login.gohtml\", nil)\n}", "title": "" }, { "docid": "fb9321ef5f4d1bba886f3f1f0fc60d8d", "score": "0.6095995", "text": "func LoginHandler(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Origin, X-Requested-With, Content-Type, Accept\")\n\tfmt.Println(r.Form)\n\tid := bson.NewObjectId()\n\t//nw := strings.(id)\n\tuser := &User{\n\t\tEmail: r.FormValue(\"email\"),\n\t\tID: r.FormValue(\"ID\"),\n\t\tName: r.FormValue(\"name\"),\n\t\t_id: id,\n\t}\n\n\tfmt.Println(user)\n\ti, _ := Authenticate(user, r.FormValue(\"provider\"))\n\ti2, _ := json.Marshal(i)\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Write(i2)\n}", "title": "" }, { "docid": "cca6ec7958d7d8c6b642276c3d9957a2", "score": "0.6093265", "text": "func loginHandler(w http.ResponseWriter, r *http.Request) {\n\tfirst := r.FormValue(\"first name\")\n\tlast := r.FormValue(\"last name\")\n\n\tvar count int // add if absent (this is just for school)\n\tif db.Find(&User{FirstName: first, LastName: last}).Count(&count); count == 0 {\n\t\tdb.Create(&User{FirstName: first, LastName: last})\n\t\tprintln(\"New User:\", 
first, last) // DEBUG\n\t}\n\n\tretToView(first, last, w, r)\n}", "title": "" }, { "docid": "9b3efd8e662f73a99017d4761f5a40b1", "score": "0.60930705", "text": "func Login(w http.ResponseWriter, r *http.Request) {\n\n\t// IF USER ALREADY LOGGED IN , REDIRECT\n\t// this was not throwing error because handled in frontend\n\ttokk := r.Header.Get(\"token\")\n\tif tokk != \"\" {\n\t\tmaker, err := token.NewPasetoMaker(\"abcd1234abcd1234abcd1234abcd1234\")\n\t\tif err != nil {\n\t\t\tresponses.ERROR(w, http.StatusInternalServerError, err)\n\t\t\treturn\n\t\t}\n\t\t_, err = maker.VerifyToken(tokk)\n\t\t//\n\t\tif err == nil {\n\t\t\t// lol error and ACCEPTED , what the heck\n\t\t\tresponses.ERROR(w, http.StatusUnauthorized, errors.New(\"user logged in\"))\n\t\t\treturn\n\t\t}\n\t}\n\n\t// read body\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tresponses.ERROR(w, http.StatusUnprocessableEntity, err)\n\t\treturn\n\t} // CANT READ BODY\n\n\ttmpuser := struct {\n\t\tUsername string `json:\"username\"`\n\t\tPassword string `json:\"password\"`\n\t}{} // RENAME\n\terr = json.Unmarshal(body, &tmpuser)\n\tlog.Println(\"tmp user\", tmpuser)\n\tif err != nil {\n\t\tresponses.ERROR(w, http.StatusUnprocessableEntity, err)\n\t\treturn\n\t} // WRONG JSON\n\n\tif tmpuser.Username == \"\" {\n\t\t//cant be empty\n\t\tresponses.ERROR(w, http.StatusUnprocessableEntity, errors.New(\"uname or email cant be empty\"))\n\t\treturn\n\t} // EMPTY UNAME OR EMAIL TO REMOVE, CHECK ON FRONTEND\n\n\t//check for invalid email too , TODO later\n\n\tvar database *gorm.DB\n\tdatabase, err = db.Connect()\n\tif err != nil {\n\t\tresponses.ERROR(w, http.StatusInternalServerError, err) // better ERROR msg for frontend\n\t\treturn\n\t}\n\tdefer database.Close()\n\n\tvar dbUser schema.User\n\terr = database.Model(&schema.User{}).Where(\"username = ?\", tmpuser.Username).Find(&dbUser).Error\n\tswitch err {\n\tcase nil: // DO NOTHING\n\tcase gorm.ErrRecordNotFound:\n\t\tresponses.ERROR(w, http.StatusNotFound, errors.New(\"no such user\"))\n\t\treturn\n\tdefault:\n\t\tresponses.ERROR(w, http.StatusInternalServerError, err) // better ERROR msg for frontend\n\t\treturn\n\t}\n\t//wrong pass then cant login\n\tif err := utils.Compare(tmpuser.Password, dbUser.Password); err != nil {\n\t\tresponses.ERROR(w, http.StatusUnauthorized, errors.New(\"wrong password\"))\n\t\treturn\n\t}\n\n\t//ISSUE NEW TOKEN\n\tmaker, err := token.NewPasetoMaker(\"abcd1234abcd1234abcd1234abcd1234\")\n\tif err != nil {\n\t\tresponses.ERROR(w, http.StatusInternalServerError, err) // better ERROR msg for frontend\n\t\treturn\n\t}\n\n\tvar tok string\n\ttok, err = maker.CreateToken(dbUser.Username, time.Minute*20) // FOR 10 min now\n\tif err != nil {\n\t\tresponses.ERROR(w, http.StatusInternalServerError, err) // better ERROR msg for frontend\n\t\treturn\n\t}\n\n\ttokJson := struct {\n\t\tToken string `json:\"token\"`\n\t\tUname string `json:\"username\"`\n\t}{\n\t\tToken: tok,\n\t\tUname: dbUser.Username,\n\t}\n\n\tif err != nil {\n\t\tresponses.ERROR(w, http.StatusInternalServerError, err) // better ERROR msg for frontend\n\t\treturn\n\t}\n\n\tresponses.JSON(w, http.StatusAccepted, tokJson)\n\n}", "title": "" }, { "docid": "7bc2c17f474d54096528f1a80c74e6c1", "score": "0.609289", "text": "func (u UserController) Login(c *gin.Context) {\n username := c.Query(\"username\")\n password, exists := c.Get(\"password\")\n\n if IsValidAuth(c, store, username, fmt.Sprintf(\"%v\", password)) && exists {\n Login(c, username)\n c.String(http.StatusOK, \"Logged in\")\n 
} else {\n c.String(http.StatusUnauthorized, \"Invalid username/password\")\n }\n}", "title": "" }, { "docid": "df87d2512c2dbbb247c8b4f4c6a7a651", "score": "0.6090901", "text": "func authenticationLogin(w http.ResponseWriter, r *http.Request, c *lib.Config) {\n\tr.ParseForm()\n\treq := struct {\n\t\tName string\n\t\tPW string\n\t}{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\tif err != nil {\n\t\tlog.Errorf(\"InvalidObjectError %+v\", err)\n\t\treplyError(w, \"InvalidObjectError\")\n\t\treturn\n\t}\n\n\tdb, serr := c.GetDatabaseConnection()\n\tif serr != nil {\n\t\tlog.Error(\"Could not get database for login\", serr)\n\t\treplyError(w, \"DatabaseError\")\n\t\treturn\n\t}\n\n\tloggedInUser, ses, err := services.LoginHandler(c, db, req.Name, req.PW)\n\n\tif err != nil {\n\t\tlog.Error(\"Error logging in!\", err)\n\t\treplyFameError(w, *lib.AuthenticationError(err))\n\t\treturn\n\t}\n\n\treplyData(w, map[string]interface{}{\n\t\t\"user\": loggedInUser,\n\t\t\"session\": ses.Key,\n\t})\n}", "title": "" }, { "docid": "3b4eddf05dbdc347679a838816d1f3e8", "score": "0.6076033", "text": "func (server *Server) Login(w http.ResponseWriter, r *http.Request) {\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tresponses.ERROR(w, http.StatusUnprocessableEntity, err)\n\t\treturn\n\t}\n\tuser := models.User{}\n\terr = json.Unmarshal(body, &user)\n\tif err != nil {\n\t\tresponses.ERROR(w, http.StatusUnprocessableEntity, err)\n\t\treturn\n\t}\n\n\tuser.Prepare()\n\terr = user.Validate(\"login\")\n\tif err != nil {\n\t\tresponses.ERROR(w, http.StatusUnprocessableEntity, err)\n\t\treturn\n\t}\n\tsessionID, err := server.signIn(user.Email, user.Password)\n\tif err != nil {\n\t\tformattedError := formaterror.FormatError(err.Error())\n\t\tresponses.ERROR(w, http.StatusUnprocessableEntity, formattedError)\n\t\treturn\n\t}\n\n\tcookie := http.Cookie{Name: cookieName, Value: sessionID}\n\thttp.SetCookie(w, &cookie)\n\n\tresponses.JSON(w, http.StatusOK, sessionID)\n}", "title": "" }, { "docid": "6387fee6028aef7bb43ff0f46935e05b", "score": "0.60733855", "text": "func makeLoginHandler(m *http.ServeMux, endpoints endpoint.Endpoints, options []http1.ServerOption) {\n\tm.Handle(\"/api/v1/auth/login\", http1.NewServer(endpoints.LoginEndpoint, decodeLoginRequest, encodeLoginResponse, options...))\n}", "title": "" }, { "docid": "a88ab47d4aa393c64c293a309f850b98", "score": "0.6070484", "text": "func LoginRequestHandler(w http.ResponseWriter, r *http.Request) {\n\tdec := json.NewDecoder(r.Body)\n\tvar credential Credential\n\t_ = dec.Decode(&credential)\n\tfmt.Println(credential)\n\tuser, token := GetUserFromCredential(credential)\n\t//token := models.Token{Token: \"sdafasdfasdf\"}\n\tfmt.Println(user)\n\tjson.NewEncoder(w).Encode(token)\n}", "title": "" }, { "docid": "52ef39867db129402243bd0aafb90a81", "score": "0.6055763", "text": "func (a *App) LoginPostHandler(db atlas.QBUserDB) server.HandlerWithError {\n\treturn func(w http.ResponseWriter, req *http.Request) error {\n\t\tu, _ := getUser(req)\n\t\t// user is logged in already\n\t\tif u != nil {\n\t\t\thttp.Redirect(w, req, \"/\", http.StatusFound)\n\t\t\treturn nil\n\t\t}\n\t\temail, pass := strings.TrimSpace(req.FormValue(\"email\")), req.FormValue(\"password\")\n\t\t// TODO: add proper flashes and validation\n\t\tif email == \"\" || pass == \"\" {\n\t\t\thttp.Redirect(w, req, \"/login\", http.StatusFound)\n\t\t\treturn server.NewError(http.StatusBadRequest, \"validation error: email or password cannot be empty\", fmt.Errorf(\"email or password cannot 
be empty\"))\n\t\t}\n\t\tif !govalidator.IsEmail(email) {\n\t\t\thttp.Redirect(w, req, \"/login\", http.StatusFound)\n\t\t\treturn server.NewError(http.StatusBadRequest, \"email has to be a valid email address\", fmt.Errorf(\"email has to be a valid email address\"))\n\n\t\t}\n\t\tu, err := db.GetQBUserByEmail(email)\n\t\tif err != nil {\n\t\t\thttp.Redirect(w, req, \"/login\", http.StatusFound)\n\t\t\treturn server.New500Error(\"internal server error: unable to retrieve user by email\", err)\n\t\t}\n\t\tif !u.IsCorrectPassword(pass, db) {\n\t\t\thttp.Redirect(w, req, \"/login\", http.StatusFound)\n\t\t}\n\n\t\tsess, err := db.CreateAtlasWebSession(u.ID)\n\t\tif err != nil {\n\t\t\thttp.Redirect(w, req, \"/login\", http.StatusFound)\n\t\t\treturn server.New500Error(\"internal server error: error during create web session\", err)\n\t\t}\n\n\t\tsession, err := a.Store.Get(req, sessionName)\n\t\tif err != nil {\n\t\t\treturn server.New500Error(\"internal server error: error during getting of session\", err)\n\t\t}\n\t\tsession.Values[sessionKeyName] = sess.SessionKey\n\t\tsession.Save(req, w)\n\t\thttp.Redirect(w, req, \"/w\", http.StatusFound)\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7df0ee63306f9b98188e2e3c5be17507", "score": "0.6054297", "text": "func handleGoogleLogin(w http.ResponseWriter, r *http.Request) {\n\tif r.Method == \"POST\" {\n\t\tr.ParseForm()\n\t\tif r.FormValue(\"loggedin\") == \"true\" {\n\t\t\tif id := verifyIfGoogleAccountExist(r.FormValue(\"googleID\")); id > 0 {\n\t\t\t\t//log in and redirect to landing page\n\t\t\t\t//Check cookie\n\t\t\t\tMyID = int(id)\n\t\t\t\tcookie, err := r.Cookie(\"logged\")\n\t\t\t\tif err == http.ErrNoCookie { // if not then create one\n\t\t\t\t\tcookie = &http.Cookie{\n\t\t\t\t\t\tName: \"logged\",\n\t\t\t\t\t\tValue: \"1\",\n\t\t\t\t\t\tMaxAge: 300,\n\t\t\t\t\t}\n\t\t\t\t} else { // if cookie exist set value = 1\n\t\t\t\t\tcookie.Value = \"1\"\n\t\t\t\t\tcookie.MaxAge = 300\n\t\t\t\t}\n\n\t\t\t\t// set cookie\n\t\t\t\thttp.SetCookie(w, cookie)\n\n\t\t\t\thttp.Redirect(w, r, \"/dashboard\", http.StatusSeeOther)\n\t\t\t} else {\n\t\t\t\tf, _ := template.ParseFiles(\"views/sorry.htm\")\n\t\t\t\tf.Execute(w, nil)\n\t\t\t}\n\t\t} else {\n\t\t\tf, _ := template.ParseFiles(\"views/sorry.htm\")\n\t\t\tf.Execute(w, nil)\n\t\t}\n\t} else {\n\t\tf, _ := template.ParseFiles(\"views/sorry.htm\")\n\t\tf.Execute(w, nil)\n\t}\n}", "title": "" }, { "docid": "c56cb47a5fd0b414f76555d904c81bd2", "score": "0.6051434", "text": "func (w *WebServices) LoginHandler(ctx *fiber.Ctx) error {\n\tvar cmdLogin models.LoginCMD\n\terr := ctx.BodyParser(&cmdLogin)\n\tif err != nil {\n\t\treturn fiber.NewError(400, \"Login fail, cannot parse params\")\n\t}\n\tid := w.users.Login(cmdLogin)\n\tif id == \"\" {\n\t\treturn fiber.NewError(400, \"user not found or id wrong\")\n\t}\n\n\ttype resLogHandler struct {\n\t\tToken string `json:\"token\"`\n\t}\n\n\treturn ctx.JSON(resLogHandler{\n\t\tToken: signToken(w.tokenKey, id),\n\t})\n}", "title": "" }, { "docid": "cf95c676fafe3d5c72d56b61acc9f070", "score": "0.60511756", "text": "func (h *Handler) Login(w http.ResponseWriter, r *http.Request) {\n\tstatus := http.StatusOK\n\tmessage := JSONMessage{\n\t\tStatus: \"Success\",\n\t\tMessage: \"Login User Success\",\n\t}\n\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tfmt.Printf(\"auth_handler-Login-ReadAll: %s\\n\", err.Error())\n\t\tmessage.Status = \"Failed\"\n\t\tmessage.Message = \"Failed to read body\"\n\t\tstatus = 
http.StatusBadRequest\n\t\thelpers.RenderJSON(w, helpers.MarshalJSON(message), status)\n\t\treturn\n\t}\n\n\tuserCred := Credential{}\n\terr = json.Unmarshal(body, &userCred)\n\tif err != nil {\n\t\tfmt.Printf(\"auth_handler-Login-Unmarshal: %s\\n\", err)\n\t\tmessage.Status = \"Failed\"\n\t\tmessage.Message = \"Failed to Unmarshal body to usercred\"\n\t\tstatus = http.StatusBadRequest\n\t\thelpers.RenderJSON(w, helpers.MarshalJSON(message), status)\n\t\treturn\n\t}\n\n\tstoredPassword, err := h.GetStoredPassword(userCred)\n\tif err, ok := err.(*pq.Error); ok {\n\t\tfmt.Printf(\"auth_handler-Login-GetStoredPassword: %s\\n\", err)\n\t\tmessage.Status = \"Failed\"\n\t\tmessage.ErrorCode = fmt.Sprintf(\"%s\", err.Code)\n\t\tmessage.Message = \"User does not exist\"\t\n\t\tstatus = http.StatusBadRequest\n\t\thelpers.RenderJSON(w, helpers.MarshalJSON(message), status)\n\t\treturn\n\t}\n\n\tif err = bcrypt.CompareHashAndPassword([]byte(storedPassword), []byte(userCred.Password)); err != nil {\n\t\tfmt.Printf(\"auth_handler-Login-ComparedHashAndPassword: %s\", err)\n\t\tmessage.Status = \"Failed\"\n\t\tmessage.Message = \"Username or password is wrong\"\n\t\tstatus = http.StatusUnauthorized\n\t\thelpers.RenderJSON(w, helpers.MarshalJSON(message), status)\n\t\treturn\n\t}\n\n\thelpers.RenderJSON(w, helpers.MarshalJSON(message), status)\n\n\t// JWT Token Below\n\texpirationTime := time.Now().Add(5 * time.Minute)\n\n\tclaims := &Claims{\n\t\tEmail: userCred.Email,\n\t\tStandardClaims: jwt.StandardClaims{\n\t\t\tExpiresAt: expirationTime.Unix(),\n\t\t},\n\t}\n\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)\n\n\ttokenString, err := token.SignedString([]byte(os.Getenv(\"SECRET_KEY\")))\n\tif err != nil {\n\t\tfmt.Printf(\"auth_handler-Login-SignedString: %s\\n\", err)\n\t\tmessage.Status = \"Failed\"\n\t\tmessage.Message = \"Failed to signed server secret key\"\n\t\tstatus = http.StatusInternalServerError\n\t\thelpers.RenderJSON(w, helpers.MarshalJSON(message), status)\n\t\treturn\n\t}\n\n\tcookie := http.Cookie{\n\t\tName: \"token\",\n\t\tValue: tokenString,\n\t\tExpires: expirationTime,\n\t\tPath: \"/\",\n\t}\n\thttp.SetCookie(w, &cookie)\n\t\n\treturn\n}", "title": "" }, { "docid": "e7285a20bb0ee7ec14d23b1abedc784a", "score": "0.6043221", "text": "func indexhandler(w http.ResponseWriter,r *http.Request){\n\t//Check for cookies\n\texist:=ck.Checkcookie(r)\n\tif exist==true{\n\t\thttp.Redirect(w,r,\"/feed\",301)\n\t}else{\n\t\tcontent, err := ioutil.ReadFile(\"./src/html/login.html\")\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t}\n\t\tfmt.Fprintln(w,string(content))\n\t}\n}", "title": "" }, { "docid": "7621e834844b1a5d1fb3347f65b3329c", "score": "0.6039869", "text": "func logonHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\tlog.Println(\"got to logonHandler\")\n\tname := r.FormValue(\"name\")\n\tpass := r.FormValue(\"password\")\n\tlog.Println(name)\n\n\tredirectTarget := \"/\"\n\tif name != \"\" && pass != \"\" {\n\t\t// .. 
check credentials ..\n\t\tvar msg string\n\t\tlog.Println(msg)\n\t\tlog.Println(\"name \", name)\n\n\t\t//userOK := validateUser(name, pass)\n\t\tuserOK := true\n\t\tfmt.Println(\"userOK bool status = \", strconv.FormatBool(userOK))\n\n\t\tif userOK {\n\t\t\tuserName = name\n\t\t\tsetSession(name, w)\n\t\t\tredirectTarget = \"/home\"\n\t\t\thttp.Redirect(w, r, redirectTarget, 302)\n\t\t} else {\n\t\t\tlog.Println(err)\n\t\t\tredirectTarget = \"/\"\n\t\t\thttp.Redirect(w, r, redirectTarget, 302)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "5cc51b5d7a3677f18eb2be1773eb6fab", "score": "0.6030291", "text": "func Login(w http.ResponseWriter, r *http.Request) {\n\tvar user mod.User\n\tdefer r.Body.Close()\n\n\tif err := json.NewDecoder(r.Body).Decode(&user); err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tout, err := ser.Login(r.Context(), user)\n\tif err == uti.ErrUserNotFound {\n\t\thttp.Error(w, err.Error(), http.StatusNotFound)\n\t\treturn\n\t}\n\n\tif err != nil {\n\t\trespondError(w, err)\n\t\treturn\n\t}\n\n\trespond(w, out, http.StatusOK)\n}", "title": "" }, { "docid": "3549815174d3201ec76766a28a54b583", "score": "0.60238904", "text": "func MakeLoginHTTPHandler(logger log.Logger, tracer stdopentracing.Tracer, endpoints LoginEndpoints) http.Handler {\n\toptions := []kithttp.ServerOption{\n\t\tkithttp.ServerBefore(kithttp.PopulateRequestContext),\n\t\tkithttp.ServerErrorEncoder(encodeError(logger)),\n\t}\n\n\tgetUserByUsernameAndPwdHandler := kithttp.NewServer(\n\t\tendpoints.GetByUsernameAndPwd,\n\t\tdecodeGetUserByUsernameAndPwdRequest,\n\t\tencodeResponse,\n\t\tappend(options, httptransport.ServerBefore(opentracing.FromHTTPRequest(tracer, \"calling HTTP POST /login\", logger)))...,\n\t)\n\n\tr := mux.NewRouter().PathPrefix(\"/login/\").Subrouter().StrictSlash(true)\n\tr.Handle(\"/\", getUserByUsernameAndPwdHandler).Methods(\"POST\")\n\n\treturn r\n}", "title": "" }, { "docid": "fabb154bcdc4d7db4fa2f4d4dda5fa06", "score": "0.60238487", "text": "func ServeLogin(w http.ResponseWriter, r *http.Request, opts map[string]interface{}) error {\n\treqID := opts[\"request_id\"].(string)\n\tlog := opts[\"logger\"].(*zap.Logger)\n\tuiFactory := opts[\"ui\"].(*ui.UserInterfaceFactory)\n\tauthURLPath := opts[\"auth_url_path\"].(string)\n\n\ttokenProvider := opts[\"token_provider\"].(*jwtconfig.CommonTokenConfig)\n\n\tcookies := opts[\"cookies\"].(*cookies.Cookies)\n\tredirectToToken := opts[\"redirect_token_name\"].(string)\n\tauthorized := false\n\n\tif v, exists := opts[\"authorized\"]; exists {\n\t\tauthorized = v.(bool)\n\t}\n\n\tif _, exists := opts[\"status_code\"]; !exists {\n\t\topts[\"status_code\"] = 200\n\t}\n\n\t// Remove tokens when authentication failed\n\tif opts[\"auth_credentials_found\"].(bool) && !opts[\"authenticated\"].(bool) {\n\t\tfor _, k := range []string{tokenProvider.TokenName} {\n\t\t\tw.Header().Add(\"Set-Cookie\", k+\"=delete;\"+cookies.GetDeleteAttributes()+\" expires=Thu, 01 Jan 1970 00:00:00 GMT\")\n\t\t}\n\t}\n\n\t// Add non-caching headers\n\tw.Header().Set(\"Cache-Control\", \"no-store\")\n\tw.Header().Set(\"Pragma\", \"no-cache\")\n\n\t// Create JWT token\n\tif opts[\"authenticated\"].(bool) && !authorized {\n\t\tclaims := opts[\"user_claims\"].(*jwtclaims.UserClaims)\n\t\tclaims.Issuer = utils.GetCurrentURL(r)\n\t\tclaims.IssuedAt = time.Now().Unix()\n\t\tvar userToken string\n\t\tvar tokenError error\n\t\tswitch tokenProvider.TokenSignMethod {\n\t\tcase \"HS512\", \"HS384\", \"HS256\":\n\t\t\tuserToken, tokenError = 
claims.GetToken(tokenProvider.TokenSignMethod, []byte(tokenProvider.TokenSecret))\n\t\tcase \"RS512\", \"RS384\", \"RS256\":\n\t\t\tvar privKey *rsa.PrivateKey\n\t\t\tvar keyID string\n\t\t\tprivKey, keyID, tokenError = tokenProvider.GetPrivateKey()\n\t\t\tif tokenError == nil {\n\t\t\t\ttokenOpts := make(map[string]interface{})\n\t\t\t\ttokenOpts[\"method\"] = tokenProvider.TokenSignMethod\n\t\t\t\tif keyID != \"\" {\n\t\t\t\t\ttokenOpts[\"kid\"] = keyID\n\t\t\t\t}\n\t\t\t\ttokenOpts[\"private_key\"] = privKey\n\t\t\t\tuserToken, tokenError = claims.GetSignedToken(tokenOpts)\n\t\t\t}\n\t\tdefault:\n\t\t\topts[\"status_code\"] = 500\n\t\t\topts[\"authenticated\"] = false\n\t\t\topts[\"message\"] = \"Internal Server Error\"\n\t\t\tlog.Error(\n\t\t\t\t\"invalid signing method\",\n\t\t\t\tzap.String(\"request_id\", reqID),\n\t\t\t\tzap.String(\"token_sign_method\", tokenProvider.TokenSignMethod),\n\t\t\t)\n\t\t}\n\t\tif tokenError != nil {\n\t\t\topts[\"status_code\"] = 500\n\t\t\topts[\"authenticated\"] = false\n\t\t\topts[\"message\"] = \"Internal Server Error\"\n\t\t\tlog.Warn(\n\t\t\t\t\"token signing error\",\n\t\t\t\tzap.String(\"request_id\", reqID),\n\t\t\t\tzap.String(\"error\", tokenError.Error()),\n\t\t\t)\n\t\t} else {\n\t\t\tif opts[\"authenticated\"].(bool) {\n\t\t\t\topts[\"user_token\"] = userToken\n\t\t\t\tw.Header().Set(\"Authorization\", \"Bearer \"+userToken)\n\t\t\t\tw.Header().Set(\"Set-Cookie\", tokenProvider.TokenName+\"=\"+userToken+\";\"+cookies.GetAttributes())\n\t\t\t}\n\t\t}\n\t}\n\n\t// If the requested content type is JSON, then handle it separately.\n\tif opts[\"content_type\"].(string) == \"application/json\" {\n\t\treturn ServeAPILogin(w, r, opts)\n\t}\n\n\t// Follow redirect URL when authenticated.\n\tif opts[\"authenticated\"].(bool) {\n\t\tif cookie, err := r.Cookie(redirectToToken); err == nil {\n\t\t\tif redirectURL, err := url.Parse(cookie.Value); err == nil {\n\t\t\t\tlog.Debug(\n\t\t\t\t\t\"detected cookie-based redirect\",\n\t\t\t\t\tzap.String(\"request_id\", reqID),\n\t\t\t\t\tzap.String(\"redirect_url\", redirectURL.String()),\n\t\t\t\t)\n\t\t\t\tw.Header().Set(\"Location\", redirectURL.String())\n\t\t\t\tw.Header().Add(\"Set-Cookie\", redirectToToken+\"=delete;\"+cookies.GetDeleteAttributes()+\" expires=Thu, 01 Jan 1970 00:00:00 GMT\")\n\t\t\t\tw.WriteHeader(302)\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}\n\n\t// If authenticated, redirect to portal.\n\tif opts[\"authenticated\"].(bool) {\n\t\tw.Header().Set(\"Location\", path.Join(authURLPath, \"portal\"))\n\t\tw.WriteHeader(302)\n\t\treturn nil\n\t}\n\n\t// Display login page\n\tresp := uiFactory.GetArgs()\n\tif title, exists := opts[\"ui_title\"]; exists {\n\t\tresp.Title = title.(string)\n\t} else {\n\t\tresp.Title = \"Sign In\"\n\t}\n\n\tif msg, exists := opts[\"message\"]; exists {\n\t\tresp.Message = msg.(string)\n\t}\n\n\tresp.Data[\"login_options\"] = opts[\"login_options\"]\n\tcontent, err := uiFactory.Render(\"login\", resp)\n\tif err != nil {\n\t\tlog.Error(\"Failed HTML response rendering\", zap.String(\"request_id\", reqID), zap.String(\"error\", err.Error()))\n\t\tw.Header().Set(\"Content-Type\", \"text/plain\")\n\t\tw.WriteHeader(500)\n\t\tw.Write([]byte(`Internal Server Error`))\n\t\treturn err\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"text/html\")\n\tw.WriteHeader(opts[\"status_code\"].(int))\n\tw.Write(content.Bytes())\n\treturn nil\n}", "title": "" }, { "docid": "2041e824133a5939becd9dd8918faf2c", "score": "0.60234195", "text": "func (app *application) loginUser(w 
http.ResponseWriter, r *http.Request) {\n\terr := r.ParseForm()\n\tif err != nil {\n\t\tapp.clientError(w, http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tform := forms.New(r.PostForm)\n\tform.MatchesPattern(\"email\", forms.RxEmail)\n\n\tif !form.Valid() {\n\t\tapp.render(w, r,\"login.page.tmpl\", &templateData{\n\t\t\tForm: form,\n\t\t})\n\n\t\treturn\n\t}\n\n\t/*\n\t\tFetch the user details\n\t*/\n\tid, err := app.users.Authenticate(form.Get(\"email\"), form.Get(\"password\"))\n\tif err != nil {\n\t\tif errors.Is(err, models.ErrInvalidCredentials)\t{\n\t\t\tform.Errors.Add(\"email\", \"Invalid Password for given email\")\n\t\t\tapp.render(w, r, \"login.page.tmpl\", &templateData{Form: form})\n\t\t}else {\n\t\t\tapp.serverError(w, err)\n\t\t}\n\t\treturn\n\t}\n\n\t/*\n\t\tIf authentication successful add a flash message\n\t */\n\tapp.session.Put(r, \"flash\", \"Logged in successfully\")\n\tapp.session.Put(r, \"authenticatedUserID\", id)\n\n\t/*\n\t\tRedirect to home or snippets index page page\n\t*/\n\thttp.Redirect(w, r, \"/snippet/create\", http.StatusSeeOther)\n}", "title": "" } ]
95314ebabc72dc6a7abad8ec334eeff8
ScriptProcessFilterFunctionName sets the optional parameter "scriptProcessFilter.functionName": Optional field used to limit returned processes to those originating from a script function with the given function name.
[ { "docid": "56b83cda01ae58219d54706730b3d71e", "score": "0.890707", "text": "func (c *ProcessesListScriptProcessesCall) ScriptProcessFilterFunctionName(scriptProcessFilterFunctionName string) *ProcessesListScriptProcessesCall {\n\tc.urlParams_.Set(\"scriptProcessFilter.functionName\", scriptProcessFilterFunctionName)\n\treturn c\n}", "title": "" } ]
[ { "docid": "adaf640bea3c5a0f2b0fe84fbb9418d6", "score": "0.7150304", "text": "func (c *ProcessesListCall) UserProcessFilterFunctionName(userProcessFilterFunctionName string) *ProcessesListCall {\n\tc.urlParams_.Set(\"userProcessFilter.functionName\", userProcessFilterFunctionName)\n\treturn c\n}", "title": "" }, { "docid": "c09e999dda71f07dec0300d49e2e8389", "score": "0.56231713", "text": "func (c *ProcessesListScriptProcessesCall) ScriptProcessFilterTypes(scriptProcessFilterTypes ...string) *ProcessesListScriptProcessesCall {\n\tc.urlParams_.SetMulti(\"scriptProcessFilter.types\", append([]string{}, scriptProcessFilterTypes...))\n\treturn c\n}", "title": "" }, { "docid": "b3fc8a58995b85d37d9117ac41ad0efd", "score": "0.5278736", "text": "func (s *AwsSecurityFindingFilters) SetProcessName(v []*StringFilter) *AwsSecurityFindingFilters {\n\ts.ProcessName = v\n\treturn s\n}", "title": "" }, { "docid": "fec0f20d190e33453ac94bff6c28ad90", "score": "0.5112659", "text": "func setupFilter() error {\n\tff := viper.GetString(\"connect.filter-function\")\n\tif ff == \"\" {\n\t\treturn nil\n\t}\n\n\t// Execute Starlark program in a file.\n\tthread = &starlark.Thread{Name: \"filter thread\"}\n\tglobals, err := starlark.ExecFile(thread, ff, nil, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif _, ok := globals[\"filter\"]; !ok {\n\t\treturn errors.New(\"function filter not found\")\n\t}\n\n\t// Retrieve a module global.\n\tfilterFunction = globals[\"filter\"]\n\n\tfilterFunctionProvided = true\n\n\treturn nil\n}", "title": "" }, { "docid": "4cdf57707219d18bc0d7710f8d0b4209", "score": "0.50329", "text": "func (s *ReservationSetItem) SetFunctionName(v string) *ReservationSetItem {\n\ts.FunctionName = &v\n\treturn s\n}", "title": "" }, { "docid": "5a03a84d58b73987e2ce7e6744b4ca53", "score": "0.50082976", "text": "func (s *DissociateUsersInput) SetFunctionName(v string) *DissociateUsersInput {\n\ts.FunctionName = &v\n\treturn s\n}", "title": "" }, { "docid": "68f9668141767e2dc194865a97cc9913", "score": "0.49544862", "text": "func FilterFunctionTags(input map[string]string) map[string]string {\n\tif input == nil {\n\t\treturn nil\n\t}\n\n\toutput := make(map[string]string)\n\tfor k, v := range input {\n\t\toutput[k] = v\n\t}\n\n\t// filter out DD_TAGS & DD_EXTRA_TAGS\n\tddTags := configUtils.GetConfiguredTags(config.Datadog, false)\n\tfor _, tag := range ddTags {\n\t\ttagParts := strings.SplitN(tag, \":\", 2)\n\t\tif len(tagParts) != 2 {\n\t\t\tlog.Warnf(\"Cannot split tag %s\", tag)\n\t\t\tcontinue\n\t\t}\n\t\ttagKey := tagParts[0]\n\t\tdelete(output, tagKey)\n\t}\n\n\t// filter out function specific tags\n\tfor _, tagKey := range functionTagsToIgnore {\n\t\tdelete(output, tagKey)\n\t}\n\n\treturn output\n}", "title": "" }, { "docid": "93e1829996d692836689ce56c3af0638", "score": "0.4953043", "text": "func (c *ProcessesListScriptProcessesCall) ScriptProcessFilterEndTime(scriptProcessFilterEndTime string) *ProcessesListScriptProcessesCall {\n\tc.urlParams_.Set(\"scriptProcessFilter.endTime\", scriptProcessFilterEndTime)\n\treturn c\n}", "title": "" }, { "docid": "eccb061fe336017db629577f3b3c519e", "score": "0.49475312", "text": "func NewFilteringProcessTable(base ProcessTable, filter ProcessFilterFunc) ProcessTable {\n\treturn &filteringProcessTable{\n\t\tbase: base,\n\t\tfilter: filter,\n\t}\n}", "title": "" }, { "docid": "e6804e992b883e4f4944ae52f4ca6e56", "score": "0.4931282", "text": "func (c *ProcessesListScriptProcessesCall) ScriptProcessFilterStatuses(scriptProcessFilterStatuses ...string) 
*ProcessesListScriptProcessesCall {\n\tc.urlParams_.SetMulti(\"scriptProcessFilter.statuses\", append([]string{}, scriptProcessFilterStatuses...))\n\treturn c\n}", "title": "" }, { "docid": "905bea4e2919c8224545d63654281fee", "score": "0.48596415", "text": "func (c *ProcessesListScriptProcessesCall) ScriptProcessFilterDeploymentId(scriptProcessFilterDeploymentId string) *ProcessesListScriptProcessesCall {\n\tc.urlParams_.Set(\"scriptProcessFilter.deploymentId\", scriptProcessFilterDeploymentId)\n\treturn c\n}", "title": "" }, { "docid": "f0e1cd06fc2bc53705fca8556e491ec8", "score": "0.48591915", "text": "func (o LookupEventSourceMappingResultOutput) FunctionName() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v LookupEventSourceMappingResult) *string { return v.FunctionName }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "d6b4f83178238b7575ebf1b98f6c8593", "score": "0.48190764", "text": "func (c *ProcessesListScriptProcessesCall) ScriptProcessFilterStartTime(scriptProcessFilterStartTime string) *ProcessesListScriptProcessesCall {\n\tc.urlParams_.Set(\"scriptProcessFilter.startTime\", scriptProcessFilterStartTime)\n\treturn c\n}", "title": "" }, { "docid": "4c1071f6644e4bcd925bf8739d8daf75", "score": "0.48036146", "text": "func (c *ProcessesListScriptProcessesCall) ScriptProcessFilterUserAccessLevels(scriptProcessFilterUserAccessLevels ...string) *ProcessesListScriptProcessesCall {\n\tc.urlParams_.SetMulti(\"scriptProcessFilter.userAccessLevels\", append([]string{}, scriptProcessFilterUserAccessLevels...))\n\treturn c\n}", "title": "" }, { "docid": "40e907fdd9d020e2cdc6a3a934452cf5", "score": "0.47473916", "text": "func (ap *Platform) FilterFunctionsByPermissions(ctx context.Context,\n\tpermissionOptions *opa.PermissionOptions,\n\tfunctions []platform.Function) ([]platform.Function, error) {\n\n\t// no cleansing is mandated\n\tif len(permissionOptions.MemberIds) == 0 || len(functions) == 0 {\n\t\treturn functions, nil\n\t}\n\n\t// prepare resource list\n\tresources := make([]string, len(functions))\n\tfor idx, function := range functions {\n\t\tfunctionName := function.GetConfig().Meta.Name\n\t\tprojectName := function.GetConfig().Meta.Labels[common.NuclioResourceLabelKeyProjectName]\n\t\tresources[idx] = opa.GenerateFunctionResourceString(projectName, functionName)\n\t}\n\n\tallowedList, err := ap.QueryOPAMultipleResources(ctx, resources, opa.ActionRead, permissionOptions)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"Failed querying OPA for function permissions\")\n\t}\n\n\t// fill permitted / filtered function list\n\tvar permittedFunctions []platform.Function\n\tvar filteredFunctionNames []string\n\tfor idx, allowed := range allowedList {\n\t\tif allowed {\n\t\t\tpermittedFunctions = append(permittedFunctions, functions[idx])\n\t\t} else {\n\t\t\tfilteredFunctionNames = append(filteredFunctionNames, functions[idx].GetConfig().Meta.Name)\n\t\t}\n\t}\n\n\tif len(filteredFunctionNames) > 0 {\n\t\tap.Logger.DebugWithCtx(ctx,\n\t\t\t\"Some functions were filtered out\",\n\t\t\t\"functionNames\", filteredFunctionNames)\n\t}\n\treturn permittedFunctions, nil\n}", "title": "" }, { "docid": "80149836c8e7af1e764485b79912eda6", "score": "0.47055376", "text": "func (s *DescribeAssociatedUsersInput) SetFunctionName(v string) *DescribeAssociatedUsersInput {\n\ts.FunctionName = &v\n\treturn s\n}", "title": "" }, { "docid": "e34b05bc756e6a1da73de9af98d0c357", "score": "0.46983963", "text": "func (o GetCustomDomainsDomainRouteConfigOutput) FunctionName() pulumi.StringOutput 
{\n\treturn o.ApplyT(func(v GetCustomDomainsDomainRouteConfig) string { return v.FunctionName }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "152e6ad712ac333bfe0b21a8e8b0be81", "score": "0.46732077", "text": "func (f *Filter) FilterBySourceName(ctx interfaces.AppFunctionContext, data interface{}) (continuePipeline bool, result interface{}) {\n\tf.ctx = ctx\n\tevent, err := f.setupForFiltering(\"FilterBySourceName\", \"SourceName\", ctx.LoggingClient(), data)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tok := f.doEventFilter(\"SourceName\", event.SourceName, ctx.LoggingClient())\n\tif ok {\n\t\treturn true, *event\n\t}\n\n\treturn false, nil\n}", "title": "" }, { "docid": "a3feeb2aef5c309151431e636635b60f", "score": "0.4662818", "text": "func (s *RequestRuleStruct) SetFunctionName(v string) *RequestRuleStruct {\n\ts.FunctionName = &v\n\treturn s\n}", "title": "" }, { "docid": "a0a3253039e5f4664f7421387b659b03", "score": "0.46491918", "text": "func Filter(filter FilterFunction) {\n\tglobalFilters = append(globalFilters, filter)\n}", "title": "" }, { "docid": "cbea29a00f903c11f13143f8f8de2a19", "score": "0.46445647", "text": "func (o CustomDomainRouteConfigOutput) FunctionName() pulumi.StringOutput {\n\treturn o.ApplyT(func(v CustomDomainRouteConfig) string { return v.FunctionName }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "56246eb700a529c0829ea8078dbe11c9", "score": "0.46324494", "text": "func (s *NiftyDescribePerformanceChartInput) SetFunctionName(v string) *NiftyDescribePerformanceChartInput {\n\ts.FunctionName = &v\n\treturn s\n}", "title": "" }, { "docid": "1b2ee8b1febbd86c24adf3b740b67929", "score": "0.46317324", "text": "func (s *NiftyUpdateAlarmInput) SetFunctionName(v string) *NiftyUpdateAlarmInput {\n\ts.FunctionName = &v\n\treturn s\n}", "title": "" }, { "docid": "0d0a3f2707d55d8c28b92aa7527dfeee", "score": "0.46242535", "text": "func (s *NiftyDeleteAlarmInput) SetFunctionName(v string) *NiftyDeleteAlarmInput {\n\ts.FunctionName = &v\n\treturn s\n}", "title": "" }, { "docid": "f8cbeb9d52388b14c4f28c43185c11eb", "score": "0.45979545", "text": "func Filter() string {\n\treturn `\nfunc Filter<CONDITIONAL_TYPE>(f func(<TYPE>) bool, list []<TYPE>) []<TYPE> {\n\tif f == nil {\n\t\treturn []<TYPE>{}\n\t}\n\tvar newList []<TYPE>\n\tfor _, v := range list {\n\t\tif f(v) {\n\t\t\tnewList = append(newList, v)\n\t\t}\n\t}\n\treturn newList\n}\n`\n}", "title": "" }, { "docid": "46b6f8e948112ae4cc3c764be915a894", "score": "0.45861134", "text": "func (s *AssociateUsersInput) SetFunctionName(v string) *AssociateUsersInput {\n\ts.FunctionName = &v\n\treturn s\n}", "title": "" }, { "docid": "68f6bb6a9f40a40f63947253499d7af9", "score": "0.4563704", "text": "func (c *ResourceConfig) RemoveFunction(resourceName string, functionName string) {\n\tif resourceName == \"\" {\n\t\tresourceName = \"_\"\n\t}\n\n\tident := flect.New(resourceName)\n\trCamel := ident.Camelize().String()\n\tname := getFuncName(ident, functionName)\n\n\tfor i, f := range c.Functions[rCamel] {\n\t\tif name == f.Name {\n\t\t\tc.Functions[rCamel] = append(c.Functions[rCamel][:i], c.Functions[rCamel][i+1:]...)\n\n\t\t\treturn\n\t\t}\n\t}\n}", "title": "" }, { "docid": "40be3834b0c3c7f6b31ad6f2411ee3e9", "score": "0.45583254", "text": "func (s Workflows) FilterByFn(fn WorkflowFilterFn) Workflows {\n\tworkflows := make(Workflows, len(s))\n\tnum := 0\n\tfor _, w := range s {\n\t\tif fn(w) {\n\t\t\tworkflows[num] = w\n\t\t\tnum++\n\t\t}\n\t}\n\treturn workflows[:num]\n}", "title": "" }, { "docid": 
"4557fbd6021698ea3e00049beace7f69", "score": "0.4549101", "text": "func getWindowsAMIFilter(windowsServerVersion windows.ServerVersion) string {\n\tswitch windowsServerVersion {\n\tcase windows.Server2019:\n\t\treturn \"Windows_Server-2019-English-Core-Base-????.??.??\"\n\tcase windows.Server2022:\n\tdefault:\n\t}\n\treturn \"Windows_Server-2022-English-Core-Base-????.??.??\"\n}", "title": "" }, { "docid": "40dc738235eb2e7a91a91f70a0fc3b25", "score": "0.45475703", "text": "func (ap *Platform) FilterFunctionEventsByPermissions(ctx context.Context,\n\tpermissionOptions *opa.PermissionOptions,\n\tfunctionEvents []platform.FunctionEvent) ([]platform.FunctionEvent, error) {\n\n\t// no cleansing is mandated\n\tif len(permissionOptions.MemberIds) == 0 || len(functionEvents) == 0 {\n\t\treturn functionEvents, nil\n\t}\n\n\tvar resources []string\n\tfor _, functionEventInstance := range functionEvents {\n\t\tprojectName := functionEventInstance.GetConfig().Meta.Labels[common.NuclioResourceLabelKeyProjectName]\n\t\tfunctionName := functionEventInstance.GetConfig().Meta.Labels[common.NuclioResourceLabelKeyFunctionName]\n\t\tfunctionEventName := functionEventInstance.GetConfig().Meta.Name\n\t\tresources = append(resources, opa.GenerateFunctionEventResourceString(projectName,\n\t\t\tfunctionName,\n\t\t\tfunctionEventName))\n\t}\n\tallowedList, err := ap.QueryOPAMultipleResources(ctx, resources, opa.ActionRead, permissionOptions)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"Failed querying OPA for function events permissions\")\n\t}\n\n\t// fill permitted / filtered function event list\n\tvar permittedFunctionEvents []platform.FunctionEvent\n\tvar filteredFunctionEventNames []string\n\tfor idx, allowed := range allowedList {\n\t\tif allowed {\n\t\t\tpermittedFunctionEvents = append(permittedFunctionEvents, functionEvents[idx])\n\t\t} else {\n\t\t\tfilteredFunctionEventNames = append(filteredFunctionEventNames, functionEvents[idx].GetConfig().Meta.Name)\n\t\t}\n\t}\n\n\tif len(filteredFunctionEventNames) > 0 {\n\t\tap.Logger.DebugWithCtx(ctx,\n\t\t\t\"Some function events were filtered out\",\n\t\t\t\"functionEventNames\", filteredFunctionEventNames)\n\t}\n\treturn permittedFunctionEvents, nil\n}", "title": "" }, { "docid": "8e251066b509cf668baca50c11eee4fb", "score": "0.45408857", "text": "func (f FilteredFunctions) Filter(name string) map[string]*Function {\n\tnewMap := make(map[string]*Function)\n\tfor it := range f {\n\t\tif f[it].Annotations().Has(name) {\n\t\t\tnewMap[it] = f[it]\n\t\t}\n\t}\n\n\treturn newMap\n}", "title": "" }, { "docid": "a5d301f85217c62e974ef30526473502", "score": "0.4523695", "text": "func (s *NiftyDescribePerformanceChartOutput) SetFunctionName(v string) *NiftyDescribePerformanceChartOutput {\n\ts.FunctionName = &v\n\treturn s\n}", "title": "" }, { "docid": "97f2f905bfcde2aacc695853d2658954", "score": "0.45153525", "text": "func (c *ProcessesListCall) UserProcessFilterScriptId(userProcessFilterScriptId string) *ProcessesListCall {\n\tc.urlParams_.Set(\"userProcessFilter.scriptId\", userProcessFilterScriptId)\n\treturn c\n}", "title": "" }, { "docid": "5c43359a00e820c17dd68d5d2417891d", "score": "0.4501915", "text": "func ProcessScript(root *ast.File) error {\n\treturn sets.PreprocessBuiltinFunc(root, \"script\", func(values []ast.Node) (ast.Expr, error) {\n\t\tfor _, v := range values {\n\t\t\tlit, ok := v.(*ast.BasicLit)\n\t\t\tif ok {\n\t\t\t\tsrc, err := literal.Unquote(lit.Value)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, errors.WithMessage(err, \"unquote 
script value\")\n\t\t\t\t}\n\t\t\t\texpr, err := parser.ParseExpr(\"-\", src)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, errors.Errorf(\"script value(%s) is invalid CueLang\", src)\n\t\t\t\t}\n\t\t\t\treturn expr, nil\n\t\t\t}\n\t\t}\n\t\treturn nil, errors.New(\"script parameter error\")\n\t})\n}", "title": "" }, { "docid": "97acd8598bbae3b211c8757dce53584b", "score": "0.44969618", "text": "func (b *Buffering) SetProcessingFunction(processFunction func(string, interface{}) error) {\n\tb.lineProcessingFunction = processFunction\n}", "title": "" }, { "docid": "82b6d7198c1ddbd9a620b6a22981a826", "score": "0.4494372", "text": "func ExecuteFunction(funcName string) error {\n\tcmd := exec.Command(\"dispatch\", \"exec\", funcName, \"--wait\")\n\toutput, err := cmd.CombinedOutput()\n\tif err != nil {\n\t\tlog.Printf(\"Unable to run function: %v, \\n%s\\n\", err, output)\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "642597f43460c3d22d929e1474b1c7f6", "score": "0.44891253", "text": "func (r RunFns) getFunctionsFromFunctions() ([]kio.Filter, error) {\n\treturn r.getFunctionFilters(true, r.Functions...)\n}", "title": "" }, { "docid": "01c48b923514e29ae94997d829a8133c", "score": "0.44725978", "text": "func (me *Shaper) AddFilter(f func(string) string) *Shaper {\n\tme.ShaperStack = func(a func(string) string, b func(string) string) func(string) string {\n\t\treturn func(s string) string {\n\t\t\treturn a(b(s))\n\t\t}\n\t}(f, me.ShaperStack)\n\treturn me\n}", "title": "" }, { "docid": "c8ff47648abcd393d46e5810040e3001", "score": "0.4472546", "text": "func (fn FilterFunc) Filter(file filer.File) bool {\n\treturn fn(file)\n}", "title": "" }, { "docid": "a79b85ee5de1300c48b9cf86a703abe4", "score": "0.4470179", "text": "func (s *ProcessStat) SetPidFilter(filter PidFilterFunc) {\n\ts.filter = filter\n\treturn\n}", "title": "" }, { "docid": "7b60a516384b1b59211b025db8cfab5a", "score": "0.44641355", "text": "func (r *Rule) SetFilterFunc(fn func(val any) (any, error)) *Rule {\n\tr.filterFunc = fn\n\treturn r\n}", "title": "" }, { "docid": "f9d75c8c4d46280c2d17c6da323690f6", "score": "0.44493827", "text": "func (f *Formatter) RemoveFunction(name string) *Formatter {\n\tif _, ok := f.functions[name]; !ok {\n\t\treturn f\n\t}\n\n\tdelete(f.functions, name)\n\n\treturn f\n}", "title": "" }, { "docid": "b65071432e8817204d259a2a827f85de", "score": "0.4441258", "text": "func (r *Alias) FunctionName() pulumi.StringOutput {\n\treturn (pulumi.StringOutput)(r.s.State[\"functionName\"])\n}", "title": "" }, { "docid": "64e3a8e5dba093d200aea129fecb8d08", "score": "0.443065", "text": "func (s *ServerlessConfig) RemoveFunction(n string) {\n\tdelete(s.Functions, n)\n}", "title": "" }, { "docid": "e861566128cb8bf2ed3250d4b6851915", "score": "0.4401103", "text": "func (s *IdentityProviderDetails) SetFunction(v string) *IdentityProviderDetails {\n\ts.Function = &v\n\treturn s\n}", "title": "" }, { "docid": "1e2e4153fb38a8b75d76f09b0f58557d", "score": "0.43992478", "text": "func (s *NiftyCreateAlarmInput) SetFunctionName(v string) *NiftyCreateAlarmInput {\n\ts.FunctionName = &v\n\treturn s\n}", "title": "" }, { "docid": "f7388b21e3b76481ef60caafbdafefad", "score": "0.43679196", "text": "func (h *Handler) SetProcessName(name string) {\n\th.ProcessName = name\n}", "title": "" }, { "docid": "42039b03187f312b05a068d01aea1d14", "score": "0.4352504", "text": "func Filter(frame wdte.Frame, args ...wdte.Func) (filter wdte.Func) {\n\tswitch len(args) {\n\tcase 0:\n\t\treturn wdte.GoFunc(Filter)\n\t}\n\n\tf := 
args[0]\n\n\treturn wdte.GoFunc(func(frame wdte.Frame, args ...wdte.Func) wdte.Func {\n\t\tswitch len(args) {\n\t\tcase 0:\n\t\t\treturn filter\n\t\t}\n\n\t\ts := args[0].(Stream)\n\n\t\treturn NextFunc(func(frame wdte.Frame) (wdte.Func, bool) {\n\t\t\tfor {\n\t\t\t\tn, ok := s.Next(frame)\n\t\t\t\tif !ok {\n\t\t\t\t\treturn nil, false\n\t\t\t\t}\n\n\t\t\t\tif f.Call(frame.Sub(\"filter\"), n) == wdte.Bool(true) {\n\t\t\t\t\treturn n, true\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t})\n}", "title": "" }, { "docid": "76868d89cf5d86a8ba8ed7206a727778", "score": "0.43519166", "text": "func (c *ProcessesListCall) UserProcessFilterProjectName(userProcessFilterProjectName string) *ProcessesListCall {\n\tc.urlParams_.Set(\"userProcessFilter.projectName\", userProcessFilterProjectName)\n\treturn c\n}", "title": "" }, { "docid": "5c69e6184f2fb405b7f996f1047e231a", "score": "0.4325162", "text": "func (s *Stage) Filter(workers int, f T) *Stage {\n\tvalue := reflect.ValueOf(f)\n\tif value.Kind() != reflect.Func {\n\t\tpanic(\"Filter argument must be a function\")\n\t}\n\n\t// Checks that the filter function accepts one arguments\n\tftype := value.Type()\n\tif ftype.NumIn() != 1 {\n\t\tpanic(\"Filter argument must be a function that receives one values\")\n\t}\n\n\t// Checks that the value received by Map function is the same as\n\t// the value generated in previous stage\n\titype := ftype.In(0)\n\tif itype != s.output.Type().Elem() {\n\t\tpanic(fmt.Sprintf(\"Input of Filter stage function missmatch. Expected %v, found %v\", s.output.Type().Elem(), itype))\n\t}\n\n\t// Checks that return value is boolean\n\tif ftype.NumOut() != 1 {\n\t\tpanic(\"Filter function must return exactly one boolean value\")\n\t}\n\totype := ftype.Out(0)\n\tif otype.Kind() != reflect.Bool {\n\t\tpanic(\"Filter function output must be boolean\")\n\t}\n\n\toutput := reflect.MakeChan(s.output.Type(), ChanBufferDefault)\n\twg := sync.WaitGroup{}\n\tvar executor func()\n\texecutor = func() {\n\t\tdefer wg.Done()\n\t\ti := 0\n\t\tfor e, ok := s.output.Recv(); ok; e, ok = s.output.Recv() {\n\t\t\tpreserve := value.Call([]reflect.Value{e})\n\t\t\tif preserve[0].Bool() {\n\t\t\t\toutput.Send(e)\n\t\t\t}\n\n\t\t\ti++\n\t\t\tif i > 10000 {\n\t\t\t\twg.Add(1)\n\t\t\t\tgo executor()\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\n\tgo func() {\n\t\tfor i := 0; i < workers; i++ {\n\t\t\twg.Add(1)\n\t\t\tgo executor()\n\t\t}\n\t\twg.Wait()\n\t\toutput.Close()\n\t}()\n\n\treturn &Stage{output}\n}", "title": "" }, { "docid": "8a4e6f0230bfc7b6e3a58e8b3a5e9b2c", "score": "0.43104288", "text": "func callPythonFilterText(text string) string {\n\tp := C.CString(text)\n\tret := C.Py_Filter_Text(p)\n\tresult := C.GoString(ret)\n\tdefer C.free(unsafe.Pointer(p))\n\treturn result\n}", "title": "" }, { "docid": "450924a210fbfa9987f2b9d3c1426289", "score": "0.42949364", "text": "func (v *View) filterFunc() func(span.URI) bool {\n\tfilterer := buildFilterer(v.rootURI.Filename(), v.gomodcache, v.Options())\n\treturn func(uri span.URI) bool {\n\t\t// Only filter relative to the configured root directory.\n\t\tif source.InDir(v.folder.Filename(), uri.Filename()) {\n\t\t\treturn pathExcludedByFilter(strings.TrimPrefix(uri.Filename(), v.folder.Filename()), filterer)\n\t\t}\n\t\treturn false\n\t}\n}", "title": "" }, { "docid": "4f12e7b95eedc5be448c4a79bcb34cbf", "score": "0.42908376", "text": "func CreateFunction(funcName, funcLocation string) {\n\t_, file := path.Split(funcLocation)\n\thandler := fmt.Sprintf(\"--handler=%s.handle\", 
file[0:len(file)-len(path.Ext(funcLocation))])\n\tcmd := exec.Command(\"dispatch\", \"create\", \"function\", funcName, funcLocation, \"--image=python3\", handler)\n output, err := cmd.CombinedOutput()\n\tif err != nil {\n\t\tfmt.Printf(\"Failed to create function %v. %v\\n%s\\n\", funcName, err, output)\n\t\tpanic(\"Unable to create function\")\n\t}\n\tvar fn struct {\n\t\tStatus string\n\t}\n\tfor fn.Status != \"READY\" {\n\t\tcmd := exec.Command(\"dispatch\", \"get\", \"functions\", funcName, \"--json\")\n\t\tstdout, err := cmd.StdoutPipe()\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tif err := cmd.Start(); err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tif err := json.NewDecoder(stdout).Decode(&fn); err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tif err := cmd.Wait(); err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t}\n\tfmt.Printf(\"Created function %v\\n\", funcName)\n\tfmt.Println(\"Running once to negate zero-scaling\")\n\tif err := ExecuteFunction(funcName); err != nil {\n\t\tlog.Fatalf(\"Failed to run function %v. %v\", funcName, err)\n\t}\n}", "title": "" }, { "docid": "c90b29cf5978a835e5fcc09c87d4b6f8", "score": "0.42803183", "text": "func GetFilter(fv string) (filter []rune) {\n\tif fv != \"\" {\n\t\tn, _ := strconv.ParseInt(fv, 10, 32)\n\t\tfilter = []rune(fmt.Sprintf(\"%.3b\", n))\n\t} else {\n\t\tfilter, _ = binutil.GenerateNLengthRandomBinRuneSlice(3, 7)\n\t}\n\treturn\n}", "title": "" }, { "docid": "8122294f7eff11ce8f775799b3aac178", "score": "0.42717654", "text": "func FilterString(f func(string, int) bool, input []string) (output []string) {\n\toutput = make([]string, 0)\n\tfor idx, data := range input {\n\t\tif f(data, idx) {\n\t\t\toutput = append(output, data)\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "a8a53bbbdf3aaf25ac1daad99b73653b", "score": "0.4257701", "text": "func (query *ContractCallQuery) SetFunction(name string, params *ContractFunctionParameters) *ContractCallQuery {\n\tif params == nil {\n\t\tparams = NewContractFunctionParameters()\n\t}\n\n\tquery.functionParameters = params._Build(&name)\n\treturn query\n}", "title": "" }, { "docid": "5a788ffbc443dd647c8644b87b8b0f09", "score": "0.42476273", "text": "func (c *Cursor) UnregisterFilterFunc(name string) {\n\tfor idx, n := range c.filterOrder {\n\t\tif n == name {\n\t\t\tc.filterOrder = append(c.filterOrder[:idx], c.filterOrder[idx+1:]...)\n\t\t\tdelete(c.filters, name)\n\t\t\treturn\n\t\t}\n\t}\n}", "title": "" }, { "docid": "51c72aabdb9f1a40826167ca0dfbad8f", "score": "0.42443475", "text": "func Filter(pattern string, f func(*Context)) {\n\tmListFilter = append(mListFilter, patternFunc{\n\t\tPattern: pattern,\n\t\tHandleFunc: f,\n\t})\n}", "title": "" }, { "docid": "d0a4efcbb876e88465eb50587cc4a615", "score": "0.42397308", "text": "func CreateFilterFuncs(ctx context.Context, r *adapter.LocalRuntime, filters []string, img *adapter.ContainerImage) ([]imagefilters.ResultFilter, error) {\n\tvar filterFuncs []imagefilters.ResultFilter\n\tfor _, filter := range filters {\n\t\tsplitFilter := strings.Split(filter, \"=\")\n\t\tif len(splitFilter) < 2 {\n\t\t\treturn nil, errors.Errorf(\"invalid filter syntax %s\", filter)\n\t\t}\n\t\tswitch splitFilter[0] {\n\t\tcase \"before\":\n\t\t\tbefore, err := r.NewImageFromLocal(splitFilter[1])\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.Wrapf(err, \"unable to find image %s in local stores\", splitFilter[1])\n\t\t\t}\n\t\t\tfilterFuncs = append(filterFuncs, imagefilters.CreatedBeforeFilter(before.Created()))\n\t\tcase \"after\":\n\t\t\tafter, err := 
r.NewImageFromLocal(splitFilter[1])\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.Wrapf(err, \"unable to find image %s in local stores\", splitFilter[1])\n\t\t\t}\n\t\t\tfilterFuncs = append(filterFuncs, imagefilters.CreatedAfterFilter(after.Created()))\n\t\tcase \"readonly\":\n\t\t\treadonly, err := strconv.ParseBool(splitFilter[1])\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.Wrapf(err, \"invalid filter readonly=%s\", splitFilter[1])\n\t\t\t}\n\t\t\tfilterFuncs = append(filterFuncs, imagefilters.ReadOnlyFilter(readonly))\n\t\tcase \"dangling\":\n\t\t\tdanglingImages, err := strconv.ParseBool(splitFilter[1])\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.Wrapf(err, \"invalid filter dangling=%s\", splitFilter[1])\n\t\t\t}\n\t\t\tfilterFuncs = append(filterFuncs, imagefilters.DanglingFilter(danglingImages))\n\t\tcase \"label\":\n\t\t\tlabelFilter := strings.Join(splitFilter[1:], \"=\")\n\t\t\tfilterFuncs = append(filterFuncs, imagefilters.LabelFilter(ctx, labelFilter))\n\t\tcase \"reference\":\n\t\t\treferenceFilter := strings.Join(splitFilter[1:], \"=\")\n\t\t\tfilterFuncs = append(filterFuncs, imagefilters.ReferenceFilter(ctx, referenceFilter))\n\t\tdefault:\n\t\t\treturn nil, errors.Errorf(\"invalid filter %s \", splitFilter[0])\n\t\t}\n\t}\n\tif img != nil {\n\t\tfilterFuncs = append(filterFuncs, imagefilters.OutputImageFilter(img))\n\t}\n\treturn filterFuncs, nil\n}", "title": "" }, { "docid": "cdd7ea338138876d5311fa5b731708e8", "score": "0.4232979", "text": "func (o ConnectorMappingPropertiesOutput) FileFilter() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ConnectorMappingProperties) *string { return v.FileFilter }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "1663fb822eb5b5db1e383cd2e05e85dd", "score": "0.422501", "text": "func (f *Filter) FilterByProfileName(ctx interfaces.AppFunctionContext, data interface{}) (continuePipeline bool, result interface{}) {\n\tf.ctx = ctx\n\tevent, err := f.setupForFiltering(\"FilterByProfileName\", \"ProfileName\", ctx.LoggingClient(), data)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tok := f.doEventFilter(\"ProfileName\", event.ProfileName, ctx.LoggingClient())\n\tif ok {\n\t\treturn true, *event\n\t}\n\n\treturn false, nil\n\n}", "title": "" }, { "docid": "4b469ebaf19a447a875d711be2bb04cb", "score": "0.42221275", "text": "func Filter(vs []string, f func(string) bool) []string {\n\tvsf := make([]string, 0)\n\tfor _, v := range vs {\n\t\tif f(v) {\n\t\t\tvsf = append(vsf, v)\n\t\t}\n\t}\n\treturn vsf\n}", "title": "" }, { "docid": "2be84425531120ffba8a64c22ef1938c", "score": "0.42189205", "text": "func (transaction *ContractExecuteTransaction) SetFunction(name string, params *ContractFunctionParameters) *ContractExecuteTransaction {\n\ttransaction.requireNotFrozen()\n\tif params == nil {\n\t\tparams = NewContractFunctionParameters()\n\t}\n\n\ttransaction.pb.FunctionParameters = params.build(&name)\n\treturn transaction\n}", "title": "" }, { "docid": "f210a975580545ffce342a0f7824fe1f", "score": "0.42106497", "text": "func (p *Process) Match(filter *Process) bool {\n\tif p == nil {\n\t\treturn false\n\t}\n\tif filter == nil {\n\t\treturn true\n\t}\n\tif filter.Protocol != \"\" {\n\t\tif p.Protocol != filter.Protocol {\n\t\t\treturn false\n\t\t}\n\t}\n\t// matches the suffix\n\tif filter.Program != \"\" {\n\t\tif !strings.HasSuffix(p.Program, filter.Program) {\n\t\t\treturn false\n\t\t}\n\t}\n\tif filter.PID != 0 {\n\t\tif p.PID != filter.PID {\n\t\t\treturn false\n\t\t}\n\t}\n\tif filter.LocalIP != \"\" 
{\n\t\tif p.LocalIP != filter.LocalIP {\n\t\t\treturn false\n\t\t}\n\t}\n\tif filter.LocalPort != \"\" {\n\t\tp0 := p.LocalPort\n\t\tif !strings.HasPrefix(p0, \":\") {\n\t\t\tp0 = \":\" + p0\n\t\t}\n\t\tp1 := filter.LocalPort\n\t\tif !strings.HasPrefix(p1, \":\") {\n\t\t\tp1 = \":\" + p1\n\t\t}\n\t\tif p0 != p1 {\n\t\t\treturn false\n\t\t}\n\t}\n\tif filter.RemoteIP != \"\" {\n\t\tif p.RemoteIP != filter.RemoteIP {\n\t\t\treturn false\n\t\t}\n\t}\n\tif filter.RemotePort != \"\" {\n\t\tp0 := p.RemotePort\n\t\tif !strings.HasPrefix(p0, \":\") {\n\t\t\tp0 = \":\" + p0\n\t\t}\n\t\tp1 := filter.RemotePort\n\t\tif !strings.HasPrefix(p1, \":\") {\n\t\t\tp1 = \":\" + p1\n\t\t}\n\t\tif p0 != p1 {\n\t\t\treturn false\n\t\t}\n\t}\n\tif filter.State != \"\" {\n\t\tif p.State != filter.State {\n\t\t\treturn false\n\t\t}\n\t}\n\t// currently only support user name\n\tif filter.User.Username != \"\" {\n\t\tif p.User.Username != filter.User.Username {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "0bbad79aeba913855d68ca5b3f6e2407", "score": "0.42042547", "text": "func Filter(vs []string, f func(string) bool) []string {\n\tvsm := make([]string, 0, len(vs))\n\tfor _, v := range vs {\n\t\tif f(v) {\n\t\t\tvsm = append(vsm, v)\n\t\t}\n\t}\n\treturn vsm\n}", "title": "" }, { "docid": "fe4f2bd3c6f68f4ecdb733af8c30d3f6", "score": "0.41928375", "text": "func exportFilter(name string) bool {\n\treturn IsExported(name)\n}", "title": "" }, { "docid": "3384a92f9d786393b5421b214b5fa717", "score": "0.41886768", "text": "func (r RunFns) getFunctionsFromFunctionPaths() ([]kio.Filter, error) {\n\tbuff := &kio.PackageBuffer{}\n\tfor i := range r.FunctionPaths {\n\t\terr := kio.Pipeline{\n\t\t\tInputs: []kio.Reader{\n\t\t\t\tkio.LocalPackageReader{PackagePath: r.FunctionPaths[i]},\n\t\t\t},\n\t\t\tOutputs: []kio.Writer{buff},\n\t\t}.Execute()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn r.getFunctionFilters(true, buff.Nodes...)\n}", "title": "" }, { "docid": "f6e972063a5d04ef04323d48bd1c3691", "score": "0.41858727", "text": "func (m *MetricEventStringDimensions) SetFilterType(val string) {\n\n}", "title": "" }, { "docid": "348b63ae869fa7bdb13876a6ca2208a0", "score": "0.41668168", "text": "func Filter(filterFn func(message proto.Message) bool) Option {\n\treturn listinternal.FuncOption(func(options *listinternal.Options) {\n\t\toptions.Filter = filterFn\n\t})\n}", "title": "" }, { "docid": "80cb2e3e5f17518c704321dba6338a57", "score": "0.41654512", "text": "func functionName(programCounter uintptr) []byte {\n\tfunction := runtime.FuncForPC(programCounter)\n\tif function == nil {\n\t\treturn unknown\n\t}\n\tname := []byte(function.Name())\n\n\t// Because we provide the filename we can drop the preceding package name.\n\tif lastslash := bytes.LastIndex(name, []byte(\"/\")); lastslash >= 0 {\n\t\tname = name[lastslash+1:]\n\t}\n\t// And the current package name.\n\tif period := bytes.Index(name, []byte(\".\")); period >= 0 {\n\t\tname = name[period+1:]\n\t}\n\t// And we should just replace the interpunct with a dot\n\tname = bytes.ReplaceAll(name, []byte(\"·\"), []byte(\".\"))\n\treturn name\n}", "title": "" }, { "docid": "702cfdae49efda7f2120aa518dc30e93", "score": "0.41493198", "text": "func RemoveFunctionFromPackage(cxprogram *cxast.CXProgram, packageName, functionName string) error {\n\tpkg, err := FindPackage(cxprogram, packageName)\n\tif err != nil {\n\t\treturn err\n\t}\n\tpkg.RemoveFunction(functionName)\n\n\treturn nil\n}", "title": "" }, { "docid": 
"0b3b01c12c63ca18f4dc578c38961bfc", "score": "0.4136742", "text": "func FilterCode(code Code) string {\n\tswitch code {\n\tcase ErrCanceled:\n\t\treturn string(code)\n\tcase ErrUnauthorized:\n\t\treturn string(code)\n\tcase ErrInvalidInput:\n\t\treturn string(code)\n\tcase ErrWrongCreds:\n\t\treturn string(code)\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "2504aa5644e8720f4bac1a7c236eedbe", "score": "0.4131756", "text": "func (w *VMWriter) WriteFunction(name string, nLocals int) {\n\tw.write(fmt.Sprintf(\"function %s %d\", name, nLocals))\n}", "title": "" }, { "docid": "580cecc258dc04bba6566df0b401c453", "score": "0.41246238", "text": "func (f *filter) Filter(e *pb.Message) (*pb.Message, error) {\n\t// doing this in reverse to keep order of the middleware.\n\tfor i := len(f.funcs) - 1; i >= 0; i-- {\n\t\tvar err error\n\t\t// we track the error from the relevant func.\n\t\t// the last that errors and the error gets passed along\n\t\te, err = f.funcs[i](e)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(errors.Cause(err), \"filter\")\n\t\t}\n\t}\n\n\treturn e, nil\n}", "title": "" }, { "docid": "27d8eb5e7fbff198da66562e56215dee", "score": "0.41186503", "text": "func functionName(depth int) string {\n\tpc := make([]uintptr, 1)\n\truntime.Callers(depth+2, pc)\n\tf := runtime.FuncForPC(pc[0])\n\treturn functionNameRegex.FindString(f.Name())\n}", "title": "" }, { "docid": "ce3a434d6283164dc9bf9389cbfceea6", "score": "0.41147405", "text": "func (c MethodsCollection) PostProcessFilters() pPostProcessFilters {\n\treturn pPostProcessFilters{\n\t\tMethod: c.MustGet(\"PostProcessFilters\"),\n\t}\n}", "title": "" }, { "docid": "218e6d61a7fe1724a8118330f1af896c", "score": "0.41117603", "text": "func (p DoltDatabaseProvider) Function(_ *sql.Context, name string) (sql.Function, error) {\n\tfn, ok := p.functions[strings.ToLower(name)]\n\tif !ok {\n\t\treturn nil, sql.ErrFunctionNotFound.New(name)\n\t}\n\treturn fn, nil\n}", "title": "" }, { "docid": "950a39cd5fd2fcec53d90097ef9a62e6", "score": "0.41104433", "text": "func Filter(vs []string, f func(string) bool) []string {\n\tvsr := make([]string, 0)\n\tfor _, v := range vs {\n\t\tif f(v) {\n\t\t\tvsr = append(vsr, v)\n\t\t}\n\t}\n\treturn vsr\n}", "title": "" }, { "docid": "1cbbb84a0828df9d90ad7ee9657fa8e8", "score": "0.4106102", "text": "func (c *Configuration) Function(name string) *CommandSequence {\n\tif c.Functions == nil {\n\t\tc.Functions = make(map[string]*CommandSequence)\n\t}\n\n\tseq, ok := c.Functions[name]\n\tif ok {\n\t\treturn seq\n\t}\n\n\tseq = new(CommandSequence)\n\tc.Functions[name] = seq\n\treturn seq\n}", "title": "" }, { "docid": "7ec16926a88c725d1efd0925e1f683ab", "score": "0.409908", "text": "func GetFilterForTransactionScript(block *types.Block, utxoUsed map[types.OutPoint]*types.UtxoWrap) bloom.Filter {\n\tvar vin, vout [][]byte\n\tfor _, tx := range block.Txs {\n\t\tfor _, out := range tx.Vout {\n\t\t\tvout = append(vout, out.ScriptPubKey)\n\t\t}\n\t}\n\tfor _, utxo := range utxoUsed {\n\t\tif utxo != nil && utxo.Output != nil {\n\t\t\tvin = append(vin, utxo.Output.ScriptPubKey)\n\t\t}\n\t}\n\tfilter := bloom.NewFilter(uint32(len(vin)+len(vout)+1), 0.0001)\n\tfor _, scriptBytes := range vin {\n\t\tfilter.Add(scriptBytes)\n\t}\n\tfor _, scriptBytes := range vout {\n\t\tscriptPubKey := script.NewScriptFromBytes(scriptBytes)\n\t\tif scriptPubKey.IsTokenIssue() || scriptPubKey.IsTokenTransfer() {\n\t\t\t// token output: only store the p2pkh prefix part so we can retrieve it later\n\t\t\tscriptBytes = 
*scriptPubKey.P2PKHScriptPrefix()\n\t\t}\n\t\tfilter.Add(scriptBytes)\n\t}\n\tlogger.Debugf(\"Create Block filter with %d inputs and %d outputs\", len(vin), len(vout))\n\treturn filter\n}", "title": "" }, { "docid": "9589bb95855ba828109164d44234dd3c", "score": "0.4089352", "text": "func LogTracePipeFilter(t *testing.T, filterFn func(ev *TraceEvent) bool) {\n\tlogTracePipe(t, filterFn)\n}", "title": "" }, { "docid": "d417409fcf27596ad17e6a98d312a385", "score": "0.40866935", "text": "func (env *Environment) getFunctionByName(FuncName string) func(*Process, []string) {\n\tFunc, ok := env.FunctionsMap[FuncName]\n\tif !ok {\n\t\tpanic(fmt.Sprintf(\"No such registered function %s\", FuncName))\n\t}\n\treturn Func\n}", "title": "" }, { "docid": "3f576527dedc611dec28c2f57107cffa", "score": "0.40768313", "text": "func NetworkingFilter(fn FilterRequestFunc) {\r\n\tmutex.Lock()\r\n\tdefer mutex.Unlock()\r\n\tconfig.NetworkingFilters = append(config.NetworkingFilters, fn)\r\n}", "title": "" }, { "docid": "6e35f6aad42ace5d27274901fe2e1442", "score": "0.4075347", "text": "func (r RunFns) runFunctions(\n\tinput kio.Reader, output kio.Writer, fltrs []kio.Filter) error {\n\t// use the previously read Resources as input\n\tvar outputs []kio.Writer\n\tif r.Output == nil {\n\t\t// write back to the package\n\t\toutputs = append(outputs, output)\n\t} else {\n\t\t// write to the output instead of the directory if r.Output is specified or\n\t\t// the output is nil (reading from Input)\n\t\toutputs = append(outputs, kio.ByteWriter{Writer: r.Output})\n\t}\n\n\tvar err error\n\tpipeline := kio.Pipeline{\n\t\tInputs: []kio.Reader{input},\n\t\tFilters: fltrs,\n\t\tOutputs: outputs,\n\t\tContinueOnEmptyResult: r.ContinueOnEmptyResult,\n\t}\n\tif r.LogSteps {\n\t\terr = pipeline.ExecuteWithCallback(func(op kio.Filter) {\n\t\t\tvar identifier string\n\n\t\t\tswitch filter := op.(type) {\n\t\t\tcase *container.Filter:\n\t\t\t\tidentifier = filter.Image\n\t\t\tcase *exec.Filter:\n\t\t\t\tidentifier = filter.Path\n\t\t\tcase *starlark.Filter:\n\t\t\t\tidentifier = filter.String()\n\t\t\tdefault:\n\t\t\t\tidentifier = \"unknown-type function\"\n\t\t\t}\n\n\t\t\t_, _ = fmt.Fprintf(r.LogWriter, \"Running %s\\n\", identifier)\n\t\t})\n\t} else {\n\t\terr = pipeline.Execute()\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// check for deferred function errors\n\tvar errs []string\n\tfor i := range fltrs {\n\t\tcf, ok := fltrs[i].(runtimeutil.DeferFailureFunction)\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\t\tif cf.GetExit() != nil {\n\t\t\terrs = append(errs, cf.GetExit().Error())\n\t\t}\n\t}\n\tif len(errs) > 0 {\n\t\treturn fmt.Errorf(strings.Join(errs, \"\\n---\\n\"))\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "11a4abf070f39183025220514b66f3df", "score": "0.40688035", "text": "func productFilter(filter *v1.AggregateFilter) []string {\n\tif filter == nil || len(filter.Filters) == 0 {\n\t\treturn nil\n\t}\n\tsort.Sort(filter)\n\tfilters := make([]string, 0, len(filter.Filters))\n\tfor _, f := range filter.Filters {\n\t\tpred, err := searchKeyForProduct(v1.ProductSearchKey(f.Key()))\n\t\tif err != nil {\n\t\t\tlogger.Log.Error(\"dgraph - productFilter - \", zap.String(\"reason\", err.Error()))\n\t\t\tcontinue\n\t\t}\n\t\tswitch pred {\n\t\tcase prodPredSwidTag, prodPredName, prodPredEditor:\n\t\t\tfilters = append(filters, stringFilter(pred.String(), f))\n\t\t}\n\t}\n\treturn filters\n}", "title": "" }, { "docid": "ba544905aab86ef1ba2b69ca81efc32c", "score": "0.40685827", "text": "func (c APIClient) 
ListJobFilterF(pipelineName string, inputCommit []*pfs.Commit,\n\thistory int64, details bool, jqFilter string,\n\tf func(*pps.JobInfo) error) error {\n\treturn c.ListProjectJobFilterF(pfs.DefaultProjectName, pipelineName, inputCommit, history, details, jqFilter, f)\n}", "title": "" }, { "docid": "e6bb9f9d30974fe1da11c886519101a5", "score": "0.4064675", "text": "func NetworkingFilter(fn FilterRequestFunc) {\n\tmutex.Lock()\n\tdefer mutex.Unlock()\n\tconfig.NetworkingFilters = append(config.NetworkingFilters, fn)\n}", "title": "" }, { "docid": "0ab202b075d6bb24b6b69a9d02dff9aa", "score": "0.40611613", "text": "func FilterName(name string) ast.Filter {\n\treturn func(in string) bool {\n\t\treturn name == in\n\t}\n}", "title": "" }, { "docid": "f57ba2b091b5448858f89e0ca5b1834e", "score": "0.40533268", "text": "func (transaction *ContractExecuteTransaction) SetFunctionParameters(params []byte) *ContractExecuteTransaction {\n\ttransaction.requireNotFrozen()\n\ttransaction.pb.FunctionParameters = params\n\treturn transaction\n}", "title": "" }, { "docid": "aba5b5767c1df207d4859c3ad752cda8", "score": "0.40452388", "text": "func (st *worker) SetExternalFeedbackFunction(fn workerFeedbackFunc) {\n\tst.externalFeedbackFunc = fn\n}", "title": "" }, { "docid": "af27818f7852a217a058cbe926e85d15", "score": "0.40365633", "text": "func (o PermissionOutput) Function() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Permission) pulumi.StringOutput { return v.Function }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "a91f4ddb27adb3309fc8340229ab52cd", "score": "0.4034487", "text": "func NewScriptFunction(script *Script) *ScriptFunction {\n\treturn &ScriptFunction{\n\t\tscript: script,\n\t}\n}", "title": "" }, { "docid": "8817bf3c86946685be212ec5f8142327", "score": "0.40265688", "text": "func FilterRemove(txID utxo.TransactionID) {\n\tif filterEnabled {\n\t\tdoubleSpendFilter.Remove(txID)\n\t}\n}", "title": "" }, { "docid": "988c26eac92f8b0edd4de80ed91bfdb1", "score": "0.40264636", "text": "func GetFunctionName(function http.HandlerFunc) string {\n\treturn runtime.FuncForPC(reflect.ValueOf(function).Pointer()).Name()\n}", "title": "" }, { "docid": "b02c826f67863dac89a8132a71473981", "score": "0.40195212", "text": "func (p *Program) Function(pname, fname string) (*ssa.Function, error) {\n\tpkg, err := p.Package(pname)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tmem, ok := pkg.Members[fname]\n\tif !ok {\n\t\treturn nil, errors.New(\"function \" + fname + \" not found\")\n\t}\n\tf, ok := mem.(*ssa.Function)\n\tif !ok {\n\t\treturn nil, errors.New(\"member \" + fname + \" is not a function\")\n\t}\n\treturn f, nil\n}", "title": "" }, { "docid": "178ba2f2a6c0a59fc474d5b732fb698e", "score": "0.40097484", "text": "func ValidateFunctionName(name string) error {\n\n\tif errs := validation.IsDNS1123Label(name); len(errs) > 0 {\n\t\t// In case of invalid name the error is this:\n\t\t//\t\"a DNS-1123 label must consist of lower case alphanumeric characters or '-',\n\t\t// and must start and end with an alphanumeric character (e.g. 'my-name',\n\t\t// or '123-abc', regex used for validation is '[a-z0-9]([-a-z0-9]*[a-z0-9])?')\"\n\t\t// Let's reuse it for our purposes, ie. 
replace \"DNS-1123 label\" substring with \"function name\"\n\t\treturn errors.New(strings.Replace(strings.Join(errs, \"\"), \"a DNS-1123 label\", \"Function name\", 1))\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "01c33a0afc0c735b6c8e220ab4bd3ce9", "score": "0.40077507", "text": "func FunctionName(fn interface{}) string {\n\tt := reflect.ValueOf(fn).Type()\n\tif t.Kind() == reflect.Func {\n\t\treturn runtime.FuncForPC(reflect.ValueOf(fn).Pointer()).Name()\n\t}\n\treturn t.String()\n}", "title": "" }, { "docid": "8209aa6cb727fe834329e824a29f032e", "score": "0.39964136", "text": "func CleanFilter(filter string) string {\n\tfilter = strings.ReplaceAll(filter, `\\n`, \"\")\n\tre, _ := regexp.Compile(`\\s+\\(`)\n\tfilter = re.ReplaceAllLiteralString(filter, \"(\")\n\tre, _ = regexp.Compile(`\\)\\s+`)\n\tfilter = re.ReplaceAllLiteralString(filter, \")\")\n\treturn filter\n}", "title": "" }, { "docid": "14a7faa01b6c80dfaf265427168783f4", "score": "0.39906678", "text": "func RegisterFunction(code string, parameterCount int, functionDefinition func(vars ...float64) float64) {\n\t//If function is not defined, do not add new function\n\tif functionDefinition == nil {\n\t\treturn\n\t}\n\n\tnewFunction := function{code, parameterCount, functionDefinition}\n\tfunctions = append(functions, newFunction)\n}", "title": "" }, { "docid": "3e686b2f5082ed60d5f748099e766a50", "score": "0.39886382", "text": "func (t *TileDefRequest) SetTextFilter(v string) {\n\tt.TextFilter = &v\n}", "title": "" } ]
4da7868b9910e2d0d3ccb533b0f2cee3
MarshalJSON marshals this object to a JSON structure
[ { "docid": "ee32526181e45d738171f24672f1fb9a", "score": "0.0", "text": "func (m StandbyPowerSwitch) MarshalJSON() ([]byte, error) {\n\t_parts := make([][]byte, 0, 3)\n\n\taO0, err := swag.WriteJSON(m.Switch)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t_parts = append(_parts, aO0)\n\n\taO1, err := swag.WriteJSON(m.StandbyPowerSwitchAllOf1)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t_parts = append(_parts, aO1)\n\n\taO2, err := swag.WriteJSON(m.StandbyPowerSwitchAllOf2)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t_parts = append(_parts, aO2)\n\treturn swag.ConcatJSON(_parts...), nil\n}", "title": "" } ]
[ { "docid": "18ef54518d7f047f9319176f603a3e4f", "score": "0.6763778", "text": "func (v Goods) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonF3f49c6fEncodeSupermallModel(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "78631571c6da90007c353f33eb3ed89e", "score": "0.6749198", "text": "func (v Good) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonF3f49c6fEncodeSupermallModel1(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "2da94b3a6019c953647e96b3c9b870a1", "score": "0.67152804", "text": "func (v PostFull) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonD2b7633eEncodeGithubComArtbakulevTechdbAppModels12(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "608e1966193ee7d343b7d04e3c63e0d9", "score": "0.6676493", "text": "func (v CopyToParams) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonC5a4559bEncodeGithubComKnqChromedpCdpDom78(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "dc9eec03d3b5beb773b8df8c82b97ded", "score": "0.66673434", "text": "func (ng NatGateway)MarshalJSON() ([]byte, error){\n objectMap := make(map[string]interface{})\n if(ng.Sku != nil) {\n objectMap[\"sku\"] = ng.Sku\n }\n if(ng.NatGatewayPropertiesFormat != nil) {\n objectMap[\"properties\"] = ng.NatGatewayPropertiesFormat\n }\n if(ng.Zones != nil) {\n objectMap[\"zones\"] = ng.Zones\n }\n if(ng.ID != nil) {\n objectMap[\"id\"] = ng.ID\n }\n if(ng.Location != nil) {\n objectMap[\"location\"] = ng.Location\n }\n if(ng.Tags != nil) {\n objectMap[\"tags\"] = ng.Tags\n }\n return json.Marshal(objectMap)\n }", "title": "" }, { "docid": "85e14ccca08bfd0e00e610b1e7ee6d41", "score": "0.6637102", "text": "func (v PostFull) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson6601e8cdEncodeGithubComOlegSchwann2chApiTypes4(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "ede95d8b2a52015c8734420e8f343d19", "score": "0.66346097", "text": "func Marshal(j interface{}) ([]byte, error) {\n\treturn json.Marshal(j)\n}", "title": "" }, { "docid": "610cdaf7ecd3bae1cf3aca5e6760e0ee", "score": "0.6633889", "text": "func (v Post) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson6aa74c22EncodeGoTpDbModels3(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "6b1a2499dd0e41d02280f594c08ed374", "score": "0.6623453", "text": "func (s SecretCreateOrUpdateParameters) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"properties\", s.Properties)\n\tpopulate(objectMap, \"tags\", s.Tags)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "0ab070c8931cb51a2f0c7c9eac62c32b", "score": "0.6613701", "text": "func (v Posts) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson6aa74c22EncodeGoTpDbModels(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "2923572acb49f078b2c70b92677b9c9b", "score": "0.66081", "text": "func (v PostNew) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonD2b7633eEncodeGithubComArtbakulevTechdbAppModels11(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "860dd29d4e153f63a2d6fe366631b7ea", "score": "0.6605296", "text": "func (bf bloom32) JSONMarshal() []byte {\n\tbloomImEx := bloomJSONImExport{}\n\tbloomImEx.SetLocs = uint64(bf.setLocs)\n\tbloomImEx.FilterSet = ToBytes(&(bf.boolSet))\n\tdata, err 
:= json.Marshal(bloomImEx)\n\tif err != nil {\n\t\tlog.Fatal(\"json.Marshal failed: \", err)\n\t}\n\treturn data\n}", "title": "" }, { "docid": "b766c31552a87e99c65255451a431b71", "score": "0.6584176", "text": "func (s SQLPool) MarshalJSON() ([]byte, error) {\n\tobjectMap := s.TrackedResource.marshalInternal()\n\tpopulate(objectMap, \"properties\", s.Properties)\n\tpopulate(objectMap, \"sku\", s.SKU)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "2148105511d9dcb6da78c9bdce4f5288", "score": "0.65656555", "text": "func (toVar TagsObject)MarshalJSON() ([]byte, error){\n objectMap := make(map[string]interface{})\n if(toVar.Tags != nil) {\n objectMap[\"tags\"] = toVar.Tags\n }\n return json.Marshal(objectMap)\n }", "title": "" }, { "docid": "0c59ab4061c9eaa0f584fc6c2eb0d716", "score": "0.65493655", "text": "func (e EventHubEventSourceMutableProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"sharedAccessKey\", e.SharedAccessKey)\n\tpopulate(objectMap, \"timestampPropertyName\", e.TimestampPropertyName)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "21e999e89c4f76504e80b117fa4152d8", "score": "0.6538727", "text": "func (v Post) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonD2b7633eEncodeGithubComArtbakulevTechdbAppModels13(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "100b7f89457faa68e89099db67038c4f", "score": "0.6528104", "text": "func (vnu VirtualNetworkUsage)MarshalJSON() ([]byte, error){\n objectMap := make(map[string]interface{})\n return json.Marshal(objectMap)\n }", "title": "" }, { "docid": "55cc7cc9ae8b3acd1870d063a060e82c", "score": "0.6527042", "text": "func (i IoTHubEventSourceMutableProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"sharedAccessKey\", i.SharedAccessKey)\n\tpopulate(objectMap, \"timestampPropertyName\", i.TimestampPropertyName)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "77a0da2e965ea54a4e5cbb041d68f9b7", "score": "0.65244585", "text": "func (p Process) MarshalJSON() ([]byte, error) {\n\tp.Kind = KindProcess\n\ttype Alias Process\n\treturn json.Marshal(&struct {\n\t\tAlias\n\t}{\n\t\tAlias: (Alias)(p),\n\t})\n}", "title": "" }, { "docid": "7b5c97bb6d4c22aa2ad134900743c919", "score": "0.65181404", "text": "func (v Paper) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonA37a3d7eEncodeMainInternalModels2(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "a23ef3d18bb5a8b87e81735df3f84b31", "score": "0.651696", "text": "func (b BaseModel) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"id\", b.ID)\n\tobjectMap[\"kind\"] = \"Series8000\"\n\tpopulate(objectMap, \"name\", b.Name)\n\tpopulate(objectMap, \"type\", b.Type)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "86a38d8e84ab16185e7eab4f88f5378e", "score": "0.64966774", "text": "func (v AllSkins) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonD2b7633eEncodeApiModels14(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "342a33a489a34bdccc429bb1d21f5c50", "score": "0.6493018", "text": "func (v PostFull) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonD2b7633eEncodeAForumInternalModels8(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "659b57a76be46e02789d524fd2b4709f", "score": "0.6484677", 
"text": "func (s Scan) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif s.Properties != nil {\n\t\tobjectMap[\"properties\"] = s.Properties\n\t}\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "bfecb71d74fb892c5dc96c2822e41f0d", "score": "0.64842194", "text": "func (v Post) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson6601e8cdEncodeGithubComOlegSchwann2chApiTypes11(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "8615f89281539b360c3ab5059cb08cb8", "score": "0.64814717", "text": "func (s SyncMember) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"id\", s.ID)\n\tpopulate(objectMap, \"name\", s.Name)\n\tpopulate(objectMap, \"properties\", s.Properties)\n\tpopulate(objectMap, \"type\", s.Type)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "4865e7dda8f709f5f25512cb03a70fab", "score": "0.64795953", "text": "func (v PostFullOutput) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson5a72dc82EncodeDbforumDomainEntity2(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "34317e50974e80d7f8c13ffdbdf7a8ce", "score": "0.6478988", "text": "func (j *JSON) Marshal(v interface{}) ([]byte, error) {\n\treturn json.Marshal(v)\n}", "title": "" }, { "docid": "769275e877388db316351006334d25ba", "score": "0.64765877", "text": "func (sfsp SyncFullSchemaProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "2100f033f18ec0a0a657e3c7c4480c12", "score": "0.6475888", "text": "func (v Valuable) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonA37a3d7eEncodeMainInternalModels(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "e235c43c31275b44d82315549b9df922", "score": "0.6471924", "text": "func (this *Simple) MarshalJSON() ([]byte, error) {\n\tstr, err := TypesMarshaler.MarshalToString(this)\n\treturn []byte(str), err\n}", "title": "" }, { "docid": "00b014859292c7fc0a6a6dfd5769deaf", "score": "0.64699626", "text": "func (epdap ElasticPoolDatabaseActivityProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "c56282f9bbf8222a6efa57d67909c976", "score": "0.64659977", "text": "func (s SharedGalleryImageVersion) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"identifier\", s.Identifier)\n\tpopulate(objectMap, \"location\", s.Location)\n\tpopulate(objectMap, \"name\", s.Name)\n\tpopulate(objectMap, \"properties\", s.Properties)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "37529393c2942447515d8bcdcd8f8344", "score": "0.64625585", "text": "func (sp *SnappedPoint) MarshalJSON() ([]byte, error) {\n\tx := encodedSnappedPoint{}\n\tx.safeSnappedPoint = safeSnappedPoint(*sp)\n\n\tx.EncLocation.Latitude = sp.Location.Lat\n\tx.EncLocation.Longitude = sp.Location.Lng\n\n\treturn json.Marshal(x)\n}", "title": "" }, { "docid": "bc3f7e7df13777d4ca2f03f19e02a700", "score": "0.64619404", "text": "func (v PostDetail) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson6aa74c22EncodeGoTpDbModels2(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "a2c6fdc9333fbbb78b7190b912b99ee8", "score": "0.6460467", "text": "func (c CosmosDbMongoDbAPISink) MarshalJSON() ([]byte, 
error) {\n\tobjectMap := c.CopySink.marshalInternal(\"CosmosDbMongoDbApiSink\")\n\tpopulate(objectMap, \"writeBehavior\", c.WriteBehavior)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "774851ee26865e9db832d963ef52448a", "score": "0.645731", "text": "func (vnp VirtualNetworkPeering)MarshalJSON() ([]byte, error){\n objectMap := make(map[string]interface{})\n if(vnp.VirtualNetworkPeeringPropertiesFormat != nil) {\n objectMap[\"properties\"] = vnp.VirtualNetworkPeeringPropertiesFormat\n }\n if(vnp.Name != nil) {\n objectMap[\"name\"] = vnp.Name\n }\n if(vnp.Type != nil) {\n objectMap[\"type\"] = vnp.Type\n }\n if(vnp.ID != nil) {\n objectMap[\"id\"] = vnp.ID\n }\n return json.Marshal(objectMap)\n }", "title": "" }, { "docid": "4a308e72eedde52393887b4d8542d2dd", "score": "0.6456992", "text": "func (v Ping) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonFa90ddaeEncodeGithubComAsmyasnikovGoMavlinkMavlinkDialectsUAvionix45(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "21e7841148da1af72d9f8e9a20ea222f", "score": "0.64541453", "text": "func (v Ping) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonFa90ddaeEncodeGithubComAsmyasnikovGoMavlinkMavlinkDialectsPaparazzi50(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "0f3f32003c122c30d3e7c23aa810d0a4", "score": "0.6452267", "text": "func (o TeflonObject) MarshalJSON() ([]byte, error) {\n\tm := map[string]interface{}{\n\t\t\"Show\": o.Show.GetPath(),\n\t\t\"Path\": o.Path,\n\t\t\"Parent\": o.Parent.GetPath(),\n\t\t\"FileInfo\": o.FileInfo,\n\t\t\"ShowRoot\": o.ShowRoot,\n\t\t\"Contract\": o.Contract,\n\t\t\"Instances\": o.Instances,\n\t}\n\n\tfor k, v := range o.UserData {\n\t\tm[k] = v\n\t}\n\n\treturn json.Marshal(m)\n}", "title": "" }, { "docid": "844a1d4eab3665ae1500a41eb1127804", "score": "0.6451392", "text": "func (bf bloom64) JSONMarshal() []byte {\n\tbloomImEx := bloomJSONImExport{}\n\tbloomImEx.SetLocs = bf.setLocs\n\tbloomImEx.FilterSet = ToBytes(&(bf.boolSet))\n\tdata, err := json.Marshal(bloomImEx)\n\tif err != nil {\n\t\tlog.Fatal(\"json.Marshal failed: \", err)\n\t}\n\treturn data\n}", "title": "" }, { "docid": "1ff33ab71a3aa338fa4198607e8277f0", "score": "0.64472693", "text": "func (m ManagedInstanceUpdate) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"identity\", m.Identity)\n\tpopulate(objectMap, \"properties\", m.Properties)\n\tpopulate(objectMap, \"sku\", m.SKU)\n\tpopulate(objectMap, \"tags\", m.Tags)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "387925a3734b1b620c7bfe7b386b02ce", "score": "0.6447183", "text": "func (entry *Entry) MarshalJSON() ([] byte, error) {\n\n object := entry.convertToObject(true)\n description, err := json.Marshal(object)\n return description, err\n}", "title": "" }, { "docid": "1c50e55966db89ca63456d8ae97cb59c", "score": "0.64467615", "text": "func (mi ManagedInstance) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif mi.Identity != nil {\n\t\tobjectMap[\"identity\"] = mi.Identity\n\t}\n\tif mi.Sku != nil {\n\t\tobjectMap[\"sku\"] = mi.Sku\n\t}\n\tif mi.ManagedInstanceProperties != nil {\n\t\tobjectMap[\"properties\"] = mi.ManagedInstanceProperties\n\t}\n\tif mi.Location != nil {\n\t\tobjectMap[\"location\"] = mi.Location\n\t}\n\tif mi.Tags != nil {\n\t\tobjectMap[\"tags\"] = mi.Tags\n\t}\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": 
"f58e14b027113c7e813020c0f43d6214", "score": "0.6443974", "text": "func (s ServiceFabric) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"id\", s.ID)\n\tpopulate(objectMap, \"location\", s.Location)\n\tpopulate(objectMap, \"name\", s.Name)\n\tpopulate(objectMap, \"properties\", s.Properties)\n\tpopulate(objectMap, \"tags\", s.Tags)\n\tpopulate(objectMap, \"type\", s.Type)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "5000f6f5f79b75f3b021e550c0f51653", "score": "0.64397335", "text": "func (l ListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"settings\", l.Settings)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "17322cf9fa097ae6c7bb41464f956377", "score": "0.6439341", "text": "func (o *Output) MarshalJSON() ([]byte, error) {\n\ttype Alias Output\n\treturn json.Marshal(&struct {\n\t\tCreatedAt int64 `json:\"created_at\"`\n\t\tUpdatedAt int64 `json:\"updated_at\"`\n\t\t*Alias\n\t}{\n\t\tCreatedAt: o.CreatedAt.Unix(),\n\t\tUpdatedAt: o.UpdatedAt.Unix(),\n\t\tAlias: (*Alias)(o),\n\t})\n}", "title": "" }, { "docid": "c99abffdbed270267156f6d0b6dce7f7", "score": "0.6438585", "text": "func (r Resource)MarshalJSON() ([]byte, error){\n objectMap := make(map[string]interface{})\n if(r.ID != nil) {\n objectMap[\"id\"] = r.ID\n }\n if(r.Location != nil) {\n objectMap[\"location\"] = r.Location\n }\n if(r.Tags != nil) {\n objectMap[\"tags\"] = r.Tags\n }\n return json.Marshal(objectMap)\n }", "title": "" }, { "docid": "59a419db3c5d243a0458a39e979129af", "score": "0.6435681", "text": "func (i InstancePoolUpdate) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"tags\", i.Tags)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "e553bbbbbf60a2659d82d13d8eac3467", "score": "0.64342064", "text": "func (p *Package) MarshalJSON() ([]byte, error) {\n\tp.Version = CurrentVersion\n\treturn json.Marshal(*p)\n}", "title": "" }, { "docid": "142654d232c271aeb5fe5701fbff1d8b", "score": "0.6432686", "text": "func (n NotifyParameters) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"eventName\", n.EventName)\n\tpopulate(objectMap, \"jsonPayload\", n.JSONPayload)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "4dc555d923eb15e1664507ecdf93b9e0", "score": "0.6432541", "text": "func (s ProgressUpdate) MarshalJSON() (bs []byte, err error) { return }", "title": "" }, { "docid": "58a270676526324dce158fa65c9240fc", "score": "0.64317775", "text": "func (o ManagedApp) MarshalJSON() ([]byte, error) {\n\ttoSerialize := map[string]interface{}{}\n\tif o.AppAvailability != nil {\n\t\ttoSerialize[\"appAvailability\"] = o.AppAvailability\n\t}\n\tif o.Version == nil {\n\t\tif o.isExplicitNullVersion {\n\t\t\ttoSerialize[\"version\"] = o.Version\n\t\t}\n\t} else {\n\t\ttoSerialize[\"version\"] = o.Version\n\t}\n\treturn json.Marshal(toSerialize)\n}", "title": "" }, { "docid": "d80127b60532a0ce4cd68adb56741ed3", "score": "0.6431759", "text": "func (s SequencerCapn) MarshalJSON() (bs []byte, err error) { return }", "title": "" }, { "docid": "23bfdccf441f3417d097bc0e1f6ca9ec", "score": "0.6431255", "text": "func (v ConnectionJSON) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson85f0d656EncodeGithubComGoParkMailRu20191EscapadeInternalGame17(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "e6b4c3380e1d96b963f0ebd4a23ec970", 
"score": "0.64272", "text": "func (sm SyncMember) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif sm.SyncMemberProperties != nil {\n\t\tobjectMap[\"properties\"] = sm.SyncMemberProperties\n\t}\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "6fe226045e2f01806bf7d8dbb8bd4e26", "score": "0.6426357", "text": "func (b BaseContainer) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"args\", b.Args)\n\tpopulate(objectMap, \"command\", b.Command)\n\tpopulate(objectMap, \"env\", b.Env)\n\tpopulate(objectMap, \"image\", b.Image)\n\tpopulate(objectMap, \"name\", b.Name)\n\tpopulate(objectMap, \"resources\", b.Resources)\n\tpopulate(objectMap, \"volumeMounts\", b.VolumeMounts)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "55a494d14639ecdb804df015211b7104", "score": "0.6426042", "text": "func (v InitItemUsePayload) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson85f0d656EncodeGithubComGoParkMailRu20191TheRecoveryTeamInternalAppDomainGame9(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "f0ce74c13ff13f4be549ee3d707aa984", "score": "0.64233595", "text": "func (v MapEntry) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson305a78f8EncodeInmemoryStorageInternalAppStorage(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "480f00b14dc7b39778b9466da292c4d2", "score": "0.6423211", "text": "func (s SecretPatchParameters) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"properties\", s.Properties)\n\tpopulate(objectMap, \"tags\", s.Tags)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "2ac2bb6fa5116cb935ed11eba46ca8cc", "score": "0.64208126", "text": "func (v Disignation) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonCef4e921EncodeGithubComElBroomMeteoAppSchema2(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "156cbd341e05a24da57d45fc286e97a6", "score": "0.64199513", "text": "func (v MoveToParams) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonC5a4559bEncodeGithubComKnqChromedpCdpDom31(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "baff3f720f5da9929499917f4344d6d6", "score": "0.6418704", "text": "func (v PostInfo) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonD2b7633eEncodeGithubComForumTPModels5(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "342e230cfa52c4a36b5966f33d8d0498", "score": "0.6414993", "text": "func (v CaptureMetadata) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonB583f6f9EncodeGithubComSkydiveProjectSkydiveFlowProbes2(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "f74855497585e19b23085c0be16213bb", "score": "0.6412228", "text": "func (vnun VirtualNetworkUsageName)MarshalJSON() ([]byte, error){\n objectMap := make(map[string]interface{})\n return json.Marshal(objectMap)\n }", "title": "" }, { "docid": "dd68dff0109b27c032f18d431dc06768", "score": "0.6412196", "text": "func (v Posts) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson6601e8cdEncodeGithubComOlegSchwann2chApiTypes3(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "08cc1c79f32e187f529a89ea61a3473a", "score": "0.64111423", "text": "func (s SQLPoolInfoListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := 
make(map[string]interface{})\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "3f2b2a98dfb007fb8af8b6d3e31db9fa", "score": "0.64074194", "text": "func (v Posts) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonD2b7633eEncodeGithubComArtbakulevTechdbAppModels9(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "74dad79116f36c9715bd63d9f0ad41ad", "score": "0.6405273", "text": "func (asgpf ApplicationSecurityGroupPropertiesFormat)MarshalJSON() ([]byte, error){\n objectMap := make(map[string]interface{})\n return json.Marshal(objectMap)\n }", "title": "" }, { "docid": "1bcb2bf885b9e40803dac535f0030b22", "score": "0.64045995", "text": "func (v Pair) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson56de76c1EncodeGithubComGoParkMailRu20202MVVMGitModelsVacancy6(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "6be1c385c77dde234e3b480007db522c", "score": "0.6398861", "text": "func (s SharedGalleryList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "4c572c3ca64e3fdfb9a8aa02b3d39559", "score": "0.63960373", "text": "func (a AgentPoolPatchProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"count\", a.Count)\n\tpopulate(objectMap, \"upgradeSettings\", a.UpgradeSettings)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "83dc790a42a8cf8045106e455fac104b", "score": "0.6394738", "text": "func (s SQLPoolStoredProcedureActivity) MarshalJSON() ([]byte, error) {\n\tobjectMap := s.Activity.marshalInternal(\"SqlPoolStoredProcedure\")\n\tpopulate(objectMap, \"sqlPool\", s.SQLPool)\n\tpopulate(objectMap, \"typeProperties\", s.TypeProperties)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "8e95f8707e60ea5ac46c7cf82fab9938", "score": "0.6394441", "text": "func (s SecretPatchProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"attributes\", s.Attributes)\n\tpopulate(objectMap, \"contentType\", s.ContentType)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "1c8c7dd5cc9ebcd169ff4683418230fe", "score": "0.6394274", "text": "func (s SharedGallery) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"identifier\", s.Identifier)\n\tpopulate(objectMap, \"location\", s.Location)\n\tpopulate(objectMap, \"name\", s.Name)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "996d006292ac35bac4b88c0b1beea5a5", "score": "0.639413", "text": "func (esp EncryptionSetProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif esp.ActiveKey != nil {\n\t\tobjectMap[\"activeKey\"] = esp.ActiveKey\n\t}\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "85e3ec30313b3f02d3b51006c04d25b1", "score": "0.63933057", "text": "func (ag ApplicationGateway)MarshalJSON() ([]byte, error){\n objectMap := make(map[string]interface{})\n if(ag.ApplicationGatewayPropertiesFormat != nil) {\n objectMap[\"properties\"] = ag.ApplicationGatewayPropertiesFormat\n }\n if(ag.Zones != nil) {\n objectMap[\"zones\"] = ag.Zones\n }\n if(ag.Identity != nil) {\n 
objectMap[\"identity\"] = ag.Identity\n }\n if(ag.ID != nil) {\n objectMap[\"id\"] = ag.ID\n }\n if(ag.Location != nil) {\n objectMap[\"location\"] = ag.Location\n }\n if(ag.Tags != nil) {\n objectMap[\"tags\"] = ag.Tags\n }\n return json.Marshal(objectMap)\n }", "title": "" }, { "docid": "1338b7da9db53c9ef66e63d17e38d283", "score": "0.6390536", "text": "func (p Permissions) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"certificates\", p.Certificates)\n\tpopulate(objectMap, \"keys\", p.Keys)\n\tpopulate(objectMap, \"secrets\", p.Secrets)\n\tpopulate(objectMap, \"storage\", p.Storage)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "6c6c4bf1dd24c5eb9f4e2815b70c8c99", "score": "0.6388956", "text": "func (v VirtualClusterUpdate) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"properties\", v.Properties)\n\tpopulate(objectMap, \"tags\", v.Tags)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "470493deb0fb879010f4da0e8c5e1a20", "score": "0.6385763", "text": "func (v ConnectionsJSON) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson85f0d656EncodeGithubComGoParkMailRu20191EscapadeInternalGame8(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "672b2229d04f099a8b7612be8262f65e", "score": "0.637776", "text": "func (s SharedGalleryImage) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"identifier\", s.Identifier)\n\tpopulate(objectMap, \"location\", s.Location)\n\tpopulate(objectMap, \"name\", s.Name)\n\tpopulate(objectMap, \"properties\", s.Properties)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "bc58c42e89c8d6543f48b046382d338f", "score": "0.6376939", "text": "func (sakp SyncAgentKeyProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "931972f1c33c771d4a81fde03c938ba4", "score": "0.63762486", "text": "func (o InlineObject317) MarshalJSON() ([]byte, error) {\n\ttoSerialize := map[string]interface{}{}\n\tif o.DeviceTag == nil {\n\t\tif o.isExplicitNullDeviceTag {\n\t\t\ttoSerialize[\"deviceTag\"] = o.DeviceTag\n\t\t}\n\t} else {\n\t\ttoSerialize[\"deviceTag\"] = o.DeviceTag\n\t}\n\treturn json.Marshal(toSerialize)\n}", "title": "" }, { "docid": "8fdb01c3aa3d9f09d3aba719fcb5327c", "score": "0.63757056", "text": "func (g *WSStructMessageJSON) Marshal() []byte {\n\tj, err := json.Marshal(g)\n\tif err != nil {\n\t\tpanic(\"JSON Marshal WSStructMessage encode failed\")\n\t}\n\treturn j\n}", "title": "" }, { "docid": "bed5cba112cfeceee50115ef24c87530", "score": "0.63733214", "text": "func (o CIAppPipelineEventAttributes) MarshalJSON() ([]byte, error) {\n\ttoSerialize := map[string]interface{}{}\n\tif o.UnparsedObject != nil {\n\t\treturn json.Marshal(o.UnparsedObject)\n\t}\n\tif o.Attributes != nil {\n\t\ttoSerialize[\"attributes\"] = o.Attributes\n\t}\n\tif o.CiLevel != nil {\n\t\ttoSerialize[\"ci_level\"] = o.CiLevel\n\t}\n\tif o.Tags != nil {\n\t\ttoSerialize[\"tags\"] = o.Tags\n\t}\n\n\tfor key, value := range o.AdditionalProperties {\n\t\ttoSerialize[key] = value\n\t}\n\treturn json.Marshal(toSerialize)\n}", "title": "" }, { "docid": "0a0a6f35c66f615b8e18036efc88c4fb", "score": "0.6372463", "text": "func (a AgentPoolPatchParameters) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"properties\", 
a.Properties)\n\tpopulate(objectMap, \"tags\", a.Tags)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "7f8545cb9eb99c78923d8e7e118c4c1c", "score": "0.6369682", "text": "func (sa SyncAgent) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif sa.SyncAgentProperties != nil {\n\t\tobjectMap[\"properties\"] = sa.SyncAgentProperties\n\t}\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "c2bc61c33ee8ea61bcff7e92edb8deca", "score": "0.63696575", "text": "func (pr ProcessReference) MarshalJSON() ([]byte, error) {\n\tpr.Kind = KindRefprocess\n\ttype Alias ProcessReference\n\treturn json.Marshal(&struct {\n\t\tAlias\n\t}{\n\t\tAlias: (Alias)(pr),\n\t})\n}", "title": "" }, { "docid": "59889b182318a8c7f95ce75df605da67", "score": "0.6367727", "text": "func (app *App) MarshalJSON() ([]byte, error) {\n\tapp.sync()\n\tm := map[string]interface{}{\n\t\tAttrAppUbo: app.UniversalBo.Clone(),\n\t\tbo.SerKeyFields: map[string]interface{}{\n\t\t\tFieldAppOwnerId: app.GetOwnerId(),\n\t\t},\n\t\tbo.SerKeyAttrs: map[string]interface{}{\n\t\t\tAttrAppDomains: app.GetDomains(),\n\t\t\tAttrAppPublicAttrs: app.attrsPublic.clone(),\n\t\t},\n\t}\n\treturn json.Marshal(m)\n}", "title": "" }, { "docid": "833b1dc269283775170b3e82af6514c9", "score": "0.6365955", "text": "func (m ManagedDatabaseUpdate) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"properties\", m.Properties)\n\tpopulate(objectMap, \"tags\", m.Tags)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "0498924b65c09ec37a4eb7a92f400624", "score": "0.63655", "text": "func (p Payload) MarshalJSON() ([]byte, error) {\n\tif p.CustomValues == nil {\n\t\tp.CustomValues = make(map[string]any)\n\t}\n\n\tp.CustomValues[\"aps\"] = p.APS\n\treturn json.Marshal(p.CustomValues)\n}", "title": "" }, { "docid": "27511c3152d00832518decc7e09b89d5", "score": "0.636327", "text": "func (v Post) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonD2b7633eEncodeAForumInternalModels9(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "1fd0474edba978323807a66537e79910", "score": "0.63623184", "text": "func (s SQLPoolStoredProcedureActivityTypeProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"storedProcedureName\", s.StoredProcedureName)\n\tpopulate(objectMap, \"storedProcedureParameters\", s.StoredProcedureParameters)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "1cb3e4224d67ad203c981274672926bc", "score": "0.6359265", "text": "func (obj PlatformFormat) MarshalJSON() ([]byte, error) {\n\ttype Alias PlatformFormat\n\treturn json.Marshal(struct {\n\t\tAction string `json:\"type\"`\n\t\t*Alias\n\t}{Action: \"Platform\", Alias: (*Alias)(&obj)})\n}", "title": "" }, { "docid": "3ca4957f3eabd73b614f6e4450a78cfd", "score": "0.63550836", "text": "func (sap SyncAgentProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif sap.SyncDatabaseID != nil {\n\t\tobjectMap[\"syncDatabaseId\"] = sap.SyncDatabaseID\n\t}\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "b7fb4283903e31172e4ec6820703e2c1", "score": "0.63533026", "text": "func (v EncapsulatedData) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonFa90ddaeEncodeGithubComAsmyasnikovGoMavlinkMavlinkDialectsPaparazzi122(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": 
"5c738c88d1d49f37496125132762f2ab", "score": "0.63525015", "text": "func (v SetItemPayload) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson85f0d656EncodeGithubComGoParkMailRu20191TheRecoveryTeamInternalAppDomainGame(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" }, { "docid": "f481a367b64141f0cafd959f3067c020", "score": "0.63524413", "text": "func (m *Message) JSONMarshal(b *bytes.Buffer) ([]byte, error) {\n\tb.WriteString(\"{\")\n\n\t// encode agent id\n\tm.encodeAgent(b)\n\n\t// encode header\n\tm.encodeHeader(b)\n\n\t// encode data sets\n\tif err := m.encodeDataSet(b); err != nil {\n\t\treturn nil, err\n\t}\n\n\tb.WriteString(\"}\")\n\n\treturn b.Bytes(), nil\n}", "title": "" }, { "docid": "2b26dabf07cd77e7688a51b21c2eea39", "score": "0.63510144", "text": "func (o DataItems0) MarshalJSON() ([]byte, error) {\n\t_parts := make([][]byte, 0, 1)\n\n\taO0, err := swag.WriteJSON(o.PhotoObject)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t_parts = append(_parts, aO0)\n\n\treturn swag.ConcatJSON(_parts...), nil\n}", "title": "" }, { "docid": "02d6392ae1f42910be4a7f1dfe86aeb1", "score": "0.63497424", "text": "func (sr ScanResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif sr.Properties != nil {\n\t\tobjectMap[\"properties\"] = sr.Properties\n\t}\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "bc4366c115104f327de6b4d78469b53e", "score": "0.63488424", "text": "func (v WorkerTakeForm) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson82a45abeEncodeGithubComOvhCdsSdk2(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "title": "" } ]
3ceab30e43efdc1db7fbf8c818da7221
NewDhcpSharednetworkAddInputWithDefaults instantiates a new DhcpSharednetworkAddInput object. This constructor will only assign default values to properties that have it defined, but it doesn't guarantee that properties required by API are set
[ { "docid": "5078b1fb5a639771d5dc80911184a04d", "score": "0.7510781", "text": "func NewDhcpSharednetworkAddInputWithDefaults() *DhcpSharednetworkAddInput {\n\tthis := DhcpSharednetworkAddInput{}\n\treturn &this\n}", "title": "" } ]
[ { "docid": "56db9071f7d2fd1ade53085d063e40e5", "score": "0.68379843", "text": "func NewDhcpSharednetworkAddInput() *DhcpSharednetworkAddInput {\n\tthis := DhcpSharednetworkAddInput{}\n\treturn &this\n}", "title": "" }, { "docid": "48e6acf8f0792f52c639a00386c7107f", "score": "0.52390057", "text": "func NewDhcpSharednetwork6EditInputWithDefaults() *DhcpSharednetwork6EditInput {\n\tthis := DhcpSharednetwork6EditInput{}\n\treturn &this\n}", "title": "" }, { "docid": "8a1fe7d092c072101b8ebc6227176690", "score": "0.52066296", "text": "func (c *CreateClusterACSK) AddDefaults() error {\n\tif c.MasterInstanceType == \"\" {\n\t\tc.MasterInstanceType = DefaultMasterInstanceType\n\t}\n\tif c.MasterSystemDiskCategory == \"\" {\n\t\tc.MasterSystemDiskCategory = DefaultMasterSystemDiskCategory\n\t}\n\tif c.MasterSystemDiskSize < DefaultMasterSystemDiskSize {\n\t\tc.MasterSystemDiskSize = DefaultMasterSystemDiskSize\n\t}\n\n\tif len(c.NodePools) == 0 {\n\t\treturn pkgErrors.ErrorAlibabaNodePoolFieldIsEmpty\n\t}\n\tfor i, np := range c.NodePools {\n\t\tif np.InstanceType == \"\" {\n\t\t\tc.NodePools[i].InstanceType = DefaultWorkerInstanceType\n\t\t}\n\t\tif np.SystemDiskCategory == \"\" {\n\t\t\tc.NodePools[i].SystemDiskCategory = DefaultWorkerSystemDiskCategory\n\t\t}\n\t\tif np.SystemDiskSize < DefaultWorkerSystemDiskSize {\n\t\t\tc.NodePools[i].SystemDiskSize = DefaultWorkerSystemDiskSize\n\t\t}\n\t\tif np.Image == \"\" {\n\t\t\tc.NodePools[i].Image = DefaultImage\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "52bbcd6d55d7ed8b50b0c1d29f64caa6", "score": "0.4990855", "text": "func DefaultAddParams() *AddParams {\n\treturn &AddParams{\n\t\tRecursive: false,\n\t\tLayout: \"\", // corresponds to balanced layout\n\t\tChunker: \"size-262144\",\n\t\tRawLeaves: false,\n\t\tHidden: false,\n\t\tWrap: false,\n\t\tShard: false,\n\t\tProgress: false,\n\t\tCidVersion: 0,\n\t\tHashFun: \"sha2-256\",\n\t\tPinOptions: PinOptions{\n\t\t\tReplicationFactorMin: 0,\n\t\t\tReplicationFactorMax: 0,\n\t\t\tName: \"\",\n\t\t\tShardSize: DefaultShardSize,\n\t\t},\n\t}\n}", "title": "" }, { "docid": "d6e3bec7825ea581dde22a0d79f333d0", "score": "0.49503115", "text": "func (r *CreateClusterRequest) AddDefaults() error {\n\tswitch r.Cloud {\n\tcase Amazon:\n\t\tif r.Properties.CreateClusterPKE != nil {\n\t\t\treturn r.Properties.CreateClusterPKE.AddDefaults()\n\t\t}\n\t\treturn r.Properties.CreateClusterEKS.AddDefaults(r.Location)\n\tdefault:\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7ddb95d62e131cd2bdcc275744a67104", "score": "0.48628044", "text": "func (s *Server) newCommonInput() wrpc.CommonInput {\n\treturn wrpc.CommonInput{\n\t\tEnvTag: s.env,\n\t}\n}", "title": "" }, { "docid": "b5ab42e28aa9f51fb7de27504a63c719", "score": "0.48399082", "text": "func AddDefaultGw(ip, device string) error {\n\treturn AddRoute(\"\", \"\", ip, device)\n}", "title": "" }, { "docid": "c5e3a76b81fb73603bdf6e1b04661c20", "score": "0.48004612", "text": "func (s *System) addInput(config *InputConf) {\n\tif config.Set {\n\t\t// A set of utilities is needed\n\t\tutils := newInputSetUtility(config.Curve, config.NonZero)\n\t\tutils.Limit(config.Min, config.Max)\n\t\ts.utils[config.Id] = utils\n\t} else {\n\t\t// Singleton input utility, insert as is\n\t\tutil := newInputUtility(config.Curve, config.NonZero)\n\t\tutil.Limit(config.Min, config.Max)\n\t\ts.utils[config.Id] = util\n\t}\n}", "title": "" }, { "docid": "fb37b0adc2668085d071a913667aa3e8", "score": "0.4680308", "text": "func NewNetworkHyperFlexNetworkAddressWithDefaults() 
*NetworkHyperFlexNetworkAddress {\n\tthis := NetworkHyperFlexNetworkAddress{}\n\tvar classId string = \"network.HyperFlexNetworkAddress\"\n\tthis.ClassId = classId\n\tvar objectType string = \"network.HyperFlexNetworkAddress\"\n\tthis.ObjectType = objectType\n\treturn &this\n}", "title": "" }, { "docid": "230fe7e511be5d3dee6737169bfe82c9", "score": "0.46765646", "text": "func NewHostWithDefaults() *Host {\n\tthis := Host{}\n\treturn &this\n}", "title": "" }, { "docid": "2df58cf64cde642c2c894536f4d11bd9", "score": "0.46578312", "text": "func NewAppApplicationAddInputWithDefaults() *AppApplicationAddInput {\n\tthis := AppApplicationAddInput{}\n\treturn &this\n}", "title": "" }, { "docid": "72462f29319937efb76d3bf924c51174", "score": "0.46548852", "text": "func Add(hostname, owner, description string, confirm bool) {\n\t// TODO accept existing pubkey\n\tconf := MustLoadDsnetConfig()\n\n\tif owner == \"\" {\n\t\towner = MustPromptString(\"owner\", true)\n\t}\n\tif description == \"\" {\n\t\tdescription = MustPromptString(\"Description\", true)\n\t}\n\t// publicKey := MustPromptString(\"PublicKey (optional)\", false)\n\tif !confirm {\n\t\tConfirmOrAbort(\"\\nDo you want to add the above configuration?\")\n\t}\n\t// newline (not on stdout) to separate config\n\tfmt.Fprintln(os.Stderr)\n\n\tprivateKey := GenerateJSONPrivateKey()\n\tpublicKey := privateKey.PublicKey()\n\n\tpeer := PeerConfig{\n\t\tOwner: owner,\n\t\tHostname: hostname,\n\t\tDescription: description,\n\t\tAdded: time.Now(),\n\t\tPublicKey: publicKey,\n\t\tPrivateKey: privateKey, // omitted from server config JSON!\n\t\tPresharedKey: GenerateJSONKey(),\n\t\tNetworks: []JSONIPNet{},\n\t}\n\n\tif len(conf.Network.IPNet.Mask) > 0 {\n\t\tpeer.IP = conf.MustAllocateIP()\n\t}\n\n\tif len(conf.Network6.IPNet.Mask) > 0 {\n\t\tpeer.IP6 = conf.MustAllocateIP6()\n\t}\n\n\tif len(conf.IP) == 0 && len(conf.IP6) == 0 {\n\t\tExitFail(\"No IPv4 or IPv6 network defined in config\")\n\t}\n\n\t// TODO Some kind of recovery here would be nice, to avoid\n\t// leaving things in a potential broken state\n\tconf.MustAddPeer(peer)\n\tPrintPeerCfg(&peer, conf)\n\tconf.MustSave()\n\tMustConfigureDevice(conf)\n}", "title": "" }, { "docid": "c53d3d08e07444c77389d8f46d37f229", "score": "0.46392784", "text": "func newClientWithDefaults() *Client {\n\tbaseURL, _ := url.Parse(defaultBaseURL)\n\n\tnewClient := &Client{\n\t\thttpClient: http.DefaultClient,\n\t\tBaseURL: baseURL,\n\t\tUserAgent: defaultUserAgent,\n\t}\n\n\t// Create client after figuring out defaults\n\tnewClient.common.client = newClient\n\n\t// Assign services to client\n\tnewClient.Tasks = (*TasksService)(&newClient.common)\n\n\treturn newClient\n}", "title": "" }, { "docid": "211d83ae88d975ffe542665c2b7640a4", "score": "0.46390194", "text": "func (o *DhcpSharednetworkAddInput) HasSharednetworkName() bool {\n\tif o != nil && o.SharednetworkName != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "35b11289b5ff03f0667e31a18d49aa78", "score": "0.45422554", "text": "func CreateDefaultExtNetwork(networkType string) error {\n\tnetworkType = strings.ToLower(strings.TrimSpace(networkType))\n\tif len(networkType) == 0 {\n\t\treturn nil\n\t}\n\n\tif networkType != hnsL2Bridge && networkType != hnsL2Tunnel {\n\t\treturn fmt.Errorf(\"Invalid hns network type %s\", networkType)\n\t}\n\n\tlogger.Printf(\"[Azure CNS] CreateDefaultExtNetwork\")\n\textHnsNetwork, _ := hcsshim.GetHNSNetworkByName(ExtHnsNetworkName)\n\n\tif extHnsNetwork != nil {\n\t\tlogger.Printf(\"[Azure CNS] Found 
existing DefaultExtNetwork with type: %s\", extHnsNetwork.Type)\n\t\tif !strings.EqualFold(networkType, extHnsNetwork.Type) {\n\t\t\treturn fmt.Errorf(\"Network type mismatch with existing network: %s\", extHnsNetwork.Type)\n\t\t}\n\n\t\treturn nil\n\t}\n\n\t// create new hns network\n\tlogger.Printf(\"[Azure CNS] Creating DefaultExtNetwork with type %s\", networkType)\n\n\thnsNetwork := &hcsshim.HNSNetwork{\n\t\tName: ExtHnsNetworkName,\n\t\tType: networkType,\n\t}\n\n\thnsSubnet := hcsshim.Subnet{\n\t\tAddressPrefix: ExtHnsNetworkAddressPrefix,\n\t\tGatewayAddress: ExtHnsNetworkGwAddress,\n\t}\n\n\thnsNetwork.Subnets = append(hnsNetwork.Subnets, hnsSubnet)\n\n\treturn createHnsNetwork(hnsNetwork)\n}", "title": "" }, { "docid": "7417661cd8aa13355b76eb6822eca427", "score": "0.45162436", "text": "func (m *ControlplaneClusterInput) SetDefaults() {\n\tif m.CreateTimeout == 0 {\n\t\tm.CreateTimeout = 10 * time.Minute\n\t}\n\n\tif m.DeleteTimeout == 0 {\n\t\tm.DeleteTimeout = 5 * time.Minute\n\t}\n}", "title": "" }, { "docid": "f1fba80d0af4ea7ad699d9eb35f22f6b", "score": "0.4507344", "text": "func NewTransitGenerateDataKeyRequestWithDefaults() *TransitGenerateDataKeyRequest {\n\tvar this TransitGenerateDataKeyRequest\n\n\tthis.Bits = 256\n\n\treturn &this\n}", "title": "" }, { "docid": "18be3ffed5b765b839402b523ddd92f8", "score": "0.45005637", "text": "func NewDefault() (*Client, error) {\n\treturn New(\"\", 0)\n}", "title": "" }, { "docid": "6e0685d3958ddb020ed88704e20d2eb7", "score": "0.4490637", "text": "func (n *Join) AddInput(in Node) {\n\tn.inputs[0] = in\n}", "title": "" }, { "docid": "6fba8d661f7c33eae952ff20dee00213", "score": "0.44799846", "text": "func (store *Store) AddInput(input *graylog.Input) error {\n\tif input == nil {\n\t\treturn fmt.Errorf(\"input is nil\")\n\t}\n\tif input.ID == \"\" {\n\t\tinput.ID = st.NewObjectID()\n\t}\n\tinput.CreatedAt = time.Now().Format(\"2006-01-02T15:04:05.000Z\")\n\n\tstore.imutex.Lock()\n\tdefer store.imutex.Unlock()\n\tstore.inputs[input.ID] = *input\n\treturn nil\n}", "title": "" }, { "docid": "99f2b3c67dd3f9a3fb088026d6faee71", "score": "0.4466547", "text": "func TestNilInputs(t *testing.T) {\n\tif testing.Short() {\n\t\tt.SkipNow()\n\t}\n\ttestdir := build.TempDir(modules.WalletDir, t.Name())\n\tg, err := gateway.New(\"localhost:0\", false, filepath.Join(testdir, modules.GatewayDir), false)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tcs, err := consensus.New(g, false, filepath.Join(testdir, modules.ConsensusDir), false)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\ttp, err := transactionpool.New(cs, g, filepath.Join(testdir, modules.TransactionPoolDir))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\twdir := filepath.Join(testdir, modules.WalletDir)\n\t_, err = New(cs, nil, wdir, modules.DefaultAddressGapLimit, false)\n\tif err != errNilTpool {\n\t\tt.Error(err)\n\t}\n\t_, err = New(nil, tp, wdir, modules.DefaultAddressGapLimit, false)\n\tif err != errNilConsensusSet {\n\t\tt.Error(err)\n\t}\n\t_, err = New(nil, nil, wdir, modules.DefaultAddressGapLimit, false)\n\tif err != errNilConsensusSet {\n\t\tt.Error(err)\n\t}\n}", "title": "" }, { "docid": "edebc2fc4f9e4f90eef8269ba4297579", "score": "0.44602412", "text": "func DefaultParams() Params {\n\treturn NewParams(\n\t\tDefaultUnbondingTime,\n\t\tDefaultMaxValidators,\n\t\tDefaultEpoch,\n\t\tDefaultMaxValsToAddShares,\n\t\tDefaultMinDelegation,\n\t\tDefaultMinSelfDelegation,\n\t)\n}", "title": "" }, { "docid": "adcc095c48a00c35407b529b8f1d6e08", "score": "0.44520825", "text": "func 
DefaultParams() Params {\n\treturn Params{\n\t\tEvmDenom: ethermint.AttoPhoton,\n\t\tEnableCreate: true,\n\t\tEnableCall: true,\n\t\tExtraEIPs: []int64(nil), // TODO: define default values\n\t}\n}", "title": "" }, { "docid": "5c8852508d2a45bd0a680d062dd05115", "score": "0.44470176", "text": "func defaultContainerNetwork(dst *engine.Step, src *yaml.Container) {\n\tnetwork := &engine.NetworkMapping{\n\t\tName: \"default\",\n\t\tAliases: []string{dst.Name},\n\t}\n\tdst.Networks = append(dst.Networks, network)\n}", "title": "" }, { "docid": "d1eaa97925df3667e5e6b6c5abd579f5", "score": "0.44389835", "text": "func NewPeeringTrustRequestWithDefaults() *PeeringTrustRequest {\n\tthis := PeeringTrustRequest{}\n\treturn &this\n}", "title": "" }, { "docid": "1bd797f0057b95fddc19edcaa3e0f865", "score": "0.44283396", "text": "func (h *Host) SetDefaults(d Default) {\n\tif h.Addr == \"\" {\n\t\th.Addr = d.Addr\n\t}\n\tif h.Dir == \"\" {\n\t\th.Dir = d.Dir\n\t}\n\tif h.Username == \"\" {\n\t\th.Username = d.Username\n\t}\n\tif h.Identity == \"\" {\n\t\th.Identity = d.Identity\n\t}\n\tif h.Password == \"\" {\n\t\th.Password = d.Password\n\t}\n\tif len(h.Build) < 1 {\n\t\th.Build = d.Build\n\t}\n\tif len(h.Cmd) < 1 {\n\t\th.Cmd = d.Cmd\n\t}\n}", "title": "" }, { "docid": "0a6ac1696682f70cee07baee6e29adb6", "score": "0.44259003", "text": "func (c *Client) CreateDefault(obj *jinghzhuv1.Jinghzhu) (*jinghzhuv1.Jinghzhu, error) {\n\treturn c.clientset.JinghzhuV1().Jinghzhus(c.namespace).Create(c.GetContext(), obj, metav1.CreateOptions{})\n}", "title": "" }, { "docid": "21e6a19a81bfd01948fd13e6fd4e10ce", "score": "0.4413835", "text": "func NewDhcpRequest(MAC net.HardwareAddr) *DhcpRequest {\n\tif len(MAC) != 6 {\n\t\tpanic(\"MAC address must be 6 bytes\")\n\t}\n\treturn &DhcpRequest{MAC}\n}", "title": "" }, { "docid": "a530cd79e8a192de83487824a206f953", "score": "0.43899456", "text": "func NewCreateKeypairRequestWithDefaults() *CreateKeypairRequest {\n\tthis := CreateKeypairRequest{}\n\treturn &this\n}", "title": "" }, { "docid": "ebdd8f17d1ab58cc6fd04745b2ad736e", "score": "0.4379886", "text": "func (s *ConfigParserSuite) TestNewWithDefaultsCopied(c *C) {\n\tn := make(configparser.Dict)\n\tn[\"testing\"] = \"value\"\n\tp, err := configparser.NewWithDefaults(n)\n\tc.Assert(err, IsNil)\n\n\tn[\"testing2\"] = \"myvalue\"\n\n\td := p.Defaults()\n\tc.Assert(d[\"testing2\"], Equals, \"\")\n}", "title": "" }, { "docid": "d555e846477e6a55b7b404e0695066d8", "score": "0.43593127", "text": "func (c *VPCClient) NewAddVPCNetworkRequest() *AddVPCNetworkRequest {\n\treq := &AddVPCNetworkRequest{}\n\n\t// setup request with client config\n\tc.Client.SetupRequest(req)\n\n\t// setup retryable with default retry policy (retry for non-create action and common error)\n\treq.SetRetryable(false)\n\treturn req\n}", "title": "" }, { "docid": "480094e91fd72c4592b59547c4668b76", "score": "0.43578428", "text": "func createNetworkHelper(networkID string, tag string, IPv4Data, IPv6Data []driverapi.IPAMData) error {\n\tvar tenantName, networkName, serviceName string\n\tvar err error\n\tif tag != \"\" {\n\t\t// we need to map docker network to policy group or network using the tag\n\t\tlog.Infof(\"Received tag %s\", tag)\n\t\tvar nw *mastercfg.CfgNetworkState\n\t\tepg, err := FindGroupFromTag(tag)\n\t\tif err != nil {\n\t\t\tnw, err = FindNetworkFromTag(tag)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.New(\"failed to lookup tag\")\n\t\t\t}\n\t\t}\n\t\tif epg != nil {\n\t\t\ttenantName = epg.TenantName\n\t\t\tnetworkName = 
epg.NetworkName\n\t\t\tserviceName = epg.GroupName\n\t\t} else if nw != nil {\n\t\t\ttenantName = nw.Tenant\n\t\t\tnetworkName = nw.NetworkName\n\t\t\tserviceName = \"\"\n\t\t}\n\t} else if len(IPv4Data) > 0 {\n\t\t// if subnet is specified in docker command, we create a contiv network\n\t\tsubnetPool := \"\"\n\t\tgateway := \"\"\n\t\tif IPv4Data[0].Pool != nil {\n\t\t\tsubnetPool = IPv4Data[0].Pool.String()\n\t\t}\n\t\tif IPv4Data[0].Gateway != nil {\n\t\t\tgateway = strings.Split(IPv4Data[0].Gateway.String(), \"/\")[0]\n\t\t}\n\t\tsubnetv6 := \"\"\n\t\tgatewayv6 := \"\"\n\t\tif len(IPv6Data) > 0 {\n\t\t\tif IPv6Data[0].Pool != nil {\n\t\t\t\tsubnetv6 = IPv6Data[0].Pool.String()\n\t\t\t}\n\t\t\tif IPv6Data[0].Gateway != nil {\n\t\t\t\tgatewayv6 = strings.Split(IPv6Data[0].Gateway.String(), \"/\")[0]\n\t\t\t}\n\t\t}\n\t\t// build key and URL\n\t\tkeyStr := \"default\" + \":\" + networkID\n\t\turl := \"/api/v1/networks/\" + keyStr + \"/\"\n\n\t\ttenantName = \"default\"\n\t\tnetworkName = networkID\n\t\tserviceName = \"\"\n\n\t\treq := client.Network{\n\t\t\tTenantName: tenantName,\n\t\t\tNetworkName: networkName,\n\t\t\tSubnet: subnetPool,\n\t\t\tGateway: gateway,\n\t\t\tIpv6Subnet: subnetv6,\n\t\t\tIpv6Gateway: gatewayv6,\n\t\t\tEncap: \"vxlan\",\n\t\t}\n\n\t\tvar resp client.Network\n\t\terr = cluster.MasterPostReq(url, &req, &resp)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"failed to create network in netmaster: %s\", err.Error())\n\t\t\treturn errors.New(\"failed to create network in netmaster\")\n\t\t}\n\t\tlog.Infof(\"Created contiv network %+v\", req)\n\t}\n\t// Create docknet oper state to map the docker network to contiv network\n\t// We do not create a network in docker as it is created explicitly by user\n\terr = docknet.CreateDockNetState(tenantName, networkName, serviceName, networkID)\n\tif err != nil {\n\t\tlog.Errorf(\"Error creating docknet state: %s\", err.Error())\n\t\treturn errors.New(\"Error creating docknet state\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "0177d575de95bfa1ba79de59204c3af7", "score": "0.43497524", "text": "func (s *Shim) AddInput(input telegraf.Input) error {\n\tif p, ok := input.(telegraf.Initializer); ok {\n\t\terr := p.Init()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to init input: %s\", err)\n\t\t}\n\t}\n\n\ts.Inputs = append(s.Inputs, input)\n\treturn nil\n}", "title": "" }, { "docid": "25cfbcfbbc693442a105a6a7b4a82e59", "score": "0.43438178", "text": "func NewAddUserToGroupDefault(code int) *AddUserToGroupDefault {\n\treturn &AddUserToGroupDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "ce085260661ea43372dfc52a6dba0a6b", "score": "0.43412578", "text": "func WithDefaultInput(s string) GetOption {\n\treturn func(input *survey.Input) {\n\t\tinput.Default = s\n\t}\n}", "title": "" }, { "docid": "dcf8e4d4704b35cbdb9f82491d88acc9", "score": "0.43293434", "text": "func (c *Converter) RegisterInputDefaults(in interface{}, fn FieldMappingFunc, defaultFlags FieldMatchingFlags) error {\n\tfv := reflect.ValueOf(in)\n\tft := fv.Type()\n\tif ft.Kind() != reflect.Ptr {\n\t\treturn fmt.Errorf(\"expected pointer 'in' argument, got: %v\", ft)\n\t}\n\tc.inputFieldMappingFuncs[ft] = fn\n\tc.inputDefaultFlags[ft] = defaultFlags\n\treturn nil\n}", "title": "" }, { "docid": "d0dd47ef4ac4fb69b80c5b35dac0df4d", "score": "0.43275324", "text": "func (o *ClusterCreate) DefaultAndValidate() error {\n\tif o.TemplateID == \"\" {\n\t\treturn errors.New(\"template ID is required\")\n\t}\n\n\tif o.ProviderID == \"\" {\n\t\treturn 
errors.New(\"provider ID is required\")\n\t}\n\n\tif o.Name == \"\" {\n\t\treturn errors.New(\"name is required\")\n\t}\n\n\tif o.Environment == \"\" {\n\t\treturn errors.New(\"environment is required\")\n\t}\n\n\tif err := o.defaultAndValidateMetrics(); err != nil {\n\t\treturn errors.Wrapf(err, \"validating %s plugin\", plugin.TypeMetrics)\n\t}\n\n\tif err := o.defaultAndValidateClusterManagement(); err != nil {\n\t\treturn errors.Wrapf(err, \"validating %s plugin\", plugin.TypeClusterManagement)\n\t}\n\n\tif err := o.defaultAndValidateAutoscaler(); err != nil {\n\t\treturn errors.Wrapf(err, \"validating %s plugin\", plugin.TypeAutoscaler)\n\t}\n\n\tif err := o.defaultAndValidateAuditLogs(); err != nil {\n\t\treturn errors.Wrapf(err, \"validating %s plugin\", plugin.TypeAuditLogs)\n\t}\n\n\tif err := o.defaultAndValidateGPUDevice(); err != nil {\n\t\treturn errors.Wrapf(err, \"validating %s plugin\", plugin.TypeGPUDevice)\n\t}\n\n\to.labels = map[string]string{\n\t\t\"cluster.containership.io/name\": o.Name,\n\t\t\"cluster.containership.io/environment\": o.Environment,\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "96bb3fa19d13f5a3689efd3a38a035c1", "score": "0.42976528", "text": "func NewSshConfigureZeroAddressRequestWithDefaults() *SshConfigureZeroAddressRequest {\n\tvar this SshConfigureZeroAddressRequest\n\n\treturn &this\n}", "title": "" }, { "docid": "26d59fd3195f93e877b989395d6519c7", "score": "0.4297605", "text": "func (inv *ActionNetworkUpdateInvocation) NewInput() *ActionNetworkUpdateInput {\n\tinv.Input = &ActionNetworkUpdateInput{}\n\treturn inv.Input\n}", "title": "" }, { "docid": "caeb23c07692a852f42ecc2a7efefc2b", "score": "0.42934993", "text": "func (o *CreateNetworkLocationsParams) SetDefaults() {\n\t// no default values defined for this parameter\n}", "title": "" }, { "docid": "f8747e5df88f6cc27763dd24aa9a8bd3", "score": "0.42877308", "text": "func InitDefaultClient(options *HTTPOptions) {\n\tdefaultZ = New(options)\n}", "title": "" }, { "docid": "400e862430f612fc4a906c99f1535916", "score": "0.42691556", "text": "func (neuron *Neuron) AddInput(input *Neuron, w Data) {\n\tinput.AddOutput(&Connection {c: neuron.input, weight: w })\n\tch := make(chan string)\n\tneuron.command <- message{ message: AddInput, payload: input, reply: ch}\n\t_ = <-ch\n}", "title": "" }, { "docid": "421e3803d4828dc3bad4e169407348fc", "score": "0.42637482", "text": "func NewPutCwfNetworkIDDefault(code int) *PutCwfNetworkIDDefault {\n\treturn &PutCwfNetworkIDDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "2292dbad2cee0635aef24cf3b0b4299e", "score": "0.42596245", "text": "func NewAddParallelForwardingDefault(code int) *AddParallelForwardingDefault {\n\treturn &AddParallelForwardingDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "5d542c5cd972587665709993d928a954", "score": "0.42569903", "text": "func NewClientDefaultToken(localIPs, remoteIPs []string, username, password, portgroupName string, logger *log.Logger) (*Client, error) {\n\t// validate input value\n\tif len(username) == 0 {\n\t\treturn nil, errors.New(\"username is required\")\n\t}\n\tif len(password) == 0 {\n\t\treturn nil, errors.New(\"password is required\")\n\t}\n\tif len(localIPs) == 0 || len(remoteIPs) == 0 {\n\t\treturn nil, errors.New(\"IPs is required\")\n\t}\n\n\tif logger == nil {\n\t\tl := log.New(ioutil.Discard, \"\", log.LstdFlags)\n\t\tlogger = l\n\t}\n\n\ttlsConfig := tls.Config{\n\t\tInsecureSkipVerify: true,\n\t}\n\ttransport := 
*http.DefaultTransport.(*http.Transport)\n\ttransport.TLSClientConfig = &tlsConfig\n\thttpClient := &http.Client{Transport: &transport}\n\n\tlocalDevice, err := newDevice(localIPs, username, password, httpClient, logger)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create Local Device: %w\", err)\n\t}\n\n\tremoteDevice, err := newDevice(remoteIPs, username, password, httpClient, logger)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create Remote Device: %w\", err)\n\t}\n\n\tc := &Client{\n\t\tLocalDevice: localDevice,\n\t\tRemoteDevice: remoteDevice,\n\t\tPortGroupName: portgroupName,\n\t\tLogger: logger,\n\t}\n\n\treturn c, nil\n}", "title": "" }, { "docid": "2a32f067d3e290b6f1fbf20d1af1c386", "score": "0.4253301", "text": "func (c *CreateCommand) constructInput(serviceID string, serviceVersion int) *fastly.CreateNewRelicOTLPInput {\n\tvar input fastly.CreateNewRelicOTLPInput\n\n\tif c.name.WasSet {\n\t\tinput.Name = &c.name.Value\n\t}\n\tinput.ServiceID = serviceID\n\tinput.ServiceVersion = serviceVersion\n\tif c.key.WasSet {\n\t\tinput.Token = &c.key.Value\n\t}\n\n\tif c.format.WasSet {\n\t\tinput.Format = &c.format.Value\n\t}\n\n\tif c.formatVersion.WasSet {\n\t\tinput.FormatVersion = &c.formatVersion.Value\n\t}\n\n\tif c.placement.WasSet {\n\t\tinput.Placement = &c.placement.Value\n\t}\n\n\tif c.region.WasSet {\n\t\tinput.Region = &c.region.Value\n\t}\n\n\tif c.responseCondition.WasSet {\n\t\tinput.ResponseCondition = &c.responseCondition.Value\n\t}\n\n\tif c.url.WasSet {\n\t\tinput.URL = &c.url.Value\n\t}\n\n\treturn &input\n}", "title": "" }, { "docid": "334b88130b8d014efd36974d974f6ecb", "score": "0.4246912", "text": "func (mec *MockEventCollector) AddDefaultTag(key, value string) {\n\tmec.defaultTags = append(mec.defaultTags, fmt.Sprintf(\"%s:%s\", key, value))\n}", "title": "" }, { "docid": "8830a58a67aa75cc5428989d0bb7cd82", "score": "0.42458135", "text": "func NewAddDeviceToGroupDefault(code int) *AddDeviceToGroupDefault {\n\treturn &AddDeviceToGroupDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "c4251a283f0d102b8e95281d2beb587d", "score": "0.42431968", "text": "func InitDockerDefault(ic ipamapi.Callback, l, g interface{}) error {\n\tvar (\n\t\tok bool\n\t\tlocalDs, globalDs datastore.DataStore\n\t)\n\n\tif l != nil {\n\t\tif localDs, ok = l.(datastore.DataStore); !ok {\n\t\t\treturn errors.New(\"incorrect local datastore passed to built-in ipam init\")\n\t\t}\n\t}\n\n\tif g != nil {\n\t\tif globalDs, ok = g.(datastore.DataStore); !ok {\n\t\t\treturn errors.New(\"incorrect global datastore passed to built-in ipam init\")\n\t\t}\n\t}\n\n\tipamutils.ConfigLocalScopeDefaultNetworks(nil)\n\n\ta, err := ipam.NewAllocator(localDs, globalDs)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcps := &ipamapi.Capability{RequiresRequestReplay: true}\n\n\treturn ic.RegisterIpamDriverWithCapabilities(ipamapi.DefaultIPAM, a, cps)\n}", "title": "" }, { "docid": "c03e1614fa08712e0526d65305350454", "score": "0.42378283", "text": "func NewPostNetworksDefault(code int) *PostNetworksDefault {\n\treturn &PostNetworksDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "4cb03c1c07c8d3879342bdb27dd4508b", "score": "0.42374054", "text": "func (o *MetroclusterDiagnosticsCreateParams) SetDefaults() {\n\tvar (\n\t\treturnTimeoutDefault = int64(0)\n\t)\n\n\tval := MetroclusterDiagnosticsCreateParams{\n\t\tReturnTimeout: &returnTimeoutDefault,\n\t}\n\n\tval.timeout = o.timeout\n\tval.Context = o.Context\n\tval.HTTPClient = o.HTTPClient\n\t*o = 
val\n}", "title": "" }, { "docid": "73c2329f409cca48470ebbd60b091d05", "score": "0.42355838", "text": "func NewDhcpSharednetwork6EditInput() *DhcpSharednetwork6EditInput {\n\tthis := DhcpSharednetwork6EditInput{}\n\treturn &this\n}", "title": "" }, { "docid": "5e506ecf822a033ca57b8ea0c9801001", "score": "0.42161685", "text": "func (c *KubeCluster) AddDefaultsToUpdate(*pkgCluster.UpdateClusterRequest) {\n}", "title": "" }, { "docid": "66697854e253cd520316df9be9ec2d4f", "score": "0.42142665", "text": "func (plugin *NetworkPlugin) AddNetwork(cniParams *Parameters) error {\n\tif err := plugin.checkInitialized(); err != nil {\n\t\treturn err\n\t}\n\t_, err := plugin.addToNetwork(plugin.getDefaultNetwork(), cniParams)\n\tif err != nil {\n\t\tglog.Errorf(\"Error while adding to cni network: %s\", err)\n\t\treturn err\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "f2b77acea6387bc774bac135195660a7", "score": "0.42134458", "text": "func NewAddContainerNodeDefault(code int) *AddContainerNodeDefault {\n\treturn &AddContainerNodeDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "b562fbe82c5d46c437b08c475329291f", "score": "0.42130673", "text": "func NewAddServiceInstanceDefault(code int) *AddServiceInstanceDefault {\n\treturn &AddServiceInstanceDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "3e3d4b7d1385a077a3cd7a7bf22ca81a", "score": "0.4200382", "text": "func NewInput(desc string) (i *Input, err error) {\n\ti, err = NewInputFromConfig(desc, config.Configure())\n\treturn\n}", "title": "" }, { "docid": "5ada54bf42122ad40bae17204cce1e3f", "score": "0.41960394", "text": "func NewCreateDhcpOptionsDefault(code int) *CreateDhcpOptionsDefault {\n\treturn &CreateDhcpOptionsDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "f91934d4d2a42ca6d4999f46cb4f140e", "score": "0.41787747", "text": "func (s *Server) AddPublicNWConnectedParam() {\n\tif s.ConnectedSwitches == nil {\n\t\ts.ClearConnectedSwitches()\n\t}\n\ts.ConnectedSwitches = append(s.ConnectedSwitches, map[string]interface{}{\"Scope\": \"shared\"})\n}", "title": "" }, { "docid": "2401cc3671c2b8262c7f1af9396b953e", "score": "0.41753492", "text": "func (v *Host) AddNic(name string, mac string) error {\n\tkey := \"key-vim.host.PhysicalNic-\" + name\n\tpnic := types.PhysicalNic{Key: key, Device: name, Mac: mac}\n\tv.Obj.Config.Network.Pnic = append(v.Obj.Config.Network.Pnic, pnic)\n\n\th := simulator.Map.Get(v.Obj.Reference())\n\tsimulator.Map.Update(h, []types.PropertyChange{\n\t\t{Name: \"config\", Val: v.Obj.Config},\n\t})\n\n\treturn nil\n}", "title": "" }, { "docid": "49aa70c5e52f2ea0fb91b0c83f3c64ac", "score": "0.41719198", "text": "func NewDefault(name string, address Address) *Subscriber {\n\treturn &Subscriber{name, 15.5, true, address}\n}", "title": "" }, { "docid": "91bcf4cb86fece7fe091a1cb48c2b102", "score": "0.41707775", "text": "func (o *PcloudNetworksPortsPostParams) SetDefaults() {\n\t// no default values defined for this parameter\n}", "title": "" }, { "docid": "b77a06e7256cbab4a6c9e9a00f198785", "score": "0.4169234", "text": "func NewAddRequest(dn string, controls []Control) *AddRequest {\n\treturn &AddRequest{\n\t\tDN: dn,\n\t\tControls: controls,\n\t}\n\n}", "title": "" }, { "docid": "d9d32ea33c4877abc8c9590c01cb3d89", "score": "0.4163654", "text": "func (o *AddressesParams) SetDefaults() {\n\tvar (\n\t\tdelegatedDefault = bool(false)\n\t)\n\n\tval := AddressesParams{\n\t\tDelegated: &delegatedDefault,\n\t}\n\n\tval.timeout = o.timeout\n\tval.Context = 
o.Context\n\tval.HTTPClient = o.HTTPClient\n\t*o = val\n}", "title": "" }, { "docid": "0fa0c7e1492fd2029fbe78e54f41a75f", "score": "0.4162482", "text": "func NewRemoteConnectionCreationRequestWithDefaults() *RemoteConnectionCreationRequest {\n\tthis := RemoteConnectionCreationRequest{}\n\tvar allowInsecureTLS bool = false\n\tthis.AllowInsecureTLS = allowInsecureTLS\n\treturn &this\n}", "title": "" }, { "docid": "1c60ad3b4371a12d84f589df6e95ea75", "score": "0.41589725", "text": "func CreateAddNetworkInterfaceToInstanceRequest() (request *AddNetworkInterfaceToInstanceRequest) {\n\trequest = &AddNetworkInterfaceToInstanceRequest{\n\t\tRpcRequest: &requests.RpcRequest{},\n\t}\n\trequest.InitWithApiInfo(\"Ens\", \"2017-11-10\", \"AddNetworkInterfaceToInstance\", \"ens\", \"openAPI\")\n\trequest.Method = requests.POST\n\treturn\n}", "title": "" }, { "docid": "a32336b697312eb4e6023d557d95718b", "score": "0.41499448", "text": "func (o *GetKeyBlockByHashParams) SetDefaults() {\n\t// no default values defined for this parameter\n}", "title": "" }, { "docid": "ecd963edaefd844ca1513f68e88f4cc8", "score": "0.41470483", "text": "func NewDefaultClient(srvName, localIP string, svcPort int, checkTTL string) (*Client, error) {\n\tvar err error\n\n\tclient := &Client{srvID: srvName}\n\n\tclient.clientAPI, err = api.NewClient(api.DefaultConfig())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tsrvRegInfo := &api.AgentServiceRegistration{\n\t\tID: srvName,\n\t\tName: srvName,\n\t\tAddress: localIP,\n\t\tPort: svcPort,\n\t\tCheck: &api.AgentServiceCheck{\n\t\t\tCheckID: srvName,\n\t\t\tTTL: checkTTL,\n\t\t},\n\t}\n\n\tif err = client.Register(srvRegInfo); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn client, nil\n}", "title": "" }, { "docid": "1df86587d19b9c02cc99a280c448711d", "score": "0.41442716", "text": "func createDefaultConfig() configmodels.Processor {\n\treturn &Config{\n\t\tProcessorSettings: configmodels.ProcessorSettings{\n\t\t\tTypeVal: typeStr,\n\t\t\tNameVal: typeStr,\n\t\t},\n\t}\n}", "title": "" }, { "docid": "c43bc84ca95bd956ab447694489d3a47", "score": "0.414254", "text": "func NewDeleteDirectLinkRequestWithDefaults() *DeleteDirectLinkRequest {\n\tthis := DeleteDirectLinkRequest{}\n\treturn &this\n}", "title": "" }, { "docid": "9382ab1e76754969635a1e719c909600", "score": "0.41419357", "text": "func NewSSHrimpWithDefaults() *SSHrimp {\n\n\tsshrimp := SSHrimp{\n\t\tAgent{\n\t\t\tProviderURL: \"https://accounts.google.com\",\n\t\t\tSocket: \"/tmp/sshrimp.sock\",\n\t\t},\n\t\tCertificateAuthority{\n\t\t\tFunctionName: \"sshrimp\",\n\t\t\tKeyAlias: \"alias/sshrimp\",\n\t\t\tForceCommandRegex: \"^$\",\n\t\t\tSourceAddressRegex: \"^$\",\n\t\t\tUsernameRegex: `^(.*)@example\\.com$`,\n\t\t\tUsernameClaim: \"email\",\n\t\t\tValidAfterOffset: \"-5m\",\n\t\t\tValidBeforeOffset: \"+12h\",\n\t\t\tExtensions: []string{\n\t\t\t\t\"permit-agent-forwarding\",\n\t\t\t\t\"permit-port-forwarding\",\n\t\t\t\t\"permit-pty\",\n\t\t\t\t\"permit-user-rc\",\n\t\t\t\t\"no-x11-forwarding\",\n\t\t\t},\n\t\t\tProvisioningUser: \"\",\n\t\t},\n\t}\n\treturn &sshrimp\n}", "title": "" }, { "docid": "f9c12be84bc0b47fcc5b2bc64679cb3a", "score": "0.4139924", "text": "func (client *Client) initDefaultSecurityGroup() error {\n\tsg, err := client.getDefaultSecurityGroup()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif sg != nil {\n\t\tclient.SecurityGroup = sg\n\t\treturn nil\n\t}\n\topts := secgroups.CreateOpts{\n\t\tName: defaultSecurityGroup,\n\t\tDescription: \"Default security group\",\n\t}\n\n\tgroup, err := 
secgroups.Create(client.Compute, opts).Extract()\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = client.createTCPRules(group.ID)\n\tif err != nil {\n\t\tsecgroups.Delete(client.Compute, group.ID)\n\t\treturn err\n\t}\n\n\terr = client.createUDPRules(group.ID)\n\tif err != nil {\n\t\tsecgroups.Delete(client.Compute, group.ID)\n\t\treturn err\n\t}\n\terr = client.createICMPRules(group.ID)\n\tif err != nil {\n\t\tsecgroups.Delete(client.Compute, group.ID)\n\t\treturn err\n\t}\n\tclient.SecurityGroup = group\n\treturn nil\n}", "title": "" }, { "docid": "1b302df50aadc9d350faa1fee310f7e7", "score": "0.41357544", "text": "func getDefaultHostConfig(t *testing.T, cli ce.ContainerInterface) *ce.ContainerHostConfig {\n\thostConfig := ce.ContainerHostConfig{\n\t\tPortBindings: []ce.PortBinding{},\n\t\tCapDrop: []string{\n\t\t\t\"ALL\",\n\t\t},\n\t\tPrivileged: false,\n\t}\n\tif coverage() {\n\t\thostConfig.Binds = append(hostConfig.Binds, coverageBind(t))\n\t}\n\tif devImage(t, cli) {\n\t\t// Only needed for a RHEL-based image\n\t\tif baseImage(t, cli) != \"ubuntu\" {\n\t\t\thostConfig.CapAdd = append(hostConfig.CapAdd, \"DAC_OVERRIDE\")\n\t\t}\n\t} else {\n\t\tt.Logf(\"Detected MQ Advanced image - dropping all capabilities\")\n\t}\n\treturn &hostConfig\n}", "title": "" }, { "docid": "ef72390487e846f79935577e1c108157", "score": "0.41346303", "text": "func NewPostWifiNetworkIDGatewaysDefault(code int) *PostWifiNetworkIDGatewaysDefault {\n\treturn &PostWifiNetworkIDGatewaysDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "e180c88e33681300068de7acda994b20", "score": "0.4131691", "text": "func createHnsNetwork(hnsNetwork *hcsshim.HNSNetwork) error {\n\t// Marshal the request.\n\tbuffer, err := json.Marshal(hnsNetwork)\n\tif err != nil {\n\t\treturn err\n\t}\n\thnsRequest := string(buffer)\n\n\t// Create the HNS network.\n\tlogger.Printf(\"[Azure CNS] HNSNetworkRequest POST request:%+v\", hnsRequest)\n\thnsResponse, err := hcsshim.HNSNetworkRequest(\"POST\", \"\", hnsRequest)\n\tlogger.Printf(\"[Azure CNS] HNSNetworkRequest POST response:%+v err:%v.\", hnsResponse, err)\n\n\treturn err\n}", "title": "" }, { "docid": "f8ccfd4e072b3acab314f8447deadaff", "score": "0.41300854", "text": "func NewDefaultChain(client client.Client) DefaultChain {\n\treturn NewChain[\n\t\ttypes.MultiAddress,\n\t\ttypes.MultiSignature,\n\t\tDefaultPaymentFields,\n\t\t*DefaultGenericSignedBlock,\n\t](client)\n}", "title": "" }, { "docid": "78bcbf00577bcb9e048a873e3a32aba3", "score": "0.412766", "text": "func (o *PcloudVpnconnectionsPutParams) SetDefaults() {\n\t// no default values defined for this parameter\n}", "title": "" }, { "docid": "e4343f7d2b839b3d442fd5da39e2fc09", "score": "0.41207293", "text": "func (s *ConfigParserSuite) TestNewWithDefaults(c *C) {\n\tn := make(configparser.Dict)\n\tn[\"testing\"] = \"value\"\n\n\tp, err := configparser.NewWithDefaults(n)\n\tc.Assert(err, IsNil)\n\n\td := p.Defaults()\n\tc.Assert(d[\"testing\"], Equals, \"value\")\n}", "title": "" }, { "docid": "6bb86275e151aeaef8a75985e4ac1e48", "score": "0.4118305", "text": "func NewDefault(d *Properties) *Properties {\n\treturn &Properties{make(map[string]string), d}\n}", "title": "" }, { "docid": "abd3df34f428d22509871a1bc83ed247", "score": "0.4114563", "text": "func (a *App) AddDefaults(d []cli.Flag, c []ConfByCategories) {\n\ta.defaults.flags = append(a.defaults.flags, d...)\n\ta.defaults.confs = append(a.defaults.confs, c...)\n}", "title": "" }, { "docid": "972d980d69b2912196b59da926dc7ebb", "score": "0.41129234", "text": 
"func NewInput(data []byte) *Input {\n\treturn &Input{\n\t\tdata: data,\n\t\tbits: 8,\n\t}\n}", "title": "" }, { "docid": "4b9e3581d34d06e82566b7717c2f4ff8", "score": "0.4108938", "text": "func CreateConfigNetworkRequest() (request *ConfigNetworkRequest) {\n\trequest = &ConfigNetworkRequest{\n\t\tRpcRequest: &requests.RpcRequest{},\n\t}\n\trequest.InitWithApiInfo(\"hsm\", \"2018-01-11\", \"ConfigNetwork\", \"hsm\", \"openAPI\")\n\treturn\n}", "title": "" }, { "docid": "7985b33e328b3037e9578504cf8c1eb7", "score": "0.41072258", "text": "func NewDefaultCLIConfig() *CLIConfig {\n\treturn &CLIConfig{\n\t\tName: \"Dummy\",\n\t\tClientAddr: \"127.0.0.1:1339\",\n\t\tProxyAddr: \"127.0.0.1:1338\",\n\t\tLogLevel: \"debug\",\n\t}\n}", "title": "" }, { "docid": "8490eedb00b2ee4cfb46e13c43cc6ca7", "score": "0.4095914", "text": "func NewRequestWithDefaults() *Request {\n\tthis := Request{}\n\treturn &this\n}", "title": "" }, { "docid": "433cf74df5418998fdee593baf39747e", "score": "0.4093609", "text": "func (p *Peer) addInputChannel(label string) error {\n\tch, err := p.conn.CreateDataChannel(label, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\tch.OnOpen(func() {\n\t\tp.log.Debug().Str(\"label\", ch.Label()).Uint16(\"id\", *ch.ID()).Msg(\"Data channel [input] opened\")\n\t})\n\tch.OnError(p.logx)\n\tch.OnMessage(func(mess webrtc.DataChannelMessage) {\n\t\tif len(mess.Data) == 0 {\n\t\t\treturn\n\t\t}\n\t\t// echo string messages (e.g. ping/pong)\n\t\tif mess.IsString {\n\t\t\tp.logx(ch.Send(mess.Data))\n\t\t\treturn\n\t\t}\n\t\tif p.OnMessage != nil {\n\t\t\tp.OnMessage(mess.Data)\n\t\t}\n\t})\n\tp.dTrack = ch\n\tch.OnClose(func() { p.log.Debug().Msg(\"Data channel [input] has been closed\") })\n\treturn nil\n}", "title": "" }, { "docid": "67d55518141e055f34dbf1a7b7b9cc9d", "score": "0.40921405", "text": "func NewInput() *CellInput {\n\treturn &CellInput{\n\t\tpnames: make([]string, 0, maxParam),\n\t\tpvalues: make([]string, 0, maxParam),\n\t\tdata: make(map[interface{}]interface{}),\n\t}\n}", "title": "" }, { "docid": "67d55518141e055f34dbf1a7b7b9cc9d", "score": "0.40921405", "text": "func NewInput() *CellInput {\n\treturn &CellInput{\n\t\tpnames: make([]string, 0, maxParam),\n\t\tpvalues: make([]string, 0, maxParam),\n\t\tdata: make(map[interface{}]interface{}),\n\t}\n}", "title": "" }, { "docid": "c8ac613c1d7b82f69cafc3ae9c53e6b6", "score": "0.4082348", "text": "func NewSubscribeWithDefaults() *Subscribe {\n\tthis := Subscribe{}\n\treturn &this\n}", "title": "" }, { "docid": "762e2f319aea2dbd468598851863390a", "score": "0.40817264", "text": "func NewAddTokenSigningCertificatePostRequestBody()(*AddTokenSigningCertificatePostRequestBody) {\n m := &AddTokenSigningCertificatePostRequestBody{\n }\n m.SetAdditionalData(make(map[string]interface{}));\n return m\n}", "title": "" }, { "docid": "2fe66c8095a22417042cdc415fe26168", "score": "0.4079762", "text": "func DefaultParams() Params {\n\treturn Params{\n\t\tMinNameLength: 5,\n\t\tMaxNameLength: 25,\n\t\tMinIDLength: 3,\n\t\tMaxIDLength: 15,\n\t\tMaxDescriptionLength: 140,\n\t\tCommunityAdmins: []sdk.AccAddress{},\n\t}\n}", "title": "" }, { "docid": "217af5104203c0483b7f1c830f18df24", "score": "0.40770125", "text": "func (ev *SentGenEnv) AddInput(sidx int, role string) {\n\twrd := ev.TransWord(ev.CurSent[sidx])\n\tfil := ev.Rules.States[role]\n\tev.SentInputs = append(ev.SentInputs, []string{wrd, role, fil})\n}", "title": "" }, { "docid": "df7ac5d83fcf733dec7b18ddc210b3c6", "score": "0.40763843", "text": "func (gt *GoTezos) AddNewClient(client 
*TezosRPCClient) {\n\n\tgt.clientLock.Lock()\n\tgt.RPCClients = append(gt.RPCClients, &TezClientWrapper{true, client})\n\tgt.clientLock.Unlock()\n\n\tvar err error\n\tgt.Constants, err = gt.GetNetworkConstants()\n\tif err != nil {\n\t\tfmt.Println(\"Could not get network constants, library will fail. Exiting .... \")\n\t\tos.Exit(0)\n\t}\n\n\tgt.Versions, err = gt.GetNetworkVersions()\n\tif err != nil {\n\t\tfmt.Println(\"Could not get network version, library will fail. Exiting .... \")\n\t\tos.Exit(0)\n\t}\n}", "title": "" }, { "docid": "9f2dfe0da44ad602a699cd3a5e33c544", "score": "0.4072954", "text": "func (nm *networkManager) newNetworkImpl(nwInfo *NetworkInfo, extIf *externalInterface) (*network, error) {\n\t// Connect the external interface.\n\tvar vlanid int\n\topt, _ := nwInfo.Options[genericData].(map[string]interface{})\n\tlog.Printf(\"opt %+v options %+v\", opt, nwInfo.Options)\n\n\tswitch nwInfo.Mode {\n\tcase opModeTunnel:\n\t\tfallthrough\n\tcase opModeBridge:\n\t\tlog.Printf(\"create bridge\")\n\t\tif err := nm.connectExternalInterface(extIf, nwInfo); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif opt != nil && opt[VlanIDKey] != nil {\n\t\t\tvlanid, _ = strconv.Atoi(opt[VlanIDKey].(string))\n\t\t}\n\n\tdefault:\n\t\treturn nil, errNetworkModeInvalid\n\t}\n\n\t// Create the network object.\n\tnw := &network{\n\t\tId: nwInfo.Id,\n\t\tMode: nwInfo.Mode,\n\t\tEndpoints: make(map[string]*endpoint),\n\t\textIf: extIf,\n\t\tVlanId: vlanid,\n\t\tEnableSnatOnHost: nwInfo.EnableSnatOnHost,\n\t}\n\n\treturn nw, nil\n}", "title": "" }, { "docid": "f936abe2dda6509bb5c557e301d79e52", "score": "0.4072387", "text": "func (g *GeneratorOptions) addDefaults() {\n\tif (g.RequestTimeout == 0) {\n\t\tg.RequestTimeout = duration\n\t}\n\n\tif (g.QPS == 0) {\n\t\tg.QPS = qps\n\t}\n}", "title": "" }, { "docid": "5a69f2cf07ae10a733df8952093da104", "score": "0.40718243", "text": "func NewInput() *Input {\n\treturn (*Input)(allocStructNkInputMemory(1))\n}", "title": "" }, { "docid": "04ce638a74b2322e621dcb35b7ff3e5f", "score": "0.40673026", "text": "func (o *GetHostStorageParams) SetDefaults() {\n\t// no default values defined for this parameter\n}", "title": "" }, { "docid": "dfb231432cece1510f42c3d54cc66bd9", "score": "0.4065308", "text": "func (m *DBRepo) AddDefaultData(td *TemplateData, r *http.Request, w http.ResponseWriter) *TemplateData {\n\tif td == nil {\n\t\ttd = &TemplateData{}\n\t}\n\ttd.CSRFToken = nosurf.Token(r)\n\ttd.Flash = m.App.Session.PopString(r.Context(), \"flash\")\n\ttd.Warning = m.App.Session.PopString(r.Context(), \"warning\")\n\ttd.Error = m.App.Session.PopString(r.Context(), \"error\")\n\n\treturn td\n}", "title": "" }, { "docid": "b1531824f77a47c232b8d1316784d4e2", "score": "0.40617988", "text": "func NewLoginRequestWithDefaults() *LoginRequest {\n\tthis := LoginRequest{}\n\treturn &this\n}", "title": "" }, { "docid": "9fa3833892a42ae7fe5d625e28c79f6d", "score": "0.40587795", "text": "func (s stack) HasDefaultNetwork(context.Context) (bool, fail.Error) {\n\tif valid.IsNil(s) {\n\t\treturn false, fail.InvalidInstanceError()\n\t}\n\treturn s.vpc != nil, nil\n}", "title": "" }, { "docid": "22ce29ba93b7ad03018d2d8d8794842e", "score": "0.40569723", "text": "func Add(filePath string) {\n\t// get message printer\n\tmsgPrinter := i18n.GetMessagePrinter()\n\n\tvar inputs []policy.UserInput\n\tinputString := cliconfig.ReadJsonFileWithLocalConfig(filePath)\n\n\terr := json.Unmarshal([]byte(inputString), &inputs)\n\tif err != nil {\n\t\tcliutils.Fatal(cliutils.JSON_PARSING_ERROR, 
msgPrinter.Sprintf(\"Error unmarshaling userInput json file: %v\", err))\n\t}\n\n\tcliutils.HorizonPutPost(http.MethodPost, \"node/userinput\", []int{200, 201}, inputs, true)\n\tmsgPrinter.Printf(\"Horizon node user inputs updated.\")\n\tmsgPrinter.Println()\n}", "title": "" } ]
1ea7ddb47ea7914fb967dfd9988c0224
Validate inspects the fields of the type to determine if they are valid.
[ { "docid": "b2fe56ba300127d41e0804fa330aff6c", "score": "0.0", "text": "func (s *CreateHsmInput) Validate() error {\n\tinvalidParams := aws.ErrInvalidParams{Context: \"CreateHsmInput\"}\n\n\tif s.AvailabilityZone == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"AvailabilityZone\"))\n\t}\n\n\tif s.ClusterId == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"ClusterId\"))\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" } ]
[ { "docid": "0951b64e407b64ac2da556bcb0c4eeaf", "score": "0.66930544", "text": "func (n *Field) Validate() error {\n\treturn anyErr(\n\t\tcheckNotNilAndValidate(n, protoutil.OneOf(n.Struct), \"struct\"),\n\t\tcheckNotEmptyString(n, n.Name, \"name\"),\n\t)\n}", "title": "" }, { "docid": "5841caafa6503a8ac8e3b56b26451d2a", "score": "0.658739", "text": "func (d *FieldData) Validate() error {\n\tvar result *multierror.Error\n\n\t// Scan for missing required fields\n\tfor field, schema := range d.Schema {\n\t\tif schema.Required {\n\t\t\t_, ok := d.Raw[field]\n\t\t\tif !ok {\n\t\t\t\tresult = multierror.Append(result, fmt.Errorf(\n\t\t\t\t\t\"field %q is required\", field))\n\t\t\t}\n\t\t}\n\t}\n\n\t// Validate field type and value\n\tfor field, value := range d.Raw {\n\t\tschema, ok := d.Schema[field]\n\t\tif !ok {\n\t\t\tresult = multierror.Append(result, fmt.Errorf(\n\t\t\t\t\"%q is an invalid field\", field))\n\t\t\tcontinue\n\t\t}\n\n\t\tswitch schema.Type {\n\t\tcase TypeBool, TypeInt, TypeMap, TypeArray, TypeString:\n\t\t\tval, _, err := d.getPrimitive(field, schema)\n\t\t\tif err != nil {\n\t\t\t\tresult = multierror.Append(result, fmt.Errorf(\n\t\t\t\t\t\"field %q with input %q doesn't seem to be of type %s\",\n\t\t\t\t\tfield, value, schema.Type))\n\t\t\t}\n\t\t\t// Check that we don't have an empty value for required fields\n\t\t\tif schema.Required && val == schema.Type.Zero() {\n\t\t\t\tresult = multierror.Append(result, fmt.Errorf(\n\t\t\t\t\t\"field %q is required, but no value was found\", field))\n\t\t\t}\n\t\tdefault:\n\t\t\tresult = multierror.Append(result, fmt.Errorf(\n\t\t\t\t\"unknown field type %s for field %s\", schema.Type, field))\n\t\t}\n\t}\n\n\treturn result.ErrorOrNil()\n}", "title": "" }, { "docid": "2aeb83f6e588cbd86993cbf65e548ef6", "score": "0.6291441", "text": "func Validate(inobj interface{}) error {\n\tv := reflect.ValueOf(inobj)\n\tt := reflect.TypeOf(inobj)\n\n\tfor i := 0; i < v.NumField(); i++ {\n\t\tfield := v.Field(i)\n\n\t\ttag := t.Field(i).Tag\n\n\t\treq := tag.Get(\"require\")\n\t\tallow := tag.Get(\"allowVals\")\n\t\tgoodchars := tag.Get(\"goodChars\")\n\t\tbadchars := tag.Get(\"badChars\")\n\t\tmatch := tag.Get(\"match\")\n\n\t\tdoIt := len(req) > 0 || len(allow) > 0 || len(goodchars) > 0 || len(badchars) > 0 || len(match) > 0\n\n\t\tif field.CanInterface() && doIt {\n\t\t\tswitch field.Interface().(type) {\n\t\t\tcase string:\n\t\t\t\tif field.Len() > 0 {\n\t\t\t\t\tvalid, str := IsValid(field.String(), allow, goodchars, badchars, match)\n\t\t\t\t\tif !valid {\n\t\t\t\t\t\treturn fmt.Errorf(\"Field %v: %v\", t.Field(i).Name, str)\n\t\t\t\t\t}\n\t\t\t\t} else if len(req) > 0 {\n\t\t\t\t\treturn fmt.Errorf(\"Field %v: Missing required field\", t.Field(i).Name)\n\t\t\t\t}\n\t\t\tcase []string:\n\t\t\t\tif field.Len() > 0 {\n\t\t\t\t\tlist, _ := field.Interface().([]string)\n\t\t\t\t\tfor _, str := range list {\n\t\t\t\t\t\tvalid, str := IsValid(str, allow, goodchars, badchars, match)\n\t\t\t\t\t\tif !valid {\n\t\t\t\t\t\t\treturn fmt.Errorf(\"Field %v: %v\", t.Field(i).Name, str)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if len(req) > 0 {\n\t\t\t\t\treturn fmt.Errorf(\"Missing required field %v\", t.Field(i).Name)\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "0bf4eecf0d980186a19d1aed6beb9d96", "score": "0.6229557", "text": "func Validate(schema Schema) Errors {\n\tvar errs Errors\n\tfor field, v := range schema {\n\t\tif err := v.Validate(field); err != nil 
{\n\t\t\terrs.Extend(err)\n\t\t}\n\t}\n\treturn errs\n}", "title": "" }, { "docid": "565f4f5ceaa950aa628b322afb30d13b", "score": "0.6215323", "text": "func Validate(contentType string, fieldsDef map[string]definition.FieldDef, inputs InputMap, checkAllRequired bool) (bool, ValidationResult) {\n\t//todo: check max length\n\t//todo: check all kind of validation\n\tresult := ValidationResult{Fields: map[string]string{}}\n\n\t//check required\n\tfor identifier, fieldDef := range fieldsDef {\n\t\tinput, fieldExists := inputs[identifier]\n\t\tfieldResult := \"\"\n\t\t//validat both required and others together.\n\t\tif fieldExists {\n\t\t\tisEmpty := fieldtype.IsEmptyInput(input)\n\t\t\tif fieldDef.Required && isEmpty {\n\t\t\t\tfieldResult = \"1\"\n\t\t\t} else {\n\t\t\t\tfieldtypeDef := fieldtype.GetFieldtype(fieldDef.FieldType)\n\t\t\t\thandler := fieldtypeDef.NewHandler(fieldDef)\n\t\t\t\tif handler != nil {\n\t\t\t\t\t_, err := handler.LoadInput(input, \"\")\n\t\t\t\t\tif _, ok := err.(fieldtype.ValidationError); ok {\n\t\t\t\t\t\tfieldResult = err.Error()\n\t\t\t\t\t}\n\t\t\t\t\tif _, ok := err.(fieldtype.EmptyError); ok {\n\t\t\t\t\t\tfieldResult = \"1\"\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t} else if fieldDef.Required && checkAllRequired {\n\t\t\tfieldResult = \"1\"\n\t\t}\n\t\tif fieldResult != \"\" {\n\t\t\tresult.Fields[identifier] = fieldResult\n\t\t}\n\t}\n\n\treturn result.Passed(), result\n}", "title": "" }, { "docid": "251ca9df52e1b026c7835a5ecebc2045", "score": "0.6208048", "text": "func (model SolvedHCN) ValidateFields(structFields ...[]string) (bool, error) {\n\tif len(structFields) == 0 {\n\t\treturn helper.ValidateFields(model)\n\t}\n\treturn helper.ValidateFields(model, structFields[0])\n}", "title": "" }, { "docid": "ed95ba79ebd1568363ef8b2554653166", "score": "0.62034243", "text": "func (field *BaseField) Validate() error {\n if field.Required() && len(field.Values()) == 0 {\n field.AddError(errors.New(\"Field is required\"))\n }\n\n for _, validator := range field.Validators() {\n for _, value := range field.Values() {\n err := validator(value)\n if err != nil {\n field.AddError(err)\n }\n }\n }\n\n return field.Errors()\n}", "title": "" }, { "docid": "7c624b6604dd47cfe58a6365a7680f18", "score": "0.6148087", "text": "func (m *Field) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateOption(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateType(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "961655ceca210033cdaffb8af367be9b", "score": "0.6146007", "text": "func (m *FieldMetadata) Validate() error {\n\treturn m.validate(false)\n}", "title": "" }, { "docid": "a45be347ea0855dfb66cae3209735575", "score": "0.61411595", "text": "func (m *FieldDetailsForTestResults) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "title": "" }, { "docid": "d00714f4a3c899022cdceab26ef4a348", "score": "0.60702914", "text": "func (t *Table) Validate() error {\n\tm, n := t.Dims()\n\tif len(t.types) != n || len(t.body) != m*n {\n\t\treturn errors.New(errDims)\n\t}\n\n\tfor i := 0; i < m; i++ {\n\t\tfor k, j := i*n, 0; j < n; j++ {\n\t\t\tif Parse(t.body[k+j]) != t.types[j] {\n\t\t\t\treturn errors.New(errType)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "d5acc58a741dc638d798a80eea30e154", "score": "0.6041131", "text": "func Validate(s interface{}) (result bool, err 
error) {\n\tresult = true\n\tif s == nil {\n\t\treturn\n\t}\n\tval := reflect.ValueOf(s)\n\tif val.Kind() == reflect.Interface || val.Kind() == reflect.Ptr {\n\t\tval = val.Elem()\n\t}\n\tif val.Kind() != reflect.Struct {\n\t\tresult = false\n\t\terr = fmt.Errorf(\"function only accepts structs; got %s\", val.Kind())\n\t\treturn\n\t}\n\tvar errs Errors\n\tfor i := 0; i < val.NumField(); i++ {\n\t\tvalueField := val.Field(i)\n\t\ttypeField := val.Type().Field(i)\n\t\tif typeField.PkgPath != \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tresultField, err2 := typeCheck(valueField, typeField, val)\n\t\tif err2 != nil {\n\t\t\terrs = append(errs, err2)\n\t\t}\n\t\tresult = result && resultField\n\t}\n\tif len(errs) > 0 {\n\t\terr = errs\n\t}\n\treturn\n}", "title": "" }, { "docid": "5f0b0d13e0ce391d9b64128e74af9ae7", "score": "0.6037108", "text": "func (m *Field) Validate() error {\n\treturn m.validate(false)\n}", "title": "" }, { "docid": "e25f1bbbc150fb88f6c1822ffa2cae8e", "score": "0.60205156", "text": "func (i *isJSON) Validate(fields []string, errorMessages map[string]string) (error, []interface{}) {\n\n\tfield := getFirstKey(fields)\n\n\t// if blank, it is fine\n\tif len(field) == 0 {\n\t\treturn nil, nil\n\t}\n\n\tvar holder json.RawMessage\n\tif err := json.Unmarshal([]byte(field), &holder); err != nil { // parse json\n\t\treturn errors.New(errorMessages[\"json\"]), nil\n\t}\n\n\treturn nil, nil\n}", "title": "" }, { "docid": "ae4a20a6b63d68a16d3def96a9b9b1b2", "score": "0.6002921", "text": "func (form *Form) Validate() (ok bool) {\n ok = true\n if len(form.fields) == 0 {\n return\n }\n\n for _, field := range form.fields {\n if field.Validate() != nil {\n ok = false\n }\n }\n\n return\n}", "title": "" }, { "docid": "14671f2e12d54e34defb0d4e2e5edb9e", "score": "0.5933574", "text": "func (m *RunOnceFields) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Active\n\n\t// no validation rules for Completions\n\n\t// no validation rules for Failed\n\n\t// no validation rules for Succeeded\n\n\t// no validation rules for StartTime\n\n\t// no validation rules for CompletionTime\n\n\treturn nil\n}", "title": "" }, { "docid": "339eb664d31a4ab0d4ff72496a0b209c", "score": "0.5858464", "text": "func (m *HookType) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateEvents(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateFields(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6fac892ad20a1311367b2a82c884da96", "score": "0.5829632", "text": "func (mv *Validator) Validate(v interface{}) error {\n\tsv := reflect.ValueOf(v)\n\tst := reflect.TypeOf(v)\n\tif sv.Kind() == reflect.Ptr && !sv.IsNil() {\n\t\treturn mv.Validate(sv.Elem().Interface())\n\t}\n\tif sv.Kind() != reflect.Struct {\n\t\treturn ErrUnsupported\n\t}\n\n\tnfields := sv.NumField()\n\tm := make(ErrorMap)\n\tfor i := 0; i < nfields; i++ {\n\t\tf := sv.Field(i)\n\t\t// deal with pointers\n\t\tfor f.Kind() == reflect.Ptr && !f.IsNil() {\n\t\t\tf = f.Elem()\n\t\t}\n\t\ttag := st.Field(i).Tag.Get(mv.tagName)\n\t\tif tag == \"-\" {\n\t\t\tcontinue\n\t\t}\n\t\tfname := st.Field(i).Name\n\t\tif readJSONTag {\n\t\t\ttag := st.Field(i).Tag.Get(\"json\")\n\t\t\tif tag == \"-\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif tag != \"\" && tag != \"omitempty\" {\n\t\t\t\tfname = tag\n\t\t\t}\n\t\t}\n\t\tvar errs 
ErrorArray\n\n\t\tif tag != \"\" {\n\t\t\terr := mv.Valid(f.Interface(), tag)\n\t\t\tif errors, ok := err.(ErrorArray); ok {\n\t\t\t\terrs = errors\n\t\t\t} else {\n\t\t\t\tif err != nil {\n\t\t\t\t\terrs = ErrorArray{err}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif f.Kind() == reflect.Struct {\n\t\t\tif !unicode.IsUpper(rune(fname[0])) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\te := mv.Validate(f.Interface())\n\t\t\tif e, ok := e.(ErrorMap); ok && len(e) > 0 {\n\t\t\t\tfor j, k := range e {\n\t\t\t\t\tm[fname+\".\"+j] = k\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif len(errs) > 0 {\n\t\t\tm[fname] = errs\n\t\t}\n\t}\n\tif len(m) > 0 {\n\t\treturn m\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "760eaa214b9600f3052593fa6af68095", "score": "0.58263904", "text": "func (v *Validation) Validate(i interface{}) ValidationErrors {\n\terr := v.validate.Struct(i)\n\tif err == nil {\n\t\treturn nil\n\t}\n\terrs := err.(validator.ValidationErrors)\n\tif len(errs) == 0 {\n\t\treturn nil\n\t}\n\tvar returnErrs []ValidationError\n\tfor _, err := range errs {\n\t\t//Cast FieldError into ValidationError\n\t\tve := ValidationError{err.(validator.FieldError)}\n\t\treturnErrs = append(returnErrs, ve)\n\t}\n\treturn returnErrs\n}", "title": "" }, { "docid": "bb9872ed807daa6d1ba5ea6a75fe5b94", "score": "0.57980424", "text": "func (invoice Invoice) Validate(validate FieldsToValidate) error {\n\tvar err error\n\n\tif validate.PaymentDeadline {\n\t\terr = validation.ValidateStruct(&invoice,\n\t\t\tvalidation.Field(\n\t\t\t\t&invoice.PaymentDeadline,\n\t\t\t\tvalidation.Required,\n\t\t\t\tvalidation.Min(time.Now()),\n\t\t\t),\n\t\t)\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif validate.TotalAmount {\n\t\terr = validation.ValidateStruct(&invoice,\n\t\t\tvalidation.Field(\n\t\t\t\t&invoice.TotalAmount,\n\t\t\t\tvalidation.Required,\n\t\t\t),\n\t\t)\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif validate.PublicServiceID {\n\t\terr = validation.ValidateStruct(&invoice,\n\t\t\tvalidation.Field(\n\t\t\t\t&invoice.PublicServiceID,\n\t\t\t\tvalidation.Required,\n\t\t\t),\n\t\t)\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "578ae9b854c773d88e90d1b039163c3f", "score": "0.57144946", "text": "func (ut *UserPayload) Validate() (err error) {\n\tif ut.Fullname == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"fullname\"))\n\t}\n\tif ut.Email == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"email\"))\n\t}\n\tif err2 := goa.ValidateFormat(goa.FormatEmail, ut.Email); err2 != nil {\n\t\terr = goa.MergeErrors(err, goa.InvalidFormatError(`type.email`, ut.Email, goa.FormatEmail, err2))\n\t}\n\tif ok := goa.ValidatePattern(`^([a-zA-Z0-9 ]{4,30})$`, ut.Fullname); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`type.fullname`, ut.Fullname, `^([a-zA-Z0-9 ]{4,30})$`))\n\t}\n\tif ut.Password != nil {\n\t\tif utf8.RuneCountInString(*ut.Password) < 6 {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidLengthError(`type.password`, *ut.Password, utf8.RuneCountInString(*ut.Password), 6, true))\n\t\t}\n\t}\n\tif ut.Password != nil {\n\t\tif utf8.RuneCountInString(*ut.Password) > 30 {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidLengthError(`type.password`, *ut.Password, utf8.RuneCountInString(*ut.Password), 30, false))\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "7962274f2e1fb540c29ce83bfd42a308", "score": "0.5703782", "text": "func (m *ScopeFields) Validate() error {\n\tif m.AddressID == nil {\n\t\treturn 
&validationError{\n\t\t\terrType: \"required\",\n\t\t\tmessage: \"field required\",\n\t\t\tpath: []interface{}{\"AddressID\"},\n\t\t\tjsonPath: []interface{}{\"address_id\"},\n\t\t}\n\t}\n\tif m.Options == nil {\n\t\treturn &validationError{\n\t\t\terrType: \"required\",\n\t\t\tmessage: \"field required\",\n\t\t\tpath: []interface{}{\"Options\"},\n\t\t\tjsonPath: []interface{}{\"options\"},\n\t\t}\n\t}\n\tif err := m.Options.Validate(); err != nil {\n\t\tif err, ok := err.(valErr); ok {\n\t\t\treturn &validationError{\n\t\t\t\terrType: err.ErrType(),\n\t\t\t\tmessage: err.Message(),\n\t\t\t\tpath: append([]interface{}{\"Options\"}, err.Path()...),\n\t\t\t\tjsonPath: append([]interface{}{\"options\"}, err.JSONPath()...),\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "27f4e00b5c02d6559d7071bd242f4d2f", "score": "0.57028854", "text": "func (addenda12 *Addenda12) Validate() error {\n\tif err := addenda12.fieldInclusion(); err != nil {\n\t\treturn err\n\t}\n\tif err := addenda12.isTypeCode(addenda12.TypeCode); err != nil {\n\t\treturn fieldError(\"TypeCode\", err, addenda12.TypeCode)\n\t}\n\t// Type Code must be 12\n\tif addenda12.TypeCode != \"12\" {\n\t\treturn fieldError(\"TypeCode\", ErrAddendaTypeCode, addenda12.TypeCode)\n\t}\n\tif err := addenda12.isAlphanumeric(addenda12.OriginatorCityStateProvince); err != nil {\n\t\treturn fieldError(\"OriginatorCityStateProvince\", err, addenda12.OriginatorCityStateProvince)\n\t}\n\tif err := addenda12.isAlphanumeric(addenda12.OriginatorCountryPostalCode); err != nil {\n\t\treturn fieldError(\"OriginatorCountryPostalCode\", err, addenda12.OriginatorCountryPostalCode)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "a88f792c93ff3babdc18e341f35f874b", "score": "0.5700023", "text": "func (tv TypedValue) Validate() error {\n\tif errs := tv.walker().validate(); len(errs) != 0 {\n\t\treturn errs\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "aadcd049d968b652dd6c12788c989a08", "score": "0.5699571", "text": "func (u *UserDTO) Validate() error {\n\treturn validation.ValidateStruct(\n\t\tu,\n\t\tvalidation.Field(&u.Login, validation.Length(6, 30), is.Alpha),\n\t\tvalidation.Field(&u.FirstName, validation.Length(2, 40), is.Alpha),\n\t\tvalidation.Field(&u.SurName, validation.Length(2, 40), is.Alpha),\n\t\tvalidation.Field(&u.Email, is.EmailFormat),\n\t)\n}", "title": "" }, { "docid": "2f167e562eb58522c8c75e2306df0f6c", "score": "0.5694968", "text": "func (c Customer) Validate() error {\n\treturn validation.ValidateStruct(&c,\n\t\tvalidation.Field(&c.FirstName, validation.Required),\n\t\tvalidation.Field(&c.LastName, validation.Required),\n\t\tvalidation.Field(&c.Email, validation.Required, is.Email),\n\t)\n}", "title": "" }, { "docid": "5e9bb3d1e0ad12f225fd4b63c55ff5a1", "score": "0.5694331", "text": "func (vehicleType *VehicleType) Validate() errors.APIError {\n\n\tif vehicleType.Name == \"\" {\n\t\treturn errors.ValidationError(\"Vehicle type should not be empty\", \"Validation Error\")\n\t}\n\t//All the required parameters are present\n\treturn nil\n}", "title": "" }, { "docid": "953d049f67c4e2def3e235b9830f4f61", "score": "0.56919277", "text": "func (d *debug) Validate() error {\n\treturn nil\n}", "title": "" }, { "docid": "0f63caa014a4f0bcc1cbc823508d85c3", "score": "0.5644906", "text": "func (s *CreateAutoPredictorInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreateAutoPredictorInput\"}\n\tif s.ForecastDimensions != nil && len(s.ForecastDimensions) < 1 
{\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ForecastDimensions\", 1))\n\t}\n\tif s.ForecastFrequency != nil && len(*s.ForecastFrequency) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ForecastFrequency\", 1))\n\t}\n\tif s.ForecastTypes != nil && len(s.ForecastTypes) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ForecastTypes\", 1))\n\t}\n\tif s.PredictorName == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"PredictorName\"))\n\t}\n\tif s.PredictorName != nil && len(*s.PredictorName) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"PredictorName\", 1))\n\t}\n\tif s.DataConfig != nil {\n\t\tif err := s.DataConfig.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"DataConfig\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.EncryptionConfig != nil {\n\t\tif err := s.EncryptionConfig.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"EncryptionConfig\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.MonitorConfig != nil {\n\t\tif err := s.MonitorConfig.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"MonitorConfig\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Tags != nil {\n\t\tfor i, v := range s.Tags {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Tags\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.TimeAlignmentBoundary != nil {\n\t\tif err := s.TimeAlignmentBoundary.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"TimeAlignmentBoundary\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "342e392efcf7b44d806697ec343e1cc2", "score": "0.564374", "text": "func (r *PackedTrailerRecord) Validate() error {\n\tfields := reflect.ValueOf(r).Elem()\n\tfor i := 0; i < fields.NumField(); i++ {\n\t\tfieldName := fields.Type().Field(i).Name\n\t\tif !fields.IsValid() {\n\t\t\treturn utils.ErrValidField\n\t\t}\n\n\t\tif spec, ok := trailerRecordPackedFormat[fieldName]; ok {\n\t\t\tif spec.Required == required {\n\t\t\t\tfieldValue := fields.FieldByName(fieldName)\n\t\t\t\tif fieldValue.IsZero() {\n\t\t\t\t\treturn utils.ErrFieldRequired\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "183fec269da39962c20447a35c690393", "score": "0.5610163", "text": "func (f *Field) Validate() error {\n\treturn f.f(f.name, f.src)\n}", "title": "" }, { "docid": "b9b8072dce4b918f477faa2e808129fc", "score": "0.56082475", "text": "func (øField *Field) Validate() (errs []error) {\n\tif len(øField.enum) > 0 {\n\t\tvar valid bool\n\t\tvar val = øField.Get()\n\t\tfor _, en := range øField.enum {\n\t\t\tif val == en.Get() {\n\t\t\t\tvalid = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !valid {\n\t\t\terrs = append(errs, fmt.Errorf(\"value not part of enum values\"))\n\t\t}\n\t}\n\tif øField.Validator != nil {\n\t\terrs = append(errs, øField.Validator(øField)...)\n\t}\n\treturn\n}", "title": "" }, { "docid": "ac4a3c449065e7143567cc6716c321cc", "score": "0.5604227", "text": "func (ts TypeSpec) Validate(ps PipelineSpec) error {\n\tif err := ValidateName(ts.Name); err != nil {\n\t\treturn maskAny(err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "db0aa701f4c01d3469e013ffb3a1c925", "score": "0.560085", "text": "func (m *Instance) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Environment\n\n\t// no 
validation rules for NumOfEquipments\n\n\t// no validation rules for NumOfProducts\n\n\treturn nil\n}", "title": "" }, { "docid": "47934eb773fcc095b0d4fd41041085e3", "score": "0.5598898", "text": "func (m *ReservationFields) Validate() error {\n\tif m.Options == nil {\n\t\treturn &validationError{\n\t\t\terrType: \"required\",\n\t\t\tmessage: \"field required\",\n\t\t\tpath: []interface{}{\"Options\"},\n\t\t\tjsonPath: []interface{}{\"options\"},\n\t\t}\n\t}\n\tif m.Mac != nil && !reservationFieldsMacPattern.MatchString(*m.Mac) {\n\t\treturn &validationError{\n\t\t\terrType: \"pattern\",\n\t\t\tpath: []interface{}{\"Mac\"},\n\t\t\tjsonPath: []interface{}{\"mac\"},\n\t\t\tmessage: fmt.Sprintf(`must match '^([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}$' but got %q`, *m.Mac),\n\t\t}\n\t}\n\tif err := m.Options.Validate(); err != nil {\n\t\tif err, ok := err.(valErr); ok {\n\t\t\treturn &validationError{\n\t\t\t\terrType: err.ErrType(),\n\t\t\t\tmessage: err.Message(),\n\t\t\t\tpath: append([]interface{}{\"Options\"}, err.Path()...),\n\t\t\t\tjsonPath: append([]interface{}{\"options\"}, err.JSONPath()...),\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "de5ce3aeecec18a1e1f8bd7056216032", "score": "0.5595457", "text": "func (cv *CheckValidator) Validate(i interface{}) error {\n\treturn cv.validator.Struct(i)\n}", "title": "" }, { "docid": "33b16cccadf4206a81ae8d1c1a6d3923", "score": "0.5592943", "text": "func (s *PutModelInput) Validate() error {\n\tinvalidParams := aws.ErrInvalidParams{Context: \"PutModelInput\"}\n\tif s.Description != nil && len(*s.Description) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"Description\", 1))\n\t}\n\n\tif s.LabelSchema == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"LabelSchema\"))\n\t}\n\n\tif s.ModelId == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"ModelId\"))\n\t}\n\tif s.ModelId != nil && len(*s.ModelId) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"ModelId\", 1))\n\t}\n\tif len(s.ModelType) == 0 {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"ModelType\"))\n\t}\n\n\tif s.ModelVariables == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"ModelVariables\"))\n\t}\n\n\tif s.TrainingDataSource == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"TrainingDataSource\"))\n\t}\n\tif s.LabelSchema != nil {\n\t\tif err := s.LabelSchema.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"LabelSchema\", err.(aws.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.ModelVariables != nil {\n\t\tfor i, v := range s.ModelVariables {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"ModelVariables\", i), err.(aws.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.TrainingDataSource != nil {\n\t\tif err := s.TrainingDataSource.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"TrainingDataSource\", err.(aws.ErrInvalidParams))\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "40379179389b4418bd9aeb56e3f2b540", "score": "0.55750746", "text": "func (u User) Validate() error {\n\treturn validation.ValidateStruct(&u,\n\t\tvalidation.Field(&u.Name, validation.Required),\n\t\tvalidation.Field(&u.Created, validation.Required))\n}", "title": "" }, { "docid": "51013329040d4ccfb74eae6b7dbd3027", "score": "0.55638736", "text": "func (r *TrailerRecord) Validate() error {\n\tfields := reflect.ValueOf(r).Elem()\n\tfor i := 0; i < fields.NumField(); i++ {\n\t\tfieldName := 
fields.Type().Field(i).Name\n\t\tif !fields.IsValid() {\n\t\t\treturn utils.ErrValidField\n\t\t}\n\n\t\tif spec, ok := trailerRecordCharacterFormat[fieldName]; ok {\n\t\t\tif spec.Required == required {\n\t\t\t\tfieldValue := fields.FieldByName(fieldName)\n\t\t\t\tif fieldValue.IsZero() {\n\t\t\t\t\treturn utils.ErrFieldRequired\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "ef6a8dc95f47b20a6f6dd6afc4c1f3ca", "score": "0.5563677", "text": "func (object Author) Validate() error {\n\t//\terrs := validate.Struct(object)\n\t//\tif errs != nil {\n\t//\t\treturn fmt.Errorf(\"%v\", errs)\n\t//\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "0b3f630a3d8e765db814da5366912b46", "score": "0.55625", "text": "func Validate(v map[string]ValidPair) (bool, error) {\n\tvalid := true\n\tvar err ValidationError\n\tfor field := range v {\n\t\tswitch {\n\t\tcase strings.Contains(v[field].tag, \"req\") && v[field].value == \"\":\n\t\t\terr.s += fmt.Sprintf(\"%s: value is required; \", field)\n\t\t\tvalid = false\n\t\tcase strings.Contains(v[field].tag, \"alpha\") && !isAlpha(v[field].value):\n\t\t\terr.s += fmt.Sprintf(\"%s: value can only contain letters; \", field)\n\t\t\tvalid = false\n\t\tcase strings.Contains(v[field].tag, \"alph-num\") && !isAlphaNum(v[field].value):\n\t\t\terr.s += fmt.Sprintf(\"%s: value can only contain alphanumeric characters; \", field)\n\t\t\tvalid = false\n\t\tcase strings.Contains(v[field].tag, \"email\") && !isEmail(v[field].value):\n\t\t\terr.s += fmt.Sprintf(\"%s: value must be a valid email;\", field)\n\t\t\tvalid = false\n\t\tcase strings.Contains(v[field].tag, \"uuid\") && !isUUID(v[field].value):\n\t\t\terr.s += fmt.Sprintf(\"%s: value must be a valid UUID;\", field)\n\t\t\t//valid = false\n\t\t}\n\t}\n\treturn valid, err\n}", "title": "" }, { "docid": "28471f6855e8ae07c7476cb399ae1ae7", "score": "0.5550939", "text": "func (cv *CustomValidator) Validate(i interface{}) error {\n\treturn cv.Validator.Struct(i)\n}", "title": "" }, { "docid": "effc3eeddfd6bc9bfb05e1ccb2225543", "score": "0.55505615", "text": "func Validate(t interface{}) error {\n\treturn validator.Struct(t)\n}", "title": "" }, { "docid": "effc3eeddfd6bc9bfb05e1ccb2225543", "score": "0.55505615", "text": "func Validate(t interface{}) error {\n\treturn validator.Struct(t)\n}", "title": "" }, { "docid": "b950be935817cd41312af669f4edbacd", "score": "0.5549103", "text": "func validateBoundaryFields(schema *ast.Schema) error {\n\tboundaryTypes := make(map[string]struct{})\n\tfor _, t := range schema.Types {\n\t\tif t.Kind == ast.Object && isBoundaryObject(t) {\n\t\t\tboundaryTypes[t.Name] = struct{}{}\n\t\t}\n\t}\n\n\tfor _, f := range schema.Query.Fields {\n\t\tif hasBoundaryDirective(f) {\n\t\t\tif _, ok := boundaryTypes[f.Type.Name()]; !ok {\n\t\t\t\treturn fmt.Errorf(\"declared boundary query for non-boundary type %q\", f.Type.Name())\n\t\t\t}\n\n\t\t\tdelete(boundaryTypes, f.Type.Name())\n\t\t}\n\t}\n\n\tif len(boundaryTypes) > 0 {\n\t\tvar missingBoundaryQueries []string\n\t\tfor k := range boundaryTypes {\n\t\t\tmissingBoundaryQueries = append(missingBoundaryQueries, k)\n\t\t}\n\n\t\treturn fmt.Errorf(\"missing boundary queries for the following types: %v\", missingBoundaryQueries)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "d990da2928cc48d73492072c572f79c6", "score": "0.5541377", "text": "func (m AuthType) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\t// value enum\n\tif err := m.validateAuthTypeEnum(\"\", \"body\", m); err != nil {\n\t\treturn 
err\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e6a420bf8285845d560bd869ff211aef", "score": "0.5540443", "text": "func (s *BatchGetFieldInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"BatchGetFieldInput\"}\n\tif s.DomainId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"DomainId\"))\n\t}\n\tif s.DomainId != nil && len(*s.DomainId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"DomainId\", 1))\n\t}\n\tif s.Fields == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"Fields\"))\n\t}\n\tif s.Fields != nil && len(s.Fields) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Fields\", 1))\n\t}\n\tif s.Fields != nil {\n\t\tfor i, v := range s.Fields {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Fields\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "a27eafb14be9a6556b28c5120fadbbce", "score": "0.55397534", "text": "func (s *CreateClusterInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreateClusterInput\"}\n\tif s.AddressId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"AddressId\"))\n\t}\n\tif s.AddressId != nil && len(*s.AddressId) < 40 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"AddressId\", 40))\n\t}\n\tif s.Description != nil && len(*s.Description) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Description\", 1))\n\t}\n\tif s.ForwardingAddressId != nil && len(*s.ForwardingAddressId) < 40 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ForwardingAddressId\", 40))\n\t}\n\tif s.JobType == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"JobType\"))\n\t}\n\tif s.ShippingOption == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"ShippingOption\"))\n\t}\n\tif s.SnowballType == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"SnowballType\"))\n\t}\n\tif s.OnDeviceServiceConfiguration != nil {\n\t\tif err := s.OnDeviceServiceConfiguration.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"OnDeviceServiceConfiguration\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Resources != nil {\n\t\tif err := s.Resources.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"Resources\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.TaxDocuments != nil {\n\t\tif err := s.TaxDocuments.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"TaxDocuments\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "2ea8c971adbe96c67f99593a9e29effe", "score": "0.5539723", "text": "func (s *CreateAnalysisInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreateAnalysisInput\"}\n\tif s.AnalysisId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"AnalysisId\"))\n\t}\n\tif s.AnalysisId != nil && len(*s.AnalysisId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"AnalysisId\", 1))\n\t}\n\tif s.AwsAccountId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"AwsAccountId\"))\n\t}\n\tif s.AwsAccountId != nil && len(*s.AwsAccountId) < 12 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"AwsAccountId\", 12))\n\t}\n\tif s.Name == nil 
{\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"Name\"))\n\t}\n\tif s.Name != nil && len(*s.Name) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Name\", 1))\n\t}\n\tif s.Permissions != nil && len(s.Permissions) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Permissions\", 1))\n\t}\n\tif s.SourceEntity == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"SourceEntity\"))\n\t}\n\tif s.Tags != nil && len(s.Tags) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Tags\", 1))\n\t}\n\tif s.Parameters != nil {\n\t\tif err := s.Parameters.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"Parameters\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Permissions != nil {\n\t\tfor i, v := range s.Permissions {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Permissions\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.SourceEntity != nil {\n\t\tif err := s.SourceEntity.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"SourceEntity\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Tags != nil {\n\t\tfor i, v := range s.Tags {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Tags\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "bbf7b9be6e0e7e39c1d9d7dd8157060f", "score": "0.5539706", "text": "func (ec *EventClass) Validate() error {\n\treturn validation.ValidateStruct(ec,\n\t\tvalidation.Field(&ec.TableName, validation.Required, validation.Length(1, 60)),\n\t\tvalidation.Field(&ec.Filter, validation.Required),\n\t\tvalidation.Field(&ec.FieldMappings, validation.Required, validation.Length(1, 0)),\n\t)\n}", "title": "" }, { "docid": "6e6c32ae9cf08117865a90534e13e057", "score": "0.5537795", "text": "func (mv *Validator) Validate(v interface{}) ErrorMap {\n\tvar (\n\t\tsv = reflect.ValueOf(v)\n\t\tst = reflect.TypeOf(v)\n\t\tm = make(ErrorMap)\n\t)\n\n\tif sv.Kind() == reflect.Ptr && !sv.IsNil() {\n\t\treturn mv.Validate(sv.Elem().Interface())\n\t}\n\tif sv.Kind() != reflect.Struct {\n\t\tm[\"_summary\"] = ErrUnsupported\n\t\treturn m\n\t}\n\n\tnfields := sv.NumField()\n\tfor i := 0; i < nfields; i++ {\n\t\tvar (\n\t\t\tf = sv.Field(i)\n\t\t\tfname = st.Field(i).Name\n\t\t\terrs ErrorArray\n\t\t)\n\n\t\t// deal with pointers\n\t\tfor f.Kind() == reflect.Ptr && !f.IsNil() {\n\t\t\tf = f.Elem()\n\t\t}\n\n\t\ttag := st.Field(i).Tag.Get(mv.tagName)\n\t\tif tag == \"-\" || (tag == \"\" && f.Kind() != reflect.Struct) {\n\t\t\tcontinue\n\t\t}\n\n\t\t// parse tags on the highest level to pass further\n\t\ttags, err := mv.parseTags(tag)\n\t\tif err != nil {\n\t\t\tm[fname] = err\n\t\t\tcontinue\n\t\t}\n\n\t\t// custom field alias\n\t\tif nameTag, exists := tags.getByName(tagAttr); exists {\n\t\t\tfname = nameTag.Param\n\t\t}\n\n\t\tswitch f.Kind() {\n\t\t// nested struct\n\t\tcase reflect.Struct:\n\t\t\tif !unicode.IsUpper(rune(fname[0])) {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\te := mv.Validate(f.Interface())\n\t\t\tfor j, k := range e {\n\t\t\t\t// Nested struct gets alias of parent struct\n\t\t\t\t// as a prefix\n\t\t\t\tm[fname+\".\"+j] = k\n\t\t\t}\n\n\t\t\t// flat struct\n\t\tdefault:\n\t\t\terr := mv.valid(f.Interface(), tags)\n\t\t\tif errors, ok := err.(ErrorArray); ok {\n\t\t\t\terrs = errors\n\t\t\t} 
else {\n\t\t\t\tif err != nil {\n\t\t\t\t\terrs = ErrorArray{err}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif len(errs) > 0 {\n\t\t\tm[fname] = errs[0]\n\t\t}\n\t}\n\n\treturn m\n}", "title": "" }, { "docid": "37baa1fedd7af0a022c1cc074d6d1a31", "score": "0.55372953", "text": "func (vwad visibilityWithAdditionalDetails) Validate() error {\n\tif vwad.Resource == nil {\n\t\treturn fmt.Errorf(\"resource in notification payload cannot be nil\")\n\t}\n\n\tif vwad.Resource.ID == \"\" {\n\t\treturn fmt.Errorf(\"visibility id cannot be empty\")\n\t}\n\n\tif vwad.Resource.ServicePlanID == \"\" {\n\t\treturn fmt.Errorf(\"visibility service plan id cannot be empty\")\n\t}\n\n\treturn vwad.Additional.Validate()\n}", "title": "" }, { "docid": "a5eb4ba27eff6b083ef9fba26a2a53e7", "score": "0.5537046", "text": "func (s *CustomSql) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CustomSql\"}\n\tif s.Columns != nil && len(s.Columns) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Columns\", 1))\n\t}\n\tif s.DataSourceArn == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"DataSourceArn\"))\n\t}\n\tif s.Name == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"Name\"))\n\t}\n\tif s.Name != nil && len(*s.Name) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Name\", 1))\n\t}\n\tif s.SqlQuery == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"SqlQuery\"))\n\t}\n\tif s.SqlQuery != nil && len(*s.SqlQuery) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"SqlQuery\", 1))\n\t}\n\tif s.Columns != nil {\n\t\tfor i, v := range s.Columns {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Columns\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "eadb47f287dedc4b9ef75de9c0a08efd", "score": "0.5535036", "text": "func (m *Type2) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "7ecfee0ac9c27396b87b1b3ba54de7e3", "score": "0.55341464", "text": "func (s *MqttHeaders) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"MqttHeaders\"}\n\tif s.UserProperties != nil && len(s.UserProperties) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"UserProperties\", 1))\n\t}\n\tif s.UserProperties != nil {\n\t\tfor i, v := range s.UserProperties {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"UserProperties\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c63b6d9481fff29968c9c13be08b4ea4", "score": "0.5526698", "text": "func (s *SchemaValidator) Validate(data interface{}) *Result {\n\tresult := new(Result)\n\tif s == nil {\n\t\treturn result\n\t}\n\n\tif data == nil {\n\t\tresult.Merge(s.validators[0].Validate(data)) // type validator\n\t\tresult.Merge(s.validators[6].Validate(data)) // common validator\n\t\treturn result\n\t}\n\n\ttpe := reflect.TypeOf(data)\n\tkind := tpe.Kind()\n\tfor kind == reflect.Ptr {\n\t\ttpe = tpe.Elem()\n\t\tkind = tpe.Kind()\n\t}\n\td := data\n\n\tif kind == reflect.Struct {\n\t\t// NOTE: since reflect retrieves the true nature of 
types\n\t\t// this means that all strfmt types passed here (e.g. strfmt.Datetime, etc..)\n\t\t// are converted here to strings, and structs are systematically converted\n\t\t// to map[string]interface{}.\n\t\td = swag.ToDynamicJSON(data)\n\t}\n\n\t// TODO: this part should be handed over to type validator\n\t// Handle special case of json.Number data (number marshalled as string)\n\tisnumber := s.Schema.Type.Contains(numberType) || s.Schema.Type.Contains(integerType)\n\tif num, ok := data.(json.Number); ok && isnumber {\n\t\tif s.Schema.Type.Contains(integerType) { // avoid lossy conversion\n\t\t\tin, erri := num.Int64()\n\t\t\tif erri != nil {\n\t\t\t\tresult.AddErrors(invalidTypeConversionMsg(s.Path, erri))\n\t\t\t\tresult.Inc()\n\t\t\t\treturn result\n\t\t\t}\n\t\t\td = in\n\t\t} else {\n\t\t\tnf, errf := num.Float64()\n\t\t\tif errf != nil {\n\t\t\t\tresult.AddErrors(invalidTypeConversionMsg(s.Path, errf))\n\t\t\t\tresult.Inc()\n\t\t\t\treturn result\n\t\t\t}\n\t\t\td = nf\n\t\t}\n\n\t\ttpe = reflect.TypeOf(d)\n\t\tkind = tpe.Kind()\n\t}\n\n\tfor _, v := range s.validators {\n\t\tif !v.Applies(s.Schema, kind) {\n\t\t\tdebugLog(\"%T does not apply for %v\", v, kind)\n\t\t\tcontinue\n\t\t}\n\n\t\terr := v.Validate(d)\n\t\tresult.Merge(err)\n\t\tresult.Inc()\n\t}\n\tresult.Inc()\n\treturn result\n}", "title": "" }, { "docid": "0ede59be7a46e99b034c15d458eb1d5f", "score": "0.5522874", "text": "func (s *CreateTemplateInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreateTemplateInput\"}\n\tif s.AwsAccountId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"AwsAccountId\"))\n\t}\n\tif s.AwsAccountId != nil && len(*s.AwsAccountId) < 12 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"AwsAccountId\", 12))\n\t}\n\tif s.Name != nil && len(*s.Name) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Name\", 1))\n\t}\n\tif s.Permissions != nil && len(s.Permissions) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Permissions\", 1))\n\t}\n\tif s.SourceEntity == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"SourceEntity\"))\n\t}\n\tif s.Tags != nil && len(s.Tags) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Tags\", 1))\n\t}\n\tif s.TemplateId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"TemplateId\"))\n\t}\n\tif s.TemplateId != nil && len(*s.TemplateId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"TemplateId\", 1))\n\t}\n\tif s.VersionDescription != nil && len(*s.VersionDescription) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"VersionDescription\", 1))\n\t}\n\tif s.Permissions != nil {\n\t\tfor i, v := range s.Permissions {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Permissions\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.SourceEntity != nil {\n\t\tif err := s.SourceEntity.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"SourceEntity\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Tags != nil {\n\t\tfor i, v := range s.Tags {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Tags\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "47b2f6881311c5b6673e3e4ecde67671", "score": "0.55194396", "text": "func (s 
*PutEventTypeInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"PutEventTypeInput\"}\n\tif s.Description != nil && len(*s.Description) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Description\", 1))\n\t}\n\tif s.EntityTypes == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"EntityTypes\"))\n\t}\n\tif s.EntityTypes != nil && len(s.EntityTypes) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"EntityTypes\", 1))\n\t}\n\tif s.EventVariables == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"EventVariables\"))\n\t}\n\tif s.EventVariables != nil && len(s.EventVariables) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"EventVariables\", 1))\n\t}\n\tif s.Name == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"Name\"))\n\t}\n\tif s.Name != nil && len(*s.Name) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Name\", 1))\n\t}\n\tif s.EventOrchestration != nil {\n\t\tif err := s.EventOrchestration.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"EventOrchestration\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Tags != nil {\n\t\tfor i, v := range s.Tags {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Tags\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "5be4545fd7b280afe555b3464c3669c9", "score": "0.55160666", "text": "func (s *RegisterDomainInput) Validate() error {\n\tinvalidParams := aws.ErrInvalidParams{Context: \"RegisterDomainInput\"}\n\n\tif s.AdminContact == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"AdminContact\"))\n\t}\n\n\tif s.DomainName == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"DomainName\"))\n\t}\n\n\tif s.DurationInYears == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"DurationInYears\"))\n\t}\n\tif s.DurationInYears != nil && *s.DurationInYears < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinValue(\"DurationInYears\", 1))\n\t}\n\n\tif s.RegistrantContact == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"RegistrantContact\"))\n\t}\n\n\tif s.TechContact == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"TechContact\"))\n\t}\n\tif s.AdminContact != nil {\n\t\tif err := s.AdminContact.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"AdminContact\", err.(aws.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.RegistrantContact != nil {\n\t\tif err := s.RegistrantContact.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"RegistrantContact\", err.(aws.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.TechContact != nil {\n\t\tif err := s.TechContact.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"TechContact\", err.(aws.ErrInvalidParams))\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "011e1a63d91406c00ea17cbee08f3d39", "score": "0.5513748", "text": "func (gi GraphInfo) Validate() error {\n\tif gi.ID == \"\" {\n\t\treturn errors.New(\"id is empty. plz input\")\n\t}\n\tif gi.Name == \"\" {\n\t\treturn errors.New(\"name is empty. plz input\")\n\t}\n\tif gi.Unit == \"\" {\n\t\treturn errors.New(\"unit name is empty. plz input\")\n\t}\n\n\tswitch gi.UnitType {\n\tcase \"int\", \"float\":\n\tdefault:\n\t\treturn errors.New(\"invalid unit type. 
expected int or float\")\n\t}\n\n\tswitch gi.Color {\n\tcase \"shibafu\", \"momiji\", \"sora\", \"ichou\", \"ajisai\", \"kuro\":\n\tdefault:\n\t\treturn errors.New(\"invalid color\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "30d7d654cbfd30713ff0c17e0f2f15be", "score": "0.5512675", "text": "func (d *Definition) Validate() (bool, error) {\n\treturn govalidator.ValidateStruct(d)\n}", "title": "" }, { "docid": "79112a0e56e414d8eea9def2f5706ebf", "score": "0.55089056", "text": "func (s *PutExternalModelInput) Validate() error {\n\tinvalidParams := aws.ErrInvalidParams{Context: \"PutExternalModelInput\"}\n\tif s.EventTypeName != nil && len(*s.EventTypeName) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"EventTypeName\", 1))\n\t}\n\n\tif s.InputConfiguration == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"InputConfiguration\"))\n\t}\n\n\tif s.ModelEndpoint == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"ModelEndpoint\"))\n\t}\n\tif s.ModelEndpoint != nil && len(*s.ModelEndpoint) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"ModelEndpoint\", 1))\n\t}\n\tif len(s.ModelEndpointStatus) == 0 {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"ModelEndpointStatus\"))\n\t}\n\tif len(s.ModelSource) == 0 {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"ModelSource\"))\n\t}\n\n\tif s.OutputConfiguration == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"OutputConfiguration\"))\n\t}\n\n\tif s.Role == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"Role\"))\n\t}\n\tif s.InputConfiguration != nil {\n\t\tif err := s.InputConfiguration.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"InputConfiguration\", err.(aws.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.OutputConfiguration != nil {\n\t\tif err := s.OutputConfiguration.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"OutputConfiguration\", err.(aws.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Role != nil {\n\t\tif err := s.Role.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"Role\", err.(aws.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Tags != nil {\n\t\tfor i, v := range s.Tags {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Tags\", i), err.(aws.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "33c5c078e7826f62b5957b7b04135cb6", "score": "0.5507335", "text": "func Validate(schema interface{}, errors []response.Error) {\n\tvalidate := validator.New()\n\n\tif err := validate.Struct(schema); err != nil {\n\t\tif _, ok := err.(*validator.InvalidValidationError); ok {\n\t\t\terrors = append(errors, response.Error{\n\t\t\t\tMessage: err.Error(),\n\t\t\t\tFlag: \"INVALID_VALIDATION_SCHEMA\",\n\t\t\t})\n\t\t}\n\n\t\tfor _, err := range err.(validator.ValidationErrors) {\n\t\t\terrors = append(errors, response.Error{\n\t\t\t\tMessage: fmt.Sprint(err),\n\t\t\t\tFlag: fmt.Sprintf(\"INVALID_VALIDATION_%s\", strings.ToUpper(err.Field())),\n\t\t\t})\n\t\t}\n\n\t\tresponse.BadRequest(\"Validation error\", errors)\n\t}\n\n\tif errors != nil {\n\t\tresponse.BadRequest(\"Validation error\", errors)\n\t}\n}", "title": "" }, { "docid": "840923e60536fffca5c97b3d0d6f8ef8", "score": "0.55064243", "text": "func (n *TypeFilterNode) Validate() []error {\n\treturn ValidateConnectivity(n)\n}", "title": "" }, { "docid": "f28b012e80fa37515d2019f3fcb88da2", "score": "0.55063283", "text": "func (m *MetricType) Validate() error {\n\tif m == nil {\n\t\treturn 
nil\n\t}\n\n\t// no validation rules for Name\n\n\t// no validation rules for Description\n\n\t// no validation rules for Href\n\n\t// no validation rules for TypeId\n\n\treturn nil\n}", "title": "" }, { "docid": "51f42be8fef1f88dc024128892b56b8e", "score": "0.55028653", "text": "func (r *ClientRequest) Validate(fieldlist []string) error {\n\tfor _, fieldname := range fieldlist {\n\t\t_, exists := r.Data[fieldname]\n\t\tif !exists {\n\t\t\treturn fmt.Errorf(\"missing field %s\", fieldname)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d2651a62a3fa6bab8c7c7b6a47b4357a", "score": "0.5498253", "text": "func (s *CreateWhatIfForecastExportInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreateWhatIfForecastExportInput\"}\n\tif s.Destination == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"Destination\"))\n\t}\n\tif s.WhatIfForecastArns == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"WhatIfForecastArns\"))\n\t}\n\tif s.WhatIfForecastArns != nil && len(s.WhatIfForecastArns) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"WhatIfForecastArns\", 1))\n\t}\n\tif s.WhatIfForecastExportName == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"WhatIfForecastExportName\"))\n\t}\n\tif s.WhatIfForecastExportName != nil && len(*s.WhatIfForecastExportName) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"WhatIfForecastExportName\", 1))\n\t}\n\tif s.Destination != nil {\n\t\tif err := s.Destination.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"Destination\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Tags != nil {\n\t\tfor i, v := range s.Tags {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Tags\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "39a28f218a8111f2a723b5ac676b8fd3", "score": "0.5496824", "text": "func (h *Have) Fields(args ...string) *Have {\n\tmsg := h.msg(Sprintf(\"fields: %v\", args))\n\tswitch reflect.TypeOf(h.actual).Kind() {\n\tcase reflect.Struct:\n\t\tv := reflect.ValueOf(h.actual)\n\t\tfor _, f := range args {\n\t\t\tif v.FieldByName(f).IsValid() != h.assert {\n\t\t\t\th.fail(2, msg)\n\t\t\t}\n\t\t}\n\tdefault:\n\t\th.t.Fatal(invMsg(\"Struct\"))\n\t}\n\treturn h\n}", "title": "" }, { "docid": "bddec8ac61b4d4ccaf35e73eb7852462", "score": "0.5494363", "text": "func (v Validator) Validate() error {\n\tif len(v.Moniker) > 64 {\n\t\treturn ErrInvalidMonikerLength\n\t}\n\n\tif len(v.Website) > 64 {\n\t\treturn ErrInvalidWebsiteLength\n\t}\n\n\tif len(v.Social) > 64 {\n\t\treturn ErrInvalidSocialLength\n\t}\n\n\tif len(v.Identity) > 64 {\n\t\treturn ErrInvalidIdentityLength\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "52564356a9c0b7d8181a3bb1d870b131", "score": "0.5492912", "text": "func (fns *Fns) Validate(typ reflect.Type) bool {\n\treturn validateStateFn(fns.Save, typ) && validateStateFn(fns.Load, typ)\n}", "title": "" }, { "docid": "8a351289cbe91273672256a4fc26acdb", "score": "0.5491831", "text": "func (s *CreateModelVersionInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreateModelVersionInput\"}\n\tif s.ModelId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"ModelId\"))\n\t}\n\tif s.ModelId != nil && len(*s.ModelId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ModelId\", 
1))\n\t}\n\tif s.ModelType == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"ModelType\"))\n\t}\n\tif s.TrainingDataSchema == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"TrainingDataSchema\"))\n\t}\n\tif s.TrainingDataSource == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"TrainingDataSource\"))\n\t}\n\tif s.ExternalEventsDetail != nil {\n\t\tif err := s.ExternalEventsDetail.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"ExternalEventsDetail\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.IngestedEventsDetail != nil {\n\t\tif err := s.IngestedEventsDetail.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"IngestedEventsDetail\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Tags != nil {\n\t\tfor i, v := range s.Tags {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Tags\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.TrainingDataSchema != nil {\n\t\tif err := s.TrainingDataSchema.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"TrainingDataSchema\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "2318ad61ff6e233bf22e330561c3be0a", "score": "0.5491463", "text": "func (s *CreateForecastInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreateForecastInput\"}\n\tif s.ForecastName == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"ForecastName\"))\n\t}\n\tif s.ForecastName != nil && len(*s.ForecastName) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ForecastName\", 1))\n\t}\n\tif s.ForecastTypes != nil && len(s.ForecastTypes) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ForecastTypes\", 1))\n\t}\n\tif s.PredictorArn == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"PredictorArn\"))\n\t}\n\tif s.Tags != nil {\n\t\tfor i, v := range s.Tags {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Tags\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.TimeSeriesSelector != nil {\n\t\tif err := s.TimeSeriesSelector.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"TimeSeriesSelector\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "2d62cf644a9432446a073d85d20a4fb8", "score": "0.54909635", "text": "func (s *GetCaseInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"GetCaseInput\"}\n\tif s.CaseId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"CaseId\"))\n\t}\n\tif s.CaseId != nil && len(*s.CaseId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"CaseId\", 1))\n\t}\n\tif s.DomainId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"DomainId\"))\n\t}\n\tif s.DomainId != nil && len(*s.DomainId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"DomainId\", 1))\n\t}\n\tif s.Fields == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"Fields\"))\n\t}\n\tif s.Fields != nil && len(s.Fields) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Fields\", 1))\n\t}\n\tif s.Fields != nil {\n\t\tfor i, v := range s.Fields {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil 
{\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Fields\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "0aa7868518c2242be4ab9edbae92542a", "score": "0.5482421", "text": "func (p *Property) Validate(tx *pop.Connection) (*validate.Errors, error) {\n\treturn validate.Validate(\n\t\t&validators.StringIsPresent{Field: p.Name, Name: \"Name\"},\n\t\t&validators.StringIsPresent{Field: p.Name, Name: \"Type\"},\n\t\t&validators.StringIsPresent{Field: p.Name, Name: \"Address\"},\n\t), nil\n}", "title": "" }, { "docid": "1d9566ac7fa01c5662951b09c70bb64b", "score": "0.5481836", "text": "func (s *GetEventPredictionInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"GetEventPredictionInput\"}\n\tif s.DetectorId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"DetectorId\"))\n\t}\n\tif s.DetectorVersionId != nil && len(*s.DetectorVersionId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"DetectorVersionId\", 1))\n\t}\n\tif s.Entities == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"Entities\"))\n\t}\n\tif s.EventId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"EventId\"))\n\t}\n\tif s.EventTimestamp == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"EventTimestamp\"))\n\t}\n\tif s.EventTimestamp != nil && len(*s.EventTimestamp) < 10 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"EventTimestamp\", 10))\n\t}\n\tif s.EventTypeName == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"EventTypeName\"))\n\t}\n\tif s.EventVariables == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"EventVariables\"))\n\t}\n\tif s.EventVariables != nil && len(s.EventVariables) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"EventVariables\", 1))\n\t}\n\tif s.Entities != nil {\n\t\tfor i, v := range s.Entities {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Entities\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.ExternalModelEndpointDataBlobs != nil {\n\t\tfor i, v := range s.ExternalModelEndpointDataBlobs {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"ExternalModelEndpointDataBlobs\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8cebccb088e5f9ff4664977c78754154", "score": "0.5475966", "text": "func (m *Member) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "title": "" }, { "docid": "8a68d2b2359450a9141360dae2ef7b19", "score": "0.54759616", "text": "func (article *Article) Validate() error {\n\tvalidate := validator.New()\n\treturn validate.Struct(article)\n}", "title": "" }, { "docid": "57255dc77092a576a36352f85e431fe7", "score": "0.547421", "text": "func (s *Address) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"Address\"}\n\tif s.AddressId != nil && len(*s.AddressId) < 40 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"AddressId\", 40))\n\t}\n\tif s.City != nil && len(*s.City) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"City\", 1))\n\t}\n\tif s.Company != nil && len(*s.Company) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Company\", 1))\n\t}\n\tif 
s.Country != nil && len(*s.Country) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Country\", 1))\n\t}\n\tif s.Landmark != nil && len(*s.Landmark) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Landmark\", 1))\n\t}\n\tif s.Name != nil && len(*s.Name) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Name\", 1))\n\t}\n\tif s.PhoneNumber != nil && len(*s.PhoneNumber) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"PhoneNumber\", 1))\n\t}\n\tif s.PostalCode != nil && len(*s.PostalCode) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"PostalCode\", 1))\n\t}\n\tif s.PrefectureOrDistrict != nil && len(*s.PrefectureOrDistrict) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"PrefectureOrDistrict\", 1))\n\t}\n\tif s.StateOrProvince != nil && len(*s.StateOrProvince) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"StateOrProvince\", 1))\n\t}\n\tif s.Street1 != nil && len(*s.Street1) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Street1\", 1))\n\t}\n\tif s.Street2 != nil && len(*s.Street2) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Street2\", 1))\n\t}\n\tif s.Street3 != nil && len(*s.Street3) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Street3\", 1))\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6cf51a613825a9899af8221464058424", "score": "0.5473344", "text": "func (m *BindingInfoDTO) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "title": "" }, { "docid": "c3745c4b09c5dc4693dda4c105579db0", "score": "0.54715693", "text": "func (s *CreateCaseInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreateCaseInput\"}\n\tif s.DomainId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"DomainId\"))\n\t}\n\tif s.DomainId != nil && len(*s.DomainId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"DomainId\", 1))\n\t}\n\tif s.Fields == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"Fields\"))\n\t}\n\tif s.TemplateId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"TemplateId\"))\n\t}\n\tif s.TemplateId != nil && len(*s.TemplateId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"TemplateId\", 1))\n\t}\n\tif s.Fields != nil {\n\t\tfor i, v := range s.Fields {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Fields\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "7cae7156a9c0852b9b50f3c331283655", "score": "0.54702073", "text": "func (s *CreatePredictorInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreatePredictorInput\"}\n\tif s.FeaturizationConfig == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"FeaturizationConfig\"))\n\t}\n\tif s.ForecastHorizon == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"ForecastHorizon\"))\n\t}\n\tif s.ForecastTypes != nil && len(s.ForecastTypes) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ForecastTypes\", 1))\n\t}\n\tif s.InputDataConfig == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"InputDataConfig\"))\n\t}\n\tif s.PredictorName == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"PredictorName\"))\n\t}\n\tif s.PredictorName != nil && len(*s.PredictorName) < 1 
{\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"PredictorName\", 1))\n\t}\n\tif s.EncryptionConfig != nil {\n\t\tif err := s.EncryptionConfig.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"EncryptionConfig\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.FeaturizationConfig != nil {\n\t\tif err := s.FeaturizationConfig.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"FeaturizationConfig\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.HPOConfig != nil {\n\t\tif err := s.HPOConfig.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"HPOConfig\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.InputDataConfig != nil {\n\t\tif err := s.InputDataConfig.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"InputDataConfig\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Tags != nil {\n\t\tfor i, v := range s.Tags {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Tags\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "224e42e741ff8e2d54f6358513262456", "score": "0.5470183", "text": "func (cv *CustomValidator) Validate(i interface{}) error {\n\treturn cv.validator.Struct(i)\n}", "title": "" }, { "docid": "224e42e741ff8e2d54f6358513262456", "score": "0.5470183", "text": "func (cv *CustomValidator) Validate(i interface{}) error {\n\treturn cv.validator.Struct(i)\n}", "title": "" }, { "docid": "224e42e741ff8e2d54f6358513262456", "score": "0.5470183", "text": "func (cv *CustomValidator) Validate(i interface{}) error {\n\treturn cv.validator.Struct(i)\n}", "title": "" }, { "docid": "224e42e741ff8e2d54f6358513262456", "score": "0.5470183", "text": "func (cv *CustomValidator) Validate(i interface{}) error {\n\treturn cv.validator.Struct(i)\n}", "title": "" }, { "docid": "d24a00ca6aafbbaaf49a584d9aa57f84", "score": "0.5467334", "text": "func (s *ContactInformation) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"ContactInformation\"}\n\tif s.AddressLine1 == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"AddressLine1\"))\n\t}\n\tif s.AddressLine1 != nil && len(*s.AddressLine1) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"AddressLine1\", 1))\n\t}\n\tif s.AddressLine2 != nil && len(*s.AddressLine2) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"AddressLine2\", 1))\n\t}\n\tif s.AddressLine3 != nil && len(*s.AddressLine3) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"AddressLine3\", 1))\n\t}\n\tif s.City == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"City\"))\n\t}\n\tif s.City != nil && len(*s.City) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"City\", 1))\n\t}\n\tif s.CompanyName != nil && len(*s.CompanyName) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"CompanyName\", 1))\n\t}\n\tif s.CountryCode == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"CountryCode\"))\n\t}\n\tif s.CountryCode != nil && len(*s.CountryCode) < 2 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"CountryCode\", 2))\n\t}\n\tif s.DistrictOrCounty != nil && len(*s.DistrictOrCounty) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"DistrictOrCounty\", 1))\n\t}\n\tif s.FullName == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"FullName\"))\n\t}\n\tif s.FullName != nil && len(*s.FullName) < 1 
{\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"FullName\", 1))\n\t}\n\tif s.PhoneNumber == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"PhoneNumber\"))\n\t}\n\tif s.PhoneNumber != nil && len(*s.PhoneNumber) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"PhoneNumber\", 1))\n\t}\n\tif s.PostalCode == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"PostalCode\"))\n\t}\n\tif s.PostalCode != nil && len(*s.PostalCode) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"PostalCode\", 1))\n\t}\n\tif s.StateOrRegion != nil && len(*s.StateOrRegion) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"StateOrRegion\", 1))\n\t}\n\tif s.WebsiteUrl != nil && len(*s.WebsiteUrl) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"WebsiteUrl\", 1))\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "70c002cb88497ee207d8a36a7d38a37c", "score": "0.546652", "text": "func (m *GetPhyVersionsRequest) ValidateFields(paths ...string) error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif len(paths) == 0 {\n\t\tpaths = GetPhyVersionsRequestFieldPathsNested\n\t}\n\n\tfor name, subs := range _processPaths(append(paths[:0:0], paths...)) {\n\t\t_ = subs\n\t\tswitch name {\n\t\tcase \"band_id\":\n\t\t\t// no validation rules for BandId\n\t\tdefault:\n\t\t\treturn GetPhyVersionsRequestValidationError{\n\t\t\t\tfield: name,\n\t\t\t\treason: \"invalid field path\",\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "82b42d9163378bd1f8390d95df6dbcd9", "score": "0.5465854", "text": "func (s *CreateWhatIfForecastInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreateWhatIfForecastInput\"}\n\tif s.WhatIfAnalysisArn == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"WhatIfAnalysisArn\"))\n\t}\n\tif s.WhatIfForecastName == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"WhatIfForecastName\"))\n\t}\n\tif s.WhatIfForecastName != nil && len(*s.WhatIfForecastName) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"WhatIfForecastName\", 1))\n\t}\n\tif s.Tags != nil {\n\t\tfor i, v := range s.Tags {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Tags\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.TimeSeriesReplacementsDataSource != nil {\n\t\tif err := s.TimeSeriesReplacementsDataSource.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"TimeSeriesReplacementsDataSource\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.TimeSeriesTransformations != nil {\n\t\tfor i, v := range s.TimeSeriesTransformations {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"TimeSeriesTransformations\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "49bda72c3b3086fc200358668a70da0b", "score": "0.5465715", "text": "func (s *CreateClusterInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreateClusterInput\"}\n\tif s.BrokerNodeGroupInfo == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"BrokerNodeGroupInfo\"))\n\t}\n\tif s.ClusterName == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"ClusterName\"))\n\t}\n\tif s.ClusterName != nil && len(*s.ClusterName) < 1 
{\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ClusterName\", 1))\n\t}\n\tif s.KafkaVersion == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"KafkaVersion\"))\n\t}\n\tif s.KafkaVersion != nil && len(*s.KafkaVersion) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"KafkaVersion\", 1))\n\t}\n\tif s.NumberOfBrokerNodes == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"NumberOfBrokerNodes\"))\n\t}\n\tif s.NumberOfBrokerNodes != nil && *s.NumberOfBrokerNodes < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinValue(\"NumberOfBrokerNodes\", 1))\n\t}\n\tif s.BrokerNodeGroupInfo != nil {\n\t\tif err := s.BrokerNodeGroupInfo.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"BrokerNodeGroupInfo\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.ConfigurationInfo != nil {\n\t\tif err := s.ConfigurationInfo.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"ConfigurationInfo\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.EncryptionInfo != nil {\n\t\tif err := s.EncryptionInfo.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"EncryptionInfo\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.LoggingInfo != nil {\n\t\tif err := s.LoggingInfo.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"LoggingInfo\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.OpenMonitoring != nil {\n\t\tif err := s.OpenMonitoring.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"OpenMonitoring\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "b512e1fe6733e8829d8c6bf5549a7345", "score": "0.54656476", "text": "func (m *LeagueTierAndRankDTO) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "title": "" }, { "docid": "7943df04749a0fbf209596fa7358e559", "score": "0.5462473", "text": "func (s *CreateTemplateInput) Validate() error {\n\tinvalidParams := aws.ErrInvalidParams{Context: \"CreateTemplateInput\"}\n\n\tif s.AwsAccountId == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"AwsAccountId\"))\n\t}\n\tif s.AwsAccountId != nil && len(*s.AwsAccountId) < 12 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"AwsAccountId\", 12))\n\t}\n\tif s.Name != nil && len(*s.Name) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"Name\", 1))\n\t}\n\tif s.Permissions != nil && len(s.Permissions) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"Permissions\", 1))\n\t}\n\n\tif s.SourceEntity == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"SourceEntity\"))\n\t}\n\tif s.Tags != nil && len(s.Tags) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"Tags\", 1))\n\t}\n\n\tif s.TemplateId == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"TemplateId\"))\n\t}\n\tif s.TemplateId != nil && len(*s.TemplateId) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"TemplateId\", 1))\n\t}\n\tif s.VersionDescription != nil && len(*s.VersionDescription) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"VersionDescription\", 1))\n\t}\n\tif s.Permissions != nil {\n\t\tfor i, v := range s.Permissions {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Permissions\", i), err.(aws.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.SourceEntity != nil {\n\t\tif err := s.SourceEntity.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"SourceEntity\", err.(aws.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.Tags != nil {\n\t\tfor i, v := range s.Tags {\n\t\t\tif err := v.Validate(); 
err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"Tags\", i), err.(aws.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "5c3379b6af4dc7afef150630e0258c8b", "score": "0.5460398", "text": "func (mt *Modeldetails) Validate() (err error) {\n\n\treturn\n}", "title": "" }, { "docid": "1dd048bc6cb1b6219a30c9732a382e89", "score": "0.5457756", "text": "func (u User) Validate() error {\n\treturn validation.ValidateStruct(&u,\n\t\tvalidation.Field(&u.Name, validation.Required))\n}", "title": "" }, { "docid": "bff44fd2b3959089c109c765988bc199", "score": "0.54503906", "text": "func (s *DeleteFolderMembershipInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"DeleteFolderMembershipInput\"}\n\tif s.AwsAccountId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"AwsAccountId\"))\n\t}\n\tif s.AwsAccountId != nil && len(*s.AwsAccountId) < 12 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"AwsAccountId\", 12))\n\t}\n\tif s.FolderId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"FolderId\"))\n\t}\n\tif s.FolderId != nil && len(*s.FolderId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"FolderId\", 1))\n\t}\n\tif s.MemberId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"MemberId\"))\n\t}\n\tif s.MemberId != nil && len(*s.MemberId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"MemberId\", 1))\n\t}\n\tif s.MemberType == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"MemberType\"))\n\t}\n\tif s.MemberType != nil && len(*s.MemberType) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"MemberType\", 1))\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "7fa19770732b4598ce79d474f6816a0f", "score": "0.5443995", "text": "func (s *UpdateDataSetInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"UpdateDataSetInput\"}\n\tif s.AwsAccountId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"AwsAccountId\"))\n\t}\n\tif s.AwsAccountId != nil && len(*s.AwsAccountId) < 12 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"AwsAccountId\", 12))\n\t}\n\tif s.ColumnGroups != nil && len(s.ColumnGroups) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ColumnGroups\", 1))\n\t}\n\tif s.ColumnLevelPermissionRules != nil && len(s.ColumnLevelPermissionRules) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ColumnLevelPermissionRules\", 1))\n\t}\n\tif s.DataSetId == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"DataSetId\"))\n\t}\n\tif s.DataSetId != nil && len(*s.DataSetId) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"DataSetId\", 1))\n\t}\n\tif s.ImportMode == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"ImportMode\"))\n\t}\n\tif s.LogicalTableMap != nil && len(s.LogicalTableMap) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"LogicalTableMap\", 1))\n\t}\n\tif s.Name == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"Name\"))\n\t}\n\tif s.Name != nil && len(*s.Name) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Name\", 1))\n\t}\n\tif s.PhysicalTableMap == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"PhysicalTableMap\"))\n\t}\n\tif s.ColumnGroups != nil {\n\t\tfor i, v := range s.ColumnGroups {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil 
{\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"ColumnGroups\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.ColumnLevelPermissionRules != nil {\n\t\tfor i, v := range s.ColumnLevelPermissionRules {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"ColumnLevelPermissionRules\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.LogicalTableMap != nil {\n\t\tfor i, v := range s.LogicalTableMap {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"LogicalTableMap\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.PhysicalTableMap != nil {\n\t\tfor i, v := range s.PhysicalTableMap {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"PhysicalTableMap\", i), err.(request.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\tif s.RowLevelPermissionDataSet != nil {\n\t\tif err := s.RowLevelPermissionDataSet.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"RowLevelPermissionDataSet\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.RowLevelPermissionTagConfiguration != nil {\n\t\tif err := s.RowLevelPermissionTagConfiguration.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"RowLevelPermissionTagConfiguration\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "bca7ae702f8049aa376fdfa8656c8ae0", "score": "0.5442637", "text": "func (s *Student) Validate(tx *pop.Connection) (*validate.Errors, error) {\n\treturn validate.Validate(\n\t\t&validators.StringIsPresent{\n\t\t\tField: s.Name,\n\t\t\tName: \"Name\",\n\t\t\tMessage: nameValidationMsg},\n\t\t&validators.StringIsPresent{\n\t\t\tField: s.Surname,\n\t\t\tName: \"Surname\",\n\t\t\tMessage: surnameValidationMsg},\n\t\t&validators.TimeIsPresent{\n\t\t\tField: s.DateOfBirth,\n\t\t\tName: \"DateOfBirth\",\n\t\t\tMessage: dateOfBirthValidationMsg},\n\t), nil\n}", "title": "" }, { "docid": "55c173a920750ce5fc966a23cc023913", "score": "0.544063", "text": "func (s *DataSourceToIndexFieldMapping) Validate() error {\n\tinvalidParams := aws.ErrInvalidParams{Context: \"DataSourceToIndexFieldMapping\"}\n\n\tif s.DataSourceFieldName == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"DataSourceFieldName\"))\n\t}\n\tif s.DataSourceFieldName != nil && len(*s.DataSourceFieldName) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"DataSourceFieldName\", 1))\n\t}\n\tif s.DateFieldFormat != nil && len(*s.DateFieldFormat) < 4 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"DateFieldFormat\", 4))\n\t}\n\n\tif s.IndexFieldName == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"IndexFieldName\"))\n\t}\n\tif s.IndexFieldName != nil && len(*s.IndexFieldName) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"IndexFieldName\", 1))\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "fb78d1f5cfd3435f63ab0c9165d3f248", "score": "0.5440324", "text": "func (cv CustomValidator) Validate(i interface{}) error {\n\treturn cv.validator.Struct(i)\n}", "title": "" }, { "docid": "83776c1e8a17b16e3406a0bf40cb0a28", "score": "0.5440076", "text": "func (s *SalesforceCustomKnowledgeArticleTypeConfiguration) Validate() error 
{\n\tinvalidParams := aws.ErrInvalidParams{Context: \"SalesforceCustomKnowledgeArticleTypeConfiguration\"}\n\n\tif s.DocumentDataFieldName == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"DocumentDataFieldName\"))\n\t}\n\tif s.DocumentDataFieldName != nil && len(*s.DocumentDataFieldName) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"DocumentDataFieldName\", 1))\n\t}\n\tif s.DocumentTitleFieldName != nil && len(*s.DocumentTitleFieldName) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"DocumentTitleFieldName\", 1))\n\t}\n\tif s.FieldMappings != nil && len(s.FieldMappings) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"FieldMappings\", 1))\n\t}\n\n\tif s.Name == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"Name\"))\n\t}\n\tif s.Name != nil && len(*s.Name) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"Name\", 1))\n\t}\n\tif s.FieldMappings != nil {\n\t\tfor i, v := range s.FieldMappings {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\tinvalidParams.AddNested(fmt.Sprintf(\"%s[%v]\", \"FieldMappings\", i), err.(aws.ErrInvalidParams))\n\t\t\t}\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "eb4dcb4e7065e727c6975ba9fd104f28", "score": "0.54395574", "text": "func (s *RegexMatchTuple) Validate() error {\n\tinvalidParams := aws.ErrInvalidParams{Context: \"RegexMatchTuple\"}\n\n\tif s.FieldToMatch == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"FieldToMatch\"))\n\t}\n\n\tif s.RegexPatternSetId == nil {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"RegexPatternSetId\"))\n\t}\n\tif s.RegexPatternSetId != nil && len(*s.RegexPatternSetId) < 1 {\n\t\tinvalidParams.Add(aws.NewErrParamMinLen(\"RegexPatternSetId\", 1))\n\t}\n\tif len(s.TextTransformation) == 0 {\n\t\tinvalidParams.Add(aws.NewErrParamRequired(\"TextTransformation\"))\n\t}\n\tif s.FieldToMatch != nil {\n\t\tif err := s.FieldToMatch.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"FieldToMatch\", err.(aws.ErrInvalidParams))\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "0075cfc15a6ecad8b7a1a912f9c04623", "score": "0.5437769", "text": "func (s *CreateServiceInput) Validate() error {\n\tinvalidParams := request.ErrInvalidParams{Context: \"CreateServiceInput\"}\n\tif s.ApplicationIdentifier == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"ApplicationIdentifier\"))\n\t}\n\tif s.ApplicationIdentifier != nil && len(*s.ApplicationIdentifier) < 14 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ApplicationIdentifier\", 14))\n\t}\n\tif s.ClientToken != nil && len(*s.ClientToken) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"ClientToken\", 1))\n\t}\n\tif s.Description != nil && len(*s.Description) < 1 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Description\", 1))\n\t}\n\tif s.EndpointType == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"EndpointType\"))\n\t}\n\tif s.EnvironmentIdentifier == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"EnvironmentIdentifier\"))\n\t}\n\tif s.EnvironmentIdentifier != nil && len(*s.EnvironmentIdentifier) < 14 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"EnvironmentIdentifier\", 14))\n\t}\n\tif s.Name == nil {\n\t\tinvalidParams.Add(request.NewErrParamRequired(\"Name\"))\n\t}\n\tif s.Name != nil && len(*s.Name) < 3 {\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"Name\", 3))\n\t}\n\tif s.VpcId != nil && len(*s.VpcId) < 12 
{\n\t\tinvalidParams.Add(request.NewErrParamMinLen(\"VpcId\", 12))\n\t}\n\tif s.LambdaEndpoint != nil {\n\t\tif err := s.LambdaEndpoint.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"LambdaEndpoint\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\tif s.UrlEndpoint != nil {\n\t\tif err := s.UrlEndpoint.Validate(); err != nil {\n\t\t\tinvalidParams.AddNested(\"UrlEndpoint\", err.(request.ErrInvalidParams))\n\t\t}\n\t}\n\n\tif invalidParams.Len() > 0 {\n\t\treturn invalidParams\n\t}\n\treturn nil\n}", "title": "" } ]
5bf0b6d9f58c65b0bb93998c12389683
Handle approves a shipment
[ { "docid": "e2b5f2de59edd9a7e57f3c7b6e01e4ad", "score": "0.72207373", "text": "func (h ApproveShipmentHandler) Handle(params shipmentops.ApproveShipmentParams) middleware.Responder {\n\tsession, logger := h.SessionAndLoggerFromRequest(params.HTTPRequest)\n\tappCtx := appcontext.NewAppContext(h.DB(), logger)\n\n\tif !session.IsOfficeUser() || !session.Roles.HasRole(roles.RoleTypeTOO) {\n\t\tlogger.Error(\"Only TOO role can approve shipments\")\n\t\treturn shipmentops.NewApproveShipmentForbidden()\n\t}\n\n\tshipmentID := uuid.FromStringOrNil(params.ShipmentID.String())\n\teTag := params.IfMatch\n\tshipment, err := h.ApproveShipment(appCtx, shipmentID, eTag)\n\n\tif err != nil {\n\t\tlogger.Error(\"ghcapi.ApproveShipmentHandler\", zap.Error(err))\n\n\t\tswitch e := err.(type) {\n\t\tcase services.NotFoundError:\n\t\t\treturn shipmentops.NewApproveShipmentNotFound()\n\t\tcase services.InvalidInputError:\n\t\t\tpayload := payloadForValidationError(\"Validation errors\", \"ApproveShipment\", h.GetTraceID(), e.ValidationErrors)\n\t\t\treturn shipmentops.NewApproveShipmentUnprocessableEntity().WithPayload(payload)\n\t\tcase services.PreconditionFailedError:\n\t\t\treturn shipmentops.NewApproveShipmentPreconditionFailed().WithPayload(&ghcmessages.Error{Message: handlers.FmtString(err.Error())})\n\t\tcase services.ConflictError, mtoshipment.ConflictStatusError:\n\t\t\treturn shipmentops.NewApproveShipmentConflict().WithPayload(&ghcmessages.Error{Message: handlers.FmtString(err.Error())})\n\t\tdefault:\n\t\t\treturn shipmentops.NewApproveShipmentInternalServerError()\n\t\t}\n\t}\n\n\th.triggerShipmentApprovalEvent(shipmentID, shipment.MoveTaskOrderID, params)\n\n\tshipmentSITStatus := h.CalculateShipmentSITStatus(appCtx, *shipment)\n\tsitStatusPayload := payloads.SITStatus(shipmentSITStatus)\n\n\tpayload := payloads.MTOShipment(shipment, sitStatusPayload)\n\treturn shipmentops.NewApproveShipmentOK().WithPayload(payload)\n}", "title": "" } ]
[ { "docid": "fd9c450f542f174862e43c085af49072", "score": "0.7454613", "text": "func (f *shipmentApprover) ApproveShipment(shipmentID uuid.UUID, eTag string) (*models.MTOShipment, error) {\n\tshipment, err := f.findShipment(shipmentID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\texistingETag := etag.GenerateEtag(shipment.UpdatedAt)\n\tif existingETag != eTag {\n\t\treturn &models.MTOShipment{}, services.NewPreconditionFailedError(shipmentID, query.StaleIdentifierError{StaleIdentifier: eTag})\n\t}\n\n\terr = f.router.Approve(shipment)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = f.setRequiredDeliveryDate(shipment)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\ttransactionError := f.db.Transaction(func(tx *pop.Connection) error {\n\t\tverrs, err := tx.ValidateAndSave(shipment)\n\t\tif verrs != nil && verrs.HasAny() {\n\t\t\tinvalidInputError := services.NewInvalidInputError(shipment.ID, nil, verrs, \"There was an issue with validating the updates\")\n\n\t\t\treturn invalidInputError\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// after approving shipment, shipment level service items must be created\n\t\terr = f.createShipmentServiceItems(shipment)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t})\n\n\tif transactionError != nil {\n\t\treturn nil, transactionError\n\t}\n\n\treturn shipment, nil\n}", "title": "" }, { "docid": "945172d606f1e706f21d6fc7dde62db8", "score": "0.6112404", "text": "func (h CreateShipmentHandler) Handle(params shipmentop.CreateShipmentParams) middleware.Responder {\n\tsession, logger := h.SessionAndLoggerFromRequest(params.HTTPRequest)\n\n\t// #nosec UUID is pattern matched by swagger and will be ok\n\tmoveID, _ := uuid.FromString(params.MoveID.String())\n\n\t// Validate that this move belongs to the current user\n\tmove, err := models.FetchMove(h.DB(), session, moveID)\n\tif err != nil {\n\t\treturn handlers.ResponseForError(logger, err)\n\t}\n\n\tpayload := params.Shipment\n\n\tpickupAddress := addressModelFromPayload(payload.PickupAddress)\n\tsecondaryPickupAddress := addressModelFromPayload(payload.SecondaryPickupAddress)\n\tdeliveryAddress := addressModelFromPayload(payload.DeliveryAddress)\n\tpartialSITDeliveryAddress := addressModelFromPayload(payload.PartialSitDeliveryAddress)\n\tmarket := \"dHHG\"\n\n\tvar requestedPickupDate *time.Time\n\tif payload.RequestedPickupDate != nil {\n\t\tdate := time.Time(*payload.RequestedPickupDate)\n\t\trequestedPickupDate = &date\n\t}\n\n\thasSecondaryPickupAddress := false\n\tif payload.HasSecondaryPickupAddress != nil {\n\t\thasSecondaryPickupAddress = *payload.HasSecondaryPickupAddress\n\t}\n\n\thasDeliveryAddress := false\n\tif payload.HasDeliveryAddress != nil {\n\t\thasDeliveryAddress = *payload.HasDeliveryAddress\n\t}\n\n\thasPartialSitDeliveryAddress := false\n\tif payload.HasPartialSitDeliveryAddress != nil {\n\t\thasPartialSitDeliveryAddress = *payload.HasPartialSitDeliveryAddress\n\t}\n\n\tnewShipment := models.Shipment{\n\t\tMoveID: move.ID,\n\t\tServiceMemberID: session.ServiceMemberID,\n\t\tStatus: models.ShipmentStatusDRAFT,\n\t\tRequestedPickupDate: requestedPickupDate,\n\t\tEstimatedPackDays: payload.EstimatedPackDays,\n\t\tEstimatedTransitDays: payload.EstimatedTransitDays,\n\t\tWeightEstimate: handlers.PoundPtrFromInt64Ptr(payload.WeightEstimate),\n\t\tProgearWeightEstimate: handlers.PoundPtrFromInt64Ptr(payload.ProgearWeightEstimate),\n\t\tSpouseProgearWeightEstimate: 
handlers.PoundPtrFromInt64Ptr(payload.SpouseProgearWeightEstimate),\n\t\tPickupAddress: pickupAddress,\n\t\tHasSecondaryPickupAddress: hasSecondaryPickupAddress,\n\t\tSecondaryPickupAddress: secondaryPickupAddress,\n\t\tHasDeliveryAddress: hasDeliveryAddress,\n\t\tDeliveryAddress: deliveryAddress,\n\t\tHasPartialSITDeliveryAddress: hasPartialSitDeliveryAddress,\n\t\tPartialSITDeliveryAddress: partialSITDeliveryAddress,\n\t\tMarket: &market,\n\t}\n\tif err = updateShipmentDatesWithPayload(h, &newShipment, params.Shipment); err != nil {\n\t\treturn handlers.ResponseForError(logger, err)\n\t}\n\n\tverrs, err := models.SaveShipmentAndAddresses(h.DB(), &newShipment)\n\n\tif err != nil || verrs.HasAny() {\n\t\treturn handlers.ResponseForVErrors(logger, verrs, err)\n\t}\n\n\tshipmentPayload, err := payloadForShipmentModel(newShipment)\n\tif err != nil {\n\t\tlogger.Error(\"Error in shipment payload: \", zap.Error(err))\n\t}\n\n\treturn shipmentop.NewCreateShipmentCreated().WithPayload(shipmentPayload)\n}", "title": "" }, { "docid": "2b416562fc9ce50ab58297b0ad29f29f", "score": "0.6080558", "text": "func (h ApproveHHGHandler) Handle(params shipmentop.ApproveHHGParams) middleware.Responder {\n\n\tctx := params.HTTPRequest.Context()\n\n\tctx, span := beeline.StartSpan(ctx, reflect.TypeOf(h).Name())\n\tdefer span.Send()\n\n\tsession, logger := h.SessionAndLoggerFromContext(ctx)\n\tif !session.IsOfficeUser() {\n\t\treturn shipmentop.NewApproveHHGForbidden()\n\t}\n\n\t// #nosec UUID is pattern matched by swagger and will be ok\n\tshipmentID, _ := uuid.FromString(params.ShipmentID.String())\n\n\tshipment, err := models.FetchShipment(h.DB(), session, shipmentID)\n\tif err != nil {\n\t\treturn handlers.ResponseForError(logger, err)\n\t}\n\tvar approveDate time.Time\n\tif params.ApproveShipmentPayload.ApproveDate != nil {\n\t\tapproveDate = time.Time(*params.ApproveShipmentPayload.ApproveDate)\n\t}\n\terr = shipment.Approve(approveDate)\n\tif err != nil {\n\t\tlogger.Error(\"Attempted to approve HHG, got invalid transition\", zap.Error(err), zap.String(\"shipment_status\", string(shipment.Status)))\n\t\treturn handlers.ResponseForError(logger, err)\n\t}\n\tverrs, err := h.DB().ValidateAndUpdate(shipment)\n\tif err != nil || verrs.HasAny() {\n\t\treturn handlers.ResponseForVErrors(logger, verrs, err)\n\t}\n\n\tshipmentPayload, err := payloadForShipmentModel(*shipment)\n\tif err != nil {\n\t\tlogger.Error(\"Error in shipment payload: \", zap.Error(err))\n\t}\n\n\treturn shipmentop.NewApproveHHGOK().WithPayload(shipmentPayload)\n}", "title": "" }, { "docid": "0672af419cad51f88eeae53f01a16a07", "score": "0.5983613", "text": "func (_Payments *PaymentsTransactor) Approve(opts *bind.TransactOpts, _spender common.Address, _amount *big.Int) (*types.Transaction, error) {\n\treturn _Payments.contract.Transact(opts, \"approve\", _spender, _amount)\n}", "title": "" }, { "docid": "bca3147103af622ce3c8896d1c80f5d1", "score": "0.5939618", "text": "func (_DLESS *DLESSTransactor) Approve(opts *bind.TransactOpts, _spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _DLESS.contract.Transact(opts, \"approve\", _spender, _value)\n}", "title": "" }, { "docid": "9c590de36eda90ddb9d255185dfac512", "score": "0.59045887", "text": "func (_Fed *FedTransactor) Approve(opts *bind.TransactOpts, _spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _Fed.contract.Transact(opts, \"approve\", _spender, _value)\n}", "title": "" }, { "docid": "ed39005c2527b5440e1d29e5aad1b6c1", "score": 
"0.59017646", "text": "func (_CSAI *CSAITransactor) Approve(opts *bind.TransactOpts, spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _CSAI.contract.Transact(opts, \"approve\", spender, amount)\n}", "title": "" }, { "docid": "f448e35abcb45edd97ed1d5295d3ba1b", "score": "0.58959675", "text": "func (e *erc20) Approve(ctx code.Context) code.Response {\n\tcaller := ctx.Caller()\n\tspender := string(ctx.Args()[\"spender\"])\n\tif spender == \"\" {\n\t\treturn code.Errors(\"missing spender argument\")\n\t}\n\tamountstr := string(ctx.Args()[\"amount\"])\n\tif amountstr == \"\" {\n\t\treturn code.Errors(\"missing amount argument\")\n\t}\n\n\tamount := e.allowance.Get(caller + \"_\" + spender)\n\t_, ok := amount.SetString(amountstr, 10)\n\tif !ok {\n\t\treturn code.Errors(\"bad amount number\")\n\t}\n\n\treturn code.OK(nil)\n}", "title": "" }, { "docid": "7029a8de6e3430caaac089d888a2ae22", "score": "0.5888744", "text": "func (_Gold *GoldTransactor) Approve(opts *bind.TransactOpts, from *big.Int, spender *big.Int, amount *big.Int) (*types.Transaction, error) {\n\treturn _Gold.contract.Transact(opts, \"approve\", from, spender, amount)\n}", "title": "" }, { "docid": "ce7936d38af3e7ccad6412259253cbee", "score": "0.58827883", "text": "func (_TellorPlayground *TellorPlaygroundTransactor) Approve(opts *bind.TransactOpts, spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _TellorPlayground.contract.Transact(opts, \"approve\", spender, amount)\n}", "title": "" }, { "docid": "1ff747515f3895f825b1f55faf04dfa7", "score": "0.58457667", "text": "func NewShipmentApprover(db *pop.Connection, router services.ShipmentRouter, siCreator services.MTOServiceItemCreator, planner route.Planner) services.ShipmentApprover {\n\treturn &shipmentApprover{\n\t\tdb,\n\t\trouter,\n\t\tsiCreator,\n\t\tplanner,\n\t}\n}", "title": "" }, { "docid": "0e684619eea86e9dcf4449e0d7fcf1e2", "score": "0.58252084", "text": "func (_ERC20Burnable *ERC20BurnableTransactor) Approve(opts *bind.TransactOpts, spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _ERC20Burnable.contract.Transact(opts, \"approve\", spender, amount)\n}", "title": "" }, { "docid": "7579415ca63dec1ec0fb5a87d2197927", "score": "0.5771093", "text": "func (_IERC20 *IERC20Transactor) Approve(opts *bind.TransactOpts, spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _IERC20.contract.Transact(opts, \"approve\", spender, amount)\n}", "title": "" }, { "docid": "7579415ca63dec1ec0fb5a87d2197927", "score": "0.5771093", "text": "func (_IERC20 *IERC20Transactor) Approve(opts *bind.TransactOpts, spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _IERC20.contract.Transact(opts, \"approve\", spender, amount)\n}", "title": "" }, { "docid": "5e4dc38405a549bb0ef0b1024bb956f6", "score": "0.57540745", "text": "func (s *server) ShipOrder(ctx context.Context, in *pb.ShipOrderRequest) (*pb.ShipOrderResponse, error) {\n\tspan := apitrace.SpanFromContext(ctx)\n\tspan.AddEvent(\"Ship Order\")\n\tlog.Info(\"[ShipOrder] received request\")\n\tdefer log.Info(\"[ShipOrder] completed request\")\n\t// 1. Create a Tracking ID\n\tbaseAddress := fmt.Sprintf(\"%s, %s, %s\", in.Address.StreetAddress, in.Address.City, in.Address.State)\n\tid := CreateTrackingId(baseAddress)\n\n\t// 2. 
Generate a response.\n\treturn &pb.ShipOrderResponse{\n\t\tTrackingId: id,\n\t}, nil\n}", "title": "" }, { "docid": "291cf4f1edd3f88e002f4f6397f2f07e", "score": "0.5744227", "text": "func (h ShipmentInvoiceHandler) Handle(params shipmentop.CreateAndSendHHGInvoiceParams) middleware.Responder {\n\n\tsession, logger := h.SessionAndLoggerFromRequest(params.HTTPRequest)\n\tif !session.IsOfficeUser() {\n\t\treturn shipmentop.NewCreateAndSendHHGInvoiceForbidden()\n\t}\n\n\t// #nosec UUID is pattern matched by swagger and will be ok\n\tshipmentID, _ := uuid.FromString(params.ShipmentID.String())\n\tshipment, err := invoiceop.FetchShipmentForInvoice{DB: h.DB()}.Call(shipmentID)\n\tif err != nil {\n\t\treturn handlers.ResponseForError(logger, err)\n\t}\n\tif shipment.Status != models.ShipmentStatusDELIVERED {\n\t\tlogger.Error(\"Shipment status not in delivered state.\")\n\t\treturn shipmentop.NewCreateAndSendHHGInvoicePreconditionFailed()\n\t}\n\n\t//for now we limit a shipment to 1 invoice\n\t//if invoices exists and at least one is either in process or has succeeded then return 409\n\texistingInvoices, err := models.FetchInvoicesForShipment(h.DB(), shipmentID)\n\tif err != nil {\n\t\treturn handlers.ResponseForError(logger, err)\n\t}\n\tfor _, invoice := range existingInvoices {\n\t\t//if an invoice has started, is in process or has been submitted successfully then throw err\n\t\tif invoice.Status != models.InvoiceStatusSUBMISSIONFAILURE {\n\t\t\tpayload := payloadForInvoiceModel(&invoice)\n\t\t\treturn shipmentop.NewCreateAndSendHHGInvoiceConflict().WithPayload(payload)\n\t\t}\n\t}\n\n\tapprover, err := models.FetchOfficeUserByID(h.DB(), session.OfficeUserID)\n\tif err != nil {\n\t\treturn handlers.ResponseForError(logger, err)\n\t}\n\n\t// before processing the invoice, save it in an in process state\n\tvar invoice models.Invoice\n\tverrs, err := invoiceop.CreateInvoice{DB: h.DB(), Clock: clock.New()}.Call(*approver, &invoice, shipment)\n\tif err != nil || verrs.HasAny() {\n\t\treturn handlers.ResponseForVErrors(logger, verrs, err)\n\t}\n\n\tinvoice858CString, verrs, err := invoiceop.ProcessInvoice{\n\t\tDB: h.DB(),\n\t\tLogger: logger,\n\t\tGexSender: h.GexSender(),\n\t\tSendProductionInvoice: h.SendProductionInvoice(),\n\t\tICNSequencer: h.ICNSequencer(),\n\t}.Call(&invoice, shipment)\n\tif err != nil || verrs.HasAny() {\n\t\treturn handlers.ResponseForVErrors(logger, verrs, err)\n\t}\n\n\t// Send invoice to S3 for storage if response from GEX is successful\n\tfs := h.FileStorer()\n\tverrs, err = invoiceop.StoreInvoice858C{\n\t\tDB: h.DB(),\n\t\tLogger: logger,\n\t\tStorer: &fs,\n\t}.Call(*invoice858CString, &invoice, session.UserID)\n\tif verrs.HasAny() {\n\t\tlogger.Error(\"Failed to store invoice record to s3, with validation errors\", zap.Error(verrs))\n\t}\n\tif err != nil {\n\t\tlogger.Error(\"Failed to store invoice record to s3, with error\", zap.Error(err))\n\t}\n\n\tpayload := payloadForInvoiceModel(&invoice)\n\n\treturn shipmentop.NewCreateAndSendHHGInvoiceOK().WithPayload(payload)\n}", "title": "" }, { "docid": "68284928cb44b4cb9299b52b62b912fe", "score": "0.5735103", "text": "func (h ApproveShipmentDiversionHandler) Handle(params shipmentops.ApproveShipmentDiversionParams) middleware.Responder {\n\tsession, logger := h.SessionAndLoggerFromRequest(params.HTTPRequest)\n\tappCtx := appcontext.NewAppContext(h.DB(), logger)\n\n\tif !session.IsOfficeUser() || !session.Roles.HasRole(roles.RoleTypeTOO) {\n\t\tlogger.Error(\"Only TOO role can approve shipment diversions\")\n\t\treturn 
shipmentops.NewApproveShipmentDiversionForbidden()\n\t}\n\n\tshipmentID := uuid.FromStringOrNil(params.ShipmentID.String())\n\teTag := params.IfMatch\n\tshipment, err := h.ApproveShipmentDiversion(appCtx, shipmentID, eTag)\n\n\tif err != nil {\n\t\tlogger.Error(\"ghcapi.ApproveShipmentDiversionHandler\", zap.Error(err))\n\n\t\tswitch e := err.(type) {\n\t\tcase services.NotFoundError:\n\t\t\treturn shipmentops.NewApproveShipmentDiversionNotFound()\n\t\tcase services.InvalidInputError:\n\t\t\tpayload := payloadForValidationError(\"Validation errors\", \"ApproveShipmentDiversion\", h.GetTraceID(), e.ValidationErrors)\n\t\t\treturn shipmentops.NewApproveShipmentDiversionUnprocessableEntity().WithPayload(payload)\n\t\tcase services.PreconditionFailedError:\n\t\t\treturn shipmentops.NewApproveShipmentDiversionPreconditionFailed().WithPayload(&ghcmessages.Error{Message: handlers.FmtString(err.Error())})\n\t\tcase mtoshipment.ConflictStatusError:\n\t\t\treturn shipmentops.NewApproveShipmentDiversionConflict().WithPayload(&ghcmessages.Error{Message: handlers.FmtString(err.Error())})\n\t\tdefault:\n\t\t\treturn shipmentops.NewApproveShipmentDiversionInternalServerError()\n\t\t}\n\t}\n\n\th.triggerShipmentDiversionApprovalEvent(shipmentID, shipment.MoveTaskOrderID, params)\n\n\tshipmentSITStatus := h.CalculateShipmentSITStatus(appCtx, *shipment)\n\tsitStatusPayload := payloads.SITStatus(shipmentSITStatus)\n\n\tpayload := payloads.MTOShipment(shipment, sitStatusPayload)\n\treturn shipmentops.NewApproveShipmentDiversionOK().WithPayload(payload)\n}", "title": "" }, { "docid": "1bd587a34de5d3578736e8591641c75d", "score": "0.57235336", "text": "func (_IOTX *IOTXTransactor) Approve(opts *bind.TransactOpts, _spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _IOTX.contract.Transact(opts, \"approve\", _spender, _value)\n}", "title": "" }, { "docid": "cba6a1f1b3ae85c25bc887603de8bafa", "score": "0.5722009", "text": "func (_OMC *OMCTransactor) Approve(opts *bind.TransactOpts, _spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _OMC.contract.Transact(opts, \"approve\", _spender, _value)\n}", "title": "" }, { "docid": "ff0fe7f5474f3e7b71bb7a49b437bd82", "score": "0.5712757", "text": "func (h PatchShipmentHandler) Handle(params shipmentop.PatchShipmentParams) middleware.Responder {\n\tsession, logger := h.SessionAndLoggerFromRequest(params.HTTPRequest)\n\n\t// #nosec UUID is pattern matched by swagger and will be ok\n\tshipmentID, _ := uuid.FromString(params.ShipmentID.String())\n\n\tshipment, err := models.FetchShipment(h.DB(), session, shipmentID)\n\tif err != nil {\n\t\treturn handlers.ResponseForError(logger, err)\n\t}\n\n\tpatchShipmentWithPayload(shipment, params.Shipment)\n\tif err = updateShipmentDatesWithPayload(h, shipment, params.Shipment); err != nil {\n\t\treturn handlers.ResponseForError(logger, err)\n\t}\n\n\t// Premove survey info can only be edited by office users or TSPs\n\tif session.IsOfficeUser() {\n\t\tpatchShipmentWithPremoveSurveyFields(shipment, params.Shipment)\n\t}\n\n\tverrs, err := models.SaveShipmentAndAddresses(h.DB(), shipment)\n\n\tif err != nil || verrs.HasAny() {\n\t\treturn handlers.ResponseForVErrors(logger, verrs, err)\n\t}\n\n\tshipmentPayload, err := payloadForShipmentModel(*shipment)\n\tif err != nil {\n\t\tlogger.Error(\"Error in shipment payload: \", zap.Error(err))\n\t}\n\n\treturn shipmentop.NewPatchShipmentOK().WithPayload(shipmentPayload)\n}", "title": "" }, { "docid": "36b6858a1c1fc174ea7f8421d732e3d9", 
"score": "0.5711137", "text": "func buildApprovedPPMShipmentWithActualInfo(db *pop.Connection, userUploader *uploader.UserUploader, customs []Customization) models.PPMShipment {\n\t// It's easier to use some of the data from other downstream\n\t// functions if we have them go first and then make our changes on\n\t// top of those changes.\n\tppmShipment := buildApprovedPPMShipmentWaitingOnCustomer(db, userUploader, customs)\n\n\tppmShipment.ActualMoveDate = models.TimePointer(ppmShipment.ExpectedDepartureDate.AddDate(0, 0, 1))\n\tppmShipment.ActualPickupPostalCode = &ppmShipment.PickupPostalCode\n\tppmShipment.ActualDestinationPostalCode = &ppmShipment.DestinationPostalCode\n\n\tif ppmShipment.HasRequestedAdvance != nil && *ppmShipment.HasRequestedAdvance {\n\t\tppmShipment.HasReceivedAdvance = models.BoolPointer(true)\n\n\t\tppmShipment.AdvanceAmountReceived = ppmShipment.AdvanceAmountRequested\n\t} else {\n\t\tppmShipment.HasReceivedAdvance = models.BoolPointer(false)\n\t}\n\n\tnewDutyLocationAddress := ppmShipment.Shipment.MoveTaskOrder.Orders.NewDutyLocation.Address\n\n\tw2Address := BuildAddress(db, []Customization{\n\t\t{\n\t\t\tModel: models.Address{\n\t\t\t\tStreetAddress1: \"987 New Street\",\n\t\t\t\tCity: newDutyLocationAddress.City,\n\t\t\t\tState: newDutyLocationAddress.State,\n\t\t\t\tPostalCode: newDutyLocationAddress.PostalCode,\n\t\t\t},\n\t\t},\n\t}, nil)\n\n\tppmShipment.W2AddressID = &w2Address.ID\n\tppmShipment.W2Address = &w2Address\n\n\tif db != nil {\n\t\tmustSave(db, &ppmShipment)\n\t} else {\n\t\t// tests expect a stubbed PPM Shipment built with this factory\n\t\t// method to have CreatedAt/UpdatedAt\n\t\tppmShipment.CreatedAt = time.Now()\n\t\tppmShipment.UpdatedAt = ppmShipment.CreatedAt\n\t}\n\n\t// Because of the way we're working with the PPMShipment, the\n\t// changes we've made to it aren't reflected in the pointer\n\t// reference that the MTOShipment has, so we'll need to update it\n\t// to point at the latest version.\n\tppmShipment.Shipment.PPMShipment = &ppmShipment\n\n\treturn ppmShipment\n\n}", "title": "" }, { "docid": "be91a7c55f960e68c4ee4d6e790da43c", "score": "0.56979847", "text": "func (_Weth *Weth) Approve(opts *bind.TransactOpts, guy common.Address, wad *big.Int) (*types.Transaction, error) {\n\treturn _Weth.Transact(opts, \"approve\", guy, wad)\n}", "title": "" }, { "docid": "85ff2c109fd94ad1fba4dccf1f9a0d19", "score": "0.5681662", "text": "func (_TellorPlayground *TellorPlaygroundSession) Approve(spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _TellorPlayground.Contract.Approve(&_TellorPlayground.TransactOpts, spender, amount)\n}", "title": "" }, { "docid": "8a493dab142c8900e800472484377887", "score": "0.5672654", "text": "func (_Berry *BerryTransactor) Approve(opts *bind.TransactOpts, _spender common.Address, _amount *big.Int) (*types.Transaction, error) {\n\treturn _Berry.contract.Transact(opts, \"approve\", _spender, _amount)\n}", "title": "" }, { "docid": "101b1f7b288092c1ec489867ad99abbc", "score": "0.56644565", "text": "func (_Identity *IdentityCaller) Approved(opts *bind.CallOpts, arg0 *big.Int, arg1 *big.Int) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _Identity.contract.Call(opts, out, \"approved\", arg0, arg1)\n\treturn *ret0, err\n}", "title": "" }, { "docid": "7e09c4a22a72cfed7d7fcf9ee429c9e8", "score": "0.5648014", "text": "func (_TellorPlayground *TellorPlaygroundTransactorSession) Approve(spender common.Address, amount *big.Int) 
(*types.Transaction, error) {\n\treturn _TellorPlayground.Contract.Approve(&_TellorPlayground.TransactOpts, spender, amount)\n}", "title": "" }, { "docid": "5a8262523cb862a9dba33795dc18b37a", "score": "0.5644541", "text": "func (s *SmartContract) Approve(ctx contractapi.TransactionContextInterface, spender string, value int) error {\n\n\t// Check if contract has been intilized first\n\tinitialized, err := checkInitialized(ctx)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to check if contract is already initialized: %v\", err)\n\t}\n\tif !initialized {\n\t\treturn fmt.Errorf(\"Contract options need to be set before calling any function, call Initialize() to initialize contract\")\n\t}\n\n\t// Get ID of submitting client identity\n\towner, err := ctx.GetClientIdentity().GetID()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to get client id: %v\", err)\n\t}\n\n\t// Create allowanceKey\n\tallowanceKey, err := ctx.GetStub().CreateCompositeKey(allowancePrefix, []string{owner, spender})\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to create the composite key for prefix %s: %v\", allowancePrefix, err)\n\t}\n\n\t// Update the state of the smart contract by adding the allowanceKey and value\n\terr = ctx.GetStub().PutState(allowanceKey, []byte(strconv.Itoa(value)))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to update state of smart contract for key %s: %v\", allowanceKey, err)\n\t}\n\n\t// Emit the Approval event\n\tapprovalEvent := event{owner, spender, value}\n\tapprovalEventJSON, err := json.Marshal(approvalEvent)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to obtain JSON encoding: %v\", err)\n\t}\n\terr = ctx.GetStub().SetEvent(\"Approval\", approvalEventJSON)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to set event: %v\", err)\n\t}\n\n\tlog.Printf(\"client %s approved a withdrawal allowance of %d for spender %s\", owner, value, spender)\n\n\treturn nil\n}", "title": "" }, { "docid": "d9e9f10dad7ee96ce3eb249802a09d20", "score": "0.5639014", "text": "func (_FAIL *FAILTransactor) Approve(opts *bind.TransactOpts, _spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _FAIL.contract.Transact(opts, \"approve\", _spender, _value)\n}", "title": "" }, { "docid": "76d2898548d3880dbe7e0e875dd90227", "score": "0.56302696", "text": "func (_IdFedPair *IdFedPairTransactor) Approve(opts *bind.TransactOpts, spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _IdFedPair.contract.Transact(opts, \"approve\", spender, value)\n}", "title": "" }, { "docid": "76d2898548d3880dbe7e0e875dd90227", "score": "0.56302696", "text": "func (_IdFedPair *IdFedPairTransactor) Approve(opts *bind.TransactOpts, spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _IdFedPair.contract.Transact(opts, \"approve\", spender, value)\n}", "title": "" }, { "docid": "c3d0fd981817efbdb95bf4934030debe", "score": "0.5629143", "text": "func (_WizardPresale *WizardPresaleTransactor) Approve(opts *bind.TransactOpts, to common.Address, tokenId *big.Int) (*types.Transaction, error) {\n\treturn _WizardPresale.contract.Transact(opts, \"approve\", to, tokenId)\n}", "title": "" }, { "docid": "0ddcbe68a2ed18547ac3c6363c9f00b6", "score": "0.5603996", "text": "func (_ApprovalContract *ApprovalContractTransactor) Approve(opts *bind.TransactOpts) (*types.Transaction, error) {\n\treturn _ApprovalContract.contract.Transact(opts, \"approve\")\n}", "title": "" }, { "docid": "5d67486fab606cad9377f5cb755a3735", "score": "0.5596427", "text": "func 
(s *server) ShipOrder(ctx context.Context, in *pb.ShipOrderRequest) (*pb.ShipOrderResponse, error) {\n\tlog.Info(\"[ShipOrder] received request\")\n\tdefer log.Info(\"[ShipOrder] completed request\")\n\t// 1. Create a Tracking ID\n\tbaseAddress := fmt.Sprintf(\"%s, %s, %s\", in.Address.StreetAddress, in.Address.City, in.Address.State)\n\tid := CreateTrackingId(baseAddress)\n\n\t// 2. Generate a response.\n\treturn &pb.ShipOrderResponse{\n\t\tTrackingId: id,\n\t}, nil\n}", "title": "" }, { "docid": "e579017df2cc00b4310a8750dfbf452b", "score": "0.5580932", "text": "func (_Collateral *CollateralTransactor) Approve(opts *bind.TransactOpts, _operator common.Address, _assetId *big.Int) (*types.Transaction, error) {\n\treturn _Collateral.contract.Transact(opts, \"approve\", _operator, _assetId)\n}", "title": "" }, { "docid": "d24a9e07911e9fd4ffc9aa6586b8b8df", "score": "0.554559", "text": "func (g *RouteGroup) Ship() *Ship { return g.ship }", "title": "" }, { "docid": "898d8e087145ec18f43718883131212f", "score": "0.55410135", "text": "func (_PointCoin *PointCoinTransactor) Approve(opts *bind.TransactOpts, _spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _PointCoin.contract.Transact(opts, \"approve\", _spender, _value)\n}", "title": "" }, { "docid": "80f7284d8711762f647b08d7c4cac3a8", "score": "0.5529891", "text": "func (ach *RequestHandler) Approve(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\n\tid, err := strconv.Atoi(ps.ByName(\"id\"))\n\tif err != nil {\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\thttp.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)\n\t\treturn\n\t}\n\n\trequest, errs := ach.requestService.Request(uint(id))\n\n\tif len(errs) > 0 {\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\thttp.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)\n\t\treturn\n\t}\n\n\tl := r.ContentLength\n\n\tbody := make([]byte, l)\n\n\tr.Body.Read(body)\n\n\tjson.Unmarshal(body, &request)\n\n\trequest, errs = ach.requestService.Approve(request)\n\n\tif len(errs) > 0 {\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\thttp.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)\n\t\treturn\n\t}\n\n\toutput, err := json.MarshalIndent(request, \"\", \"\\t\\t\")\n\n\tif err != nil {\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\thttp.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)\n\t\treturn\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Write(output)\n\treturn\n}", "title": "" }, { "docid": "710fd27b39ed46081494b706f38a6d59", "score": "0.552412", "text": "func (_IdFedERC20 *IdFedERC20Transactor) Approve(opts *bind.TransactOpts, spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _IdFedERC20.contract.Transact(opts, \"approve\", spender, value)\n}", "title": "" }, { "docid": "d91048b4007d42489583155422646841", "score": "0.5516773", "text": "func (_Fed *FedSession) Approve(_spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _Fed.Contract.Approve(&_Fed.TransactOpts, _spender, _value)\n}", "title": "" }, { "docid": "94285fff49989a82478fe7bfdfa07362", "score": "0.5512433", "text": "func (_IERC20 *IERC20Transactor) Approve(opts *bind.TransactOpts, spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _IERC20.contract.Transact(opts, \"approve\", spender, value)\n}", "title": "" }, { "docid": 
"6db2c9caecec28a08b73bc958ac3c2c4", "score": "0.5508818", "text": "func (_ERC20Initializable *ERC20InitializableTransactor) Approve(opts *bind.TransactOpts, spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _ERC20Initializable.contract.Transact(opts, \"approve\", spender, amount)\n}", "title": "" }, { "docid": "aa88ae889def32146e7e431cee0832d5", "score": "0.5505178", "text": "func (d *SDKSteps) ApproveRequest(agentID string, args interface{}) {\n\tc, found := d.bddContext.RouteCallbacks[agentID]\n\tif !found {\n\t\tlogger.Warnf(\"no callback channel found for %s\", agentID)\n\t\treturn\n\t}\n\n\tc <- args\n}", "title": "" }, { "docid": "059b4ab7a09311198bf5375f72ef66a2", "score": "0.5484662", "text": "func (_DLESS *DLESSTransactorSession) Approve(_spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _DLESS.Contract.Approve(&_DLESS.TransactOpts, _spender, _value)\n}", "title": "" }, { "docid": "ed1991e8f2b2ca3535847adf65ce2dbe", "score": "0.5479279", "text": "func (_DLESS *DLESSSession) Approve(_spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _DLESS.Contract.Approve(&_DLESS.TransactOpts, _spender, _value)\n}", "title": "" }, { "docid": "df48b34f3c43c6c77f3df147b2c1b7cb", "score": "0.547711", "text": "func (_ERC20 *ERC20Transactor) Approve(opts *bind.TransactOpts, _spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _ERC20.contract.Transact(opts, \"approve\", _spender, _value)\n}", "title": "" }, { "docid": "bda760c7b00516356f4df3bd1e921e08", "score": "0.54745734", "text": "func (_Fed *FedTransactorSession) Approve(_spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _Fed.Contract.Approve(&_Fed.TransactOpts, _spender, _value)\n}", "title": "" }, { "docid": "3cd96e8e0691ad67e7ab197842cf763d", "score": "0.5471971", "text": "func (re *RateEngine) HandleRunOnShipment(shipment models.Shipment) (CostByShipment, error) {\n\t// Validate expected model relationships are available.\n\tif shipment.PickupAddress == nil {\n\t\treturn CostByShipment{}, errors.New(\"PickupAddress is nil\")\n\t}\n\n\t// NewDutyStation's address/postal code is required per model/schema, so no nil check needed.\n\n\tif shipment.ShipmentOffers == nil {\n\t\treturn CostByShipment{}, errors.New(\"ShipmentOffers is nil\")\n\t} else if len(shipment.ShipmentOffers) == 0 {\n\t\treturn CostByShipment{}, errors.New(\"ShipmentOffers fetched, but none found\")\n\t}\n\n\tif shipment.ShipmentOffers[0].TransportationServiceProviderPerformance.ID == uuid.Nil {\n\t\treturn CostByShipment{}, errors.New(\"TransportationServiceProviderPerformance is nil\")\n\t}\n\n\tif shipment.NetWeight == nil {\n\t\treturn CostByShipment{}, errors.New(\"NetWeight is nil\")\n\t}\n\n\tif shipment.ActualPickupDate == nil {\n\t\treturn CostByShipment{}, errors.New(\"ActualPickupDate is nil\")\n\t}\n\n\tif shipment.PickupAddress.PostalCode[0:5] == shipment.Move.Orders.NewDutyStation.Address.PostalCode[0:5] {\n\t\treturn CostByShipment{}, errors.New(\"PickupAddress cannot have the same PostalCode as the NewDutyStation PostalCode\")\n\t}\n\n\t// All required relationships should exist at this point.\n\tdaysInSIT := 0\n\tvar sitDiscount unit.DiscountRate\n\tsitDiscount = 0.0\n\n\t// Assume the most recent matching shipment offer is the right one.\n\tlhDiscount := shipment.ShipmentOffers[0].TransportationServiceProviderPerformance.LinehaulRate\n\n\t// Apply rate engine to shipment\n\tvar shipmentCost 
CostByShipment\n\tcost, err := re.ComputeShipment(*shipment.NetWeight,\n\t\tshipment.PickupAddress.PostalCode,\n\t\tshipment.Move.Orders.NewDutyStation.Address.PostalCode,\n\t\ttime.Time(*shipment.ActualPickupDate),\n\t\tdaysInSIT, // We don't want any SIT charges\n\t\tlhDiscount,\n\t\tsitDiscount,\n\t)\n\tif err != nil {\n\t\treturn CostByShipment{}, err\n\t}\n\n\tshipmentCost = CostByShipment{\n\t\tShipment: shipment,\n\t\tCost: cost,\n\t}\n\treturn shipmentCost, err\n}", "title": "" }, { "docid": "5b35488730dc44de976780c757d3b646", "score": "0.54671115", "text": "func (_Payments *PaymentsTransactorSession) Approve(_spender common.Address, _amount *big.Int) (*types.Transaction, error) {\n\treturn _Payments.Contract.Approve(&_Payments.TransactOpts, _spender, _amount)\n}", "title": "" }, { "docid": "c6c515ad5cfa21202a7f9f68ae7bc2b2", "score": "0.5461366", "text": "func (_ERC20Interface *ERC20InterfaceTransactor) Approve(opts *bind.TransactOpts, spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _ERC20Interface.contract.Transact(opts, \"approve\", spender, value)\n}", "title": "" }, { "docid": "37b24dc8f4930ebbe0a73c4cef4e1bb4", "score": "0.5457184", "text": "func (_Uniswapv2erc20 *Uniswapv2erc20Transactor) Approve(opts *bind.TransactOpts, spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _Uniswapv2erc20.contract.Transact(opts, \"approve\", spender, value)\n}", "title": "" }, { "docid": "4c6f75364991d6fb9eeee26abbca8c4a", "score": "0.54463917", "text": "func (_Payments *PaymentsSession) Approve(_spender common.Address, _amount *big.Int) (*types.Transaction, error) {\n\treturn _Payments.Contract.Approve(&_Payments.TransactOpts, _spender, _amount)\n}", "title": "" }, { "docid": "b75324108a0932b95e651b25881eed63", "score": "0.5444732", "text": "func (ach *RequestHandler) DisApprove(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\n\tid, err := strconv.Atoi(ps.ByName(\"id\"))\n\n\tif err != nil {\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\thttp.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)\n\t\treturn\n\t}\n\n\t_, errs := ach.requestService.DisApprove(uint(id))\n\n\tif len(errs) > 0 {\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\thttp.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)\n\t\treturn\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(http.StatusNoContent)\n\treturn\n}", "title": "" }, { "docid": "52d99911970c70b13a9ac40761a5046d", "score": "0.54292125", "text": "func (_Gold *GoldTransactorSession) Approve(from *big.Int, spender *big.Int, amount *big.Int) (*types.Transaction, error) {\n\treturn _Gold.Contract.Approve(&_Gold.TransactOpts, from, spender, amount)\n}", "title": "" }, { "docid": "ec9b55baece84795d7c21476d44b7de1", "score": "0.54287815", "text": "func (_ERC20 *ERC20Transactor) Approve(opts *bind.TransactOpts, spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _ERC20.contract.Transact(opts, \"approve\", spender, value)\n}", "title": "" }, { "docid": "ec9b55baece84795d7c21476d44b7de1", "score": "0.54287815", "text": "func (_ERC20 *ERC20Transactor) Approve(opts *bind.TransactOpts, spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _ERC20.contract.Transact(opts, \"approve\", spender, value)\n}", "title": "" }, { "docid": "3163296a33ce0e1493087513e715274c", "score": "0.5409696", "text": "func (s *Steranko) ApproveRequest(ctx 
echo.Context) error {\n\n\t// TODO: MEDIUM: Define filtering rules: Too many requests? Suspicious requests? etc.\n\treturn nil\n}", "title": "" }, { "docid": "ed41c6749150b5bf87cebf84140c8723", "score": "0.5408675", "text": "func (_ERC20Burnable *ERC20BurnableSession) Approve(spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _ERC20Burnable.Contract.Approve(&_ERC20Burnable.TransactOpts, spender, amount)\n}", "title": "" }, { "docid": "2395081fdca9ad1acb15cf9de9204c73", "score": "0.54083174", "text": "func (_ERC20Burnable *ERC20BurnableTransactorSession) Approve(spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _ERC20Burnable.Contract.Approve(&_ERC20Burnable.TransactOpts, spender, amount)\n}", "title": "" }, { "docid": "6b73a1d7fac9c5d6f1b5913660b6240a", "score": "0.5404004", "text": "func (_Gold *GoldSession) Approve(from *big.Int, spender *big.Int, amount *big.Int) (*types.Transaction, error) {\n\treturn _Gold.Contract.Approve(&_Gold.TransactOpts, from, spender, amount)\n}", "title": "" }, { "docid": "6bc12d61cfeef35bfb7c2aabca123c47", "score": "0.53792536", "text": "func (h RejectShipmentHandler) Handle(params shipmentops.RejectShipmentParams) middleware.Responder {\n\tsession, logger := h.SessionAndLoggerFromRequest(params.HTTPRequest)\n\tappCtx := appcontext.NewAppContext(h.DB(), logger)\n\n\tif !session.IsOfficeUser() || !session.Roles.HasRole(roles.RoleTypeTOO) {\n\t\tlogger.Error(\"Only TOO role can reject shipments\")\n\t\treturn shipmentops.NewRejectShipmentForbidden()\n\t}\n\n\tshipmentID := uuid.FromStringOrNil(params.ShipmentID.String())\n\teTag := params.IfMatch\n\trejectionReason := params.Body.RejectionReason\n\tshipment, err := h.RejectShipment(appCtx, shipmentID, eTag, rejectionReason)\n\n\tif err != nil {\n\t\tlogger.Error(\"ghcapi.RejectShipmentHandler\", zap.Error(err))\n\n\t\tswitch e := err.(type) {\n\t\tcase services.NotFoundError:\n\t\t\treturn shipmentops.NewRejectShipmentNotFound()\n\t\tcase services.InvalidInputError:\n\t\t\tpayload := payloadForValidationError(\"Validation errors\", \"RejectShipment\", h.GetTraceID(), e.ValidationErrors)\n\t\t\treturn shipmentops.NewRejectShipmentUnprocessableEntity().WithPayload(payload)\n\t\tcase services.PreconditionFailedError:\n\t\t\treturn shipmentops.NewRejectShipmentPreconditionFailed().WithPayload(&ghcmessages.Error{Message: handlers.FmtString(err.Error())})\n\t\tcase mtoshipment.ConflictStatusError:\n\t\t\treturn shipmentops.NewRejectShipmentConflict().WithPayload(&ghcmessages.Error{Message: handlers.FmtString(err.Error())})\n\t\tdefault:\n\t\t\treturn shipmentops.NewRejectShipmentInternalServerError()\n\t\t}\n\t}\n\n\th.triggerShipmentRejectionEvent(shipmentID, shipment.MoveTaskOrderID, params)\n\n\tpayload := payloads.MTOShipment(shipment, nil)\n\treturn shipmentops.NewRejectShipmentOK().WithPayload(payload)\n}", "title": "" }, { "docid": "5c69c6078143fb65b23c8d4818ed4f93", "score": "0.53696024", "text": "func (router moveRouter) Approve(move *models.Move) error {\n\trouter.logMove(move)\n\tif router.approvable(move) {\n\t\tmove.Status = models.MoveStatusAPPROVED\n\t\trouter.logger.Info(\"SUCCESS: Move approved\")\n\t\treturn nil\n\t}\n\tif router.alreadyApproved(move) {\n\t\treturn nil\n\t}\n\n\trouter.logger.Warn(fmt.Sprintf(\n\t\t\"A move can only be approved if it's in one of these states: %q. 
However, its current status is: %s\",\n\t\tvalidStatusesBeforeApproval, move.Status,\n\t))\n\n\treturn errors.Wrap(\n\t\tmodels.ErrInvalidTransition, fmt.Sprintf(\n\t\t\t\"A move can only be approved if it's in one of these states: %q. However, its current status is: %s\",\n\t\t\tvalidStatusesBeforeApproval, move.Status,\n\t\t),\n\t)\n}", "title": "" }, { "docid": "5df00cb1b4510ab403f83795ca19b482", "score": "0.53609806", "text": "func (_Identity *IdentityTransactor) Approve(opts *bind.TransactOpts, _id *big.Int, _approve bool) (*types.Transaction, error) {\n\treturn _Identity.contract.Transact(opts, \"approve\", _id, _approve)\n}", "title": "" }, { "docid": "257a34d31aae274de7a5afee214eec48", "score": "0.5355978", "text": "func buildApprovedPPMShipmentWaitingOnCustomer(db *pop.Connection, userUploader *uploader.UserUploader, customs []Customization) models.PPMShipment {\n\tppmShipment := BuildPPMShipment(db, customs, []Trait{GetTraitApprovedPPMShipment})\n\n\tif ppmShipment.HasRequestedAdvance == nil || !*ppmShipment.HasRequestedAdvance {\n\t\treturn ppmShipment\n\t}\n\n\tserviceMember := ppmShipment.Shipment.MoveTaskOrder.Orders.ServiceMember\n\tif db == nil && serviceMember.ID.IsNil() {\n\t\t// this is a stubbed ppm shipment and a stubbed service member\n\t\t// we want to fake out the id in this case\n\t\tserviceMember.ID = uuid.Must(uuid.NewV4())\n\t\tppmShipment.Shipment.MoveTaskOrder.Orders.ServiceMemberID = serviceMember.ID\n\t}\n\n\tif db == nil && ppmShipment.Shipment.MoveTaskOrder.ID.IsNil() {\n\t\t// this is a stubbed ppm shipment and a stubbed move\n\t\t// we want to fake out the id in this case\n\t\tppmShipment.Shipment.MoveTaskOrder.ID = uuid.Must(uuid.NewV4())\n\t\tppmShipment.Shipment.MoveTaskOrderID = ppmShipment.Shipment.MoveTaskOrder.ID\n\t}\n\n\taoaFile := testdatagen.Fixture(\"aoa-packet.pdf\")\n\n\taoaPacket := buildDocumentWithUploads(db, userUploader, serviceMember, aoaFile)\n\n\tppmShipment.AOAPacket = &aoaPacket\n\tppmShipment.AOAPacketID = &aoaPacket.ID\n\n\tif db != nil {\n\t\tmustSave(db, &ppmShipment)\n\t}\n\n\t// Because of the way we're working with the PPMShipment, the\n\t// changes we've made to it aren't reflected in the pointer\n\t// reference that the MTOShipment has, so we'll need to update it\n\t// to point at the latest version.\n\tppmShipment.Shipment.PPMShipment = &ppmShipment\n\n\treturn ppmShipment\n}", "title": "" }, { "docid": "e1c7eb5654d36b88d26dea8896e7200d", "score": "0.53545505", "text": "func (h DeleteShipmentHandler) Handle(params shipmentops.DeleteShipmentParams) middleware.Responder {\n\tsession, logger := h.SessionAndLoggerFromRequest(params.HTTPRequest)\n\tappCtx := appcontext.NewAppContext(h.DB(), logger)\n\n\tif !session.IsOfficeUser() || !session.Roles.HasRole(roles.RoleTypeServicesCounselor) {\n\t\tlogger.Error(\"user is not authenticated with service counselor office role\")\n\t\treturn shipmentops.NewDeleteShipmentForbidden()\n\t}\n\n\tshipmentID := uuid.FromStringOrNil(params.ShipmentID.String())\n\tmoveID, err := h.DeleteShipment(appCtx, shipmentID)\n\tif err != nil {\n\t\tlogger.Error(\"ghcapi.DeleteShipmentHandler\", zap.Error(err))\n\n\t\tswitch err.(type) {\n\t\tcase services.NotFoundError:\n\t\t\treturn shipmentops.NewDeleteShipmentNotFound()\n\t\tcase services.ForbiddenError:\n\t\t\treturn shipmentops.NewDeleteShipmentForbidden()\n\t\tdefault:\n\t\t\treturn shipmentops.NewDeleteShipmentInternalServerError()\n\t\t}\n\t}\n\n\t// Note that this is currently not sending any notifications because\n\t// the move isn't 
available to the Prime yet. See the objectEventHandler\n\t// function in pkg/services/event/notification.go.\n\t// We added this now because eventually, we will want to save events in\n\t// the DB for auditing purposes. When that happens, this code in the handler\n\t// will not change. However, we should make sure to add a test in\n\t// mto_shipment_test.go that verifies the audit got saved.\n\th.triggerShipmentDeletionEvent(shipmentID, moveID, params)\n\n\treturn shipmentops.NewDeleteShipmentNoContent()\n}", "title": "" }, { "docid": "7da026afba2f7123df90fbf2c9a9ce9d", "score": "0.5345951", "text": "func (_CSAI *CSAITransactorSession) Approve(spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _CSAI.Contract.Approve(&_CSAI.TransactOpts, spender, amount)\n}", "title": "" }, { "docid": "be4ca4963034170e94104a9ede022ed0", "score": "0.53413725", "text": "func (m *MTOShipment) Validate(tx *pop.Connection) (*validate.Errors, error) {\n\tvar vs []validate.Validator\n\tvs = append(vs, &validators.StringInclusion{Field: string(m.Status), Name: \"Status\", List: []string{\n\t\tstring(MTOShipmentStatusApproved),\n\t\tstring(MTOShipmentStatusRejected),\n\t\tstring(MTOShipmentStatusSubmitted),\n\t}})\n\tvs = append(vs, &validators.UUIDIsPresent{Field: m.MoveTaskOrderID, Name: \"MoveTaskOrderID\"})\n\tvs = append(vs, &validators.UUIDIsPresent{Field: m.PickupAddressID, Name: \"PickupAddressID\"})\n\tvs = append(vs, &validators.UUIDIsPresent{Field: m.DestinationAddressID, Name: \"DestinationAddressID\"})\n\tif m.PrimeEstimatedWeight != nil {\n\t\tvs = append(vs, &validators.IntIsGreaterThan{Field: m.PrimeEstimatedWeight.Int(), Compared: -1, Name: \"PrimeEstimatedWeight\"})\n\t}\n\tif m.PrimeActualWeight != nil {\n\t\tvs = append(vs, &validators.IntIsGreaterThan{Field: m.PrimeActualWeight.Int(), Compared: -1, Name: \"PrimeActualWeight\"})\n\t}\n\tif m.Status == MTOShipmentStatusRejected {\n\t\tvar rejectionReason string\n\t\tif m.RejectionReason != nil {\n\t\t\trejectionReason = *m.RejectionReason\n\t\t}\n\t\tvs = append(vs, &validators.StringIsPresent{Field: rejectionReason, Name: \"RejectionReason\"})\n\t}\n\treturn validate.Validate(vs...), nil\n}", "title": "" }, { "docid": "6e1c499cf37668f942fa72a6be6f4bc4", "score": "0.5330844", "text": "func (c *Contract) AmendmentRequest(ctx context.Context, w *node.ResponseWriter,\r\n\titx *inspector.Transaction, rk *wallet.Key) error {\r\n\r\n\tnode.Log(ctx, \"Amendment Tx : %s\", itx.Hash)\r\n\r\n\tctx, span := trace.StartSpan(ctx, \"handlers.Contract.Amendment\")\r\n\tdefer span.End()\r\n\r\n\tmsg, ok := itx.MsgProto.(*actions.ContractAmendment)\r\n\tif !ok {\r\n\t\treturn errors.New(\"Could not assert as *protocol.ContractAmendment\")\r\n\t}\r\n\r\n\tv := ctx.Value(node.KeyValues).(*node.Values)\r\n\r\n\t// Validate all fields have valid values.\r\n\tif itx.RejectCode != 0 {\r\n\t\tnode.LogWarn(ctx, \"Contract amendment invalid : %d %s\", itx.RejectCode, itx.RejectText)\r\n\t\treturn node.RespondRejectText(ctx, w, itx, rk, itx.RejectCode, itx.RejectText)\r\n\t}\r\n\r\n\t// Locate Contract\r\n\tct, err := contract.Retrieve(ctx, c.MasterDB, rk.Address, c.Config.IsTest)\r\n\tif err != nil {\r\n\t\tif errors.Cause(err) == contract.ErrNotFound {\r\n\t\t\taddress := bitcoin.NewAddressFromRawAddress(rk.Address, w.Config.Net)\r\n\t\t\tnode.LogWarn(ctx, \"Contract doesn't exist : %s\", address)\r\n\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsContractDoesNotExist)\r\n\t\t}\r\n\t\treturn errors.Wrap(err, \"Failed to 
retrieve contract\")\r\n\t}\r\n\r\n\tif !ct.MovedTo.IsEmpty() {\r\n\t\taddress := bitcoin.NewAddressFromRawAddress(ct.MovedTo, w.Config.Net)\r\n\t\tnode.LogWarn(ctx, \"Contract address changed : %s\", address)\r\n\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsContractMoved)\r\n\t}\r\n\r\n\tif !contract.IsOperator(ctx, ct, itx.Inputs[0].Address) {\r\n\t\taddress := bitcoin.NewAddressFromRawAddress(itx.Inputs[0].Address, w.Config.Net)\r\n\t\tnode.LogVerbose(ctx, \"Requestor is not operator : %s\", address)\r\n\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsNotOperator)\r\n\t}\r\n\r\n\tif ct.Revision != msg.ContractRevision {\r\n\t\tnode.LogWarn(ctx, \"Incorrect contract revision : specified %d != current %d\",\r\n\t\t\tmsg.ContractRevision, ct.Revision)\r\n\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsContractRevision)\r\n\t}\r\n\r\n\t// Check proposal if there was one\r\n\tproposed := false\r\n\tproposalType := uint32(0)\r\n\tvotingSystem := uint32(0)\r\n\r\n\tif len(msg.RefTxID) != 0 { // Vote Result Action allowing these amendments\r\n\t\tproposed = true\r\n\r\n\t\trefTxId, err := bitcoin.NewHash32(msg.RefTxID)\r\n\t\tif err != nil {\r\n\t\t\treturn errors.Wrap(err, \"Failed to convert bitcoin.Hash32 to Hash32\")\r\n\t\t}\r\n\r\n\t\t// Retrieve Vote Result\r\n\t\tvoteResultTx, err := transactions.GetTx(ctx, c.MasterDB, refTxId, c.Config.IsTest)\r\n\t\tif err != nil {\r\n\t\t\tnode.LogWarn(ctx, \"Vote Result tx not found for amendment\")\r\n\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsMsgMalformed)\r\n\t\t}\r\n\r\n\t\tvoteResult, ok := voteResultTx.MsgProto.(*actions.Result)\r\n\t\tif !ok {\r\n\t\t\tnode.LogWarn(ctx, \"Vote Result invalid for amendment\")\r\n\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsMsgMalformed)\r\n\t\t}\r\n\r\n\t\t// Retrieve the vote\r\n\t\tvoteTxId, err := bitcoin.NewHash32(voteResult.VoteTxId)\r\n\t\tif err != nil {\r\n\t\t\tnode.LogWarn(ctx, \"Invalid vote txid : %s\", err)\r\n\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsMsgMalformed)\r\n\t\t}\r\n\r\n\t\tvt, err := vote.Retrieve(ctx, c.MasterDB, rk.Address, voteTxId)\r\n\t\tif err == vote.ErrNotFound {\r\n\t\t\tnode.LogWarn(ctx, \"Vote not found : %s\", voteTxId)\r\n\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsVoteNotFound)\r\n\t\t} else if err != nil {\r\n\t\t\tnode.LogWarn(ctx, \"Failed to retrieve vote : %s : %s\", voteTxId, err)\r\n\t\t\treturn errors.Wrap(err, \"Failed to retrieve vote\")\r\n\t\t}\r\n\r\n\t\tif vt.CompletedAt.Nano() == 0 {\r\n\t\t\tnode.LogWarn(ctx, \"Vote not complete yet\")\r\n\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsMsgMalformed)\r\n\t\t}\r\n\r\n\t\tif vt.Result != \"A\" {\r\n\t\t\tnode.LogWarn(ctx, \"Vote result not A(Accept) : %s\", vt.Result)\r\n\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsMsgMalformed)\r\n\t\t}\r\n\r\n\t\tif len(vt.ProposedAmendments) == 0 {\r\n\t\t\tnode.LogWarn(ctx, \"Vote was not for specific amendments\")\r\n\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsMsgMalformed)\r\n\t\t}\r\n\r\n\t\tif vt.AssetCode != nil && !vt.AssetCode.IsZero() {\r\n\t\t\tnode.LogWarn(ctx, \"Vote was not for contract amendments\")\r\n\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsMsgMalformed)\r\n\t\t}\r\n\r\n\t\t// Verify proposal amendments match these amendments.\r\n\t\tif len(voteResult.ProposedAmendments) != len(msg.Amendments) {\r\n\t\t\tnode.LogWarn(ctx, 
\"Proposal has different count of amendments : %d != %d\",\r\n\t\t\t\tlen(voteResult.ProposedAmendments), len(msg.Amendments))\r\n\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsMsgMalformed)\r\n\t\t}\r\n\r\n\t\tfor i, amendment := range voteResult.ProposedAmendments {\r\n\t\t\tif !amendment.Equal(msg.Amendments[i]) {\r\n\t\t\t\tnode.LogWarn(ctx, \"Proposal amendment %d doesn't match\", i)\r\n\t\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsMsgMalformed)\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tproposalType = vt.Type\r\n\t\tvotingSystem = vt.VoteSystem\r\n\t}\r\n\r\n\t// Contract Formation <- Contract Amendment\r\n\tcf, err := contract.FetchContractFormation(ctx, c.MasterDB, rk.Address, c.Config.IsTest)\r\n\tif err != nil {\r\n\t\treturn errors.Wrap(err, \"fetch contract formation\")\r\n\t}\r\n\r\n\t// Ensure reduction in qty is OK, keeping in mind that zero (0) means\r\n\t// unlimited asset creation is permitted.\r\n\tif cf.RestrictedQtyAssets > 0 && cf.RestrictedQtyAssets < uint64(len(ct.AssetCodes)) {\r\n\t\tnode.LogWarn(ctx, \"Cannot reduce allowable assets below existing number\")\r\n\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsContractAssetQtyReduction)\r\n\t}\r\n\r\n\tif msg.ChangeAdministrationAddress || msg.ChangeOperatorAddress {\r\n\t\tif !ct.OperatorAddress.IsEmpty() {\r\n\t\t\tif len(itx.Inputs) < 2 {\r\n\t\t\t\tnode.Log(ctx, \"All operators required for operator change\")\r\n\t\t\t\treturn node.RespondReject(ctx, w, itx, rk,\r\n\t\t\t\t\tactions.RejectionsContractBothOperatorsRequired)\r\n\t\t\t}\r\n\r\n\t\t\tif itx.Inputs[0].Address.Equal(itx.Inputs[1].Address) ||\r\n\t\t\t\t!contract.IsOperator(ctx, ct, itx.Inputs[0].Address) ||\r\n\t\t\t\t!contract.IsOperator(ctx, ct, itx.Inputs[1].Address) {\r\n\t\t\t\tnode.Log(ctx, \"All operators required for operator change\")\r\n\t\t\t\treturn node.RespondReject(ctx, w, itx, rk,\r\n\t\t\t\t\tactions.RejectionsContractBothOperatorsRequired)\r\n\t\t\t}\r\n\t\t} else {\r\n\t\t\tif len(itx.Inputs) < 1 {\r\n\t\t\t\tnode.Log(ctx, \"All operators required for operator change\")\r\n\t\t\t\treturn node.RespondReject(ctx, w, itx, rk,\r\n\t\t\t\t\tactions.RejectionsContractBothOperatorsRequired)\r\n\t\t\t}\r\n\r\n\t\t\tif !contract.IsOperator(ctx, ct, itx.Inputs[0].Address) {\r\n\t\t\t\tnode.Log(ctx, \"All operators required for operator change\")\r\n\t\t\t\treturn node.RespondReject(ctx, w, itx, rk,\r\n\t\t\t\t\tactions.RejectionsContractBothOperatorsRequired)\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t// Pull from amendment tx.\r\n\t// Administration change. New administration in second input\r\n\tinputIndex := 1\r\n\tif !ct.OperatorAddress.IsEmpty() {\r\n\t\tinputIndex++\r\n\t}\r\n\r\n\tif msg.ChangeAdministrationAddress {\r\n\t\tif len(itx.Inputs) <= inputIndex {\r\n\t\t\treturn errors.New(\"New administration specified but not included in inputs\")\r\n\t\t}\r\n\r\n\t\tcf.AdminAddress = itx.Inputs[inputIndex].Address.Bytes()\r\n\t\tinputIndex++\r\n\t}\r\n\r\n\t// Operator changes. 
New operator in second input unless there is also a new administration,\r\n\t// then it is in the third input\r\n\tif msg.ChangeOperatorAddress {\r\n\t\tif len(itx.Inputs) <= inputIndex {\r\n\t\t\treturn errors.New(\"New operator specified but not included in inputs\")\r\n\t\t}\r\n\r\n\t\tcf.OperatorAddress = itx.Inputs[inputIndex].Address.Bytes()\r\n\t}\r\n\r\n\tif err := applyContractAmendments(cf, msg.Amendments, proposed, proposalType,\r\n\t\tvotingSystem); err != nil {\r\n\t\tnode.LogWarn(ctx, \"Failed to apply amendments : %s\", err)\r\n\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsMsgMalformed)\r\n\t}\r\n\r\n\t// Verify entity contract\r\n\tif len(cf.EntityContract) > 0 {\r\n\t\tif _, err := bitcoin.DecodeRawAddress(cf.EntityContract); err != nil {\r\n\t\t\treturn node.RespondRejectText(ctx, w, itx, rk, actions.RejectionsMsgMalformed,\r\n\t\t\t\tfmt.Sprintf(\"Entity contract address invalid : %s\", err))\r\n\t\t}\r\n\t}\r\n\r\n\t// Verify operator entity contract\r\n\tif len(cf.OperatorEntityContract) > 0 {\r\n\t\tra, err := bitcoin.DecodeRawAddress(cf.OperatorEntityContract)\r\n\t\tif err != nil {\r\n\t\t\treturn node.RespondRejectText(ctx, w, itx, rk, actions.RejectionsMsgMalformed,\r\n\t\t\t\tfmt.Sprintf(\"Operator entity contract address invalid : %s\", err))\r\n\t\t}\r\n\r\n\t\tentityCF, err := contract.FetchContractFormation(ctx, c.MasterDB, ra, c.Config.IsTest)\r\n\t\tif err != nil {\r\n\t\t\tif errors.Cause(err) == contract.ErrNotFound {\r\n\t\t\t\treturn node.RespondRejectText(ctx, w, itx, rk, actions.RejectionsMsgMalformed,\r\n\t\t\t\t\t\"Operator entity contract not found\")\r\n\t\t\t}\r\n\t\t\treturn errors.Wrap(err, \"fetch operator entity contract formation\")\r\n\t\t}\r\n\t\tlogger.Info(ctx, \"Found Operator Entity Contract : %s\", entityCF.ContractName)\r\n\t}\r\n\r\n\t// Check admin identity oracle signatures\r\n\tfor _, adminCert := range cf.AdminIdentityCertificates {\r\n\t\tif err := validateContractAmendOracleSig(ctx, c.MasterDB, cf, adminCert, c.Headers,\r\n\t\t\tc.Config.IsTest); err != nil {\r\n\t\t\tnode.LogVerbose(ctx, \"New admin identity signature invalid : %s\", err)\r\n\t\t\treturn node.RespondReject(ctx, w, itx, rk, actions.RejectionsInvalidSignature)\r\n\t\t}\r\n\t}\r\n\r\n\t// Check any oracle entity contracts\r\n\tfor _, oracle := range cf.Oracles {\r\n\t\tra, err := bitcoin.DecodeRawAddress(oracle.EntityContract)\r\n\t\tif err != nil {\r\n\t\t\tnode.LogWarn(ctx, \"Invalid oracle entity address : %s\", err)\r\n\t\t\treturn node.RespondRejectText(ctx, w, itx, rk, actions.RejectionsMsgMalformed,\r\n\t\t\t\tfmt.Sprintf(\"Invalid oracle entity address : %s\", err))\r\n\t\t}\r\n\t\tif _, err := contract.FetchContractFormation(ctx, c.MasterDB, ra, c.Config.IsTest); err != nil {\r\n\t\t\treturn node.RespondRejectText(ctx, w, itx, rk, actions.RejectionsMsgMalformed,\r\n\t\t\t\tfmt.Sprintf(\"Oracle entity address : %s\", err))\r\n\t\t}\r\n\t}\r\n\r\n\t// Apply modifications\r\n\tcf.ContractRevision = ct.Revision + 1 // Bump the revision\r\n\tcf.Timestamp = v.Now.Nano()\r\n\r\n\t// Build outputs\r\n\t// 1 - Contract Address\r\n\t// 2 - Contract Fee (change)\r\n\tw.AddOutput(ctx, rk.Address, 0)\r\n\tw.AddContractFee(ctx, ct.ContractFee)\r\n\r\n\t// Save Tx.\r\n\tif err := transactions.AddTx(ctx, c.MasterDB, itx); err != nil {\r\n\t\treturn errors.Wrap(err, \"Failed to save tx\")\r\n\t}\r\n\r\n\tnode.Log(ctx, \"Accepting contract amendment\")\r\n\r\n\t// Respond with a formation\r\n\tif err := node.RespondSuccess(ctx, w, itx, rk, cf); err == 
nil {\r\n\t\treturn contract.SaveContractFormation(ctx, c.MasterDB, rk.Address, cf, c.Config.IsTest)\r\n\t}\r\n\treturn err\r\n}", "title": "" }, { "docid": "2c144c3a7b152ae5131cf5a97a0b7f65", "score": "0.5314894", "text": "func (_Nft *NftTransactor) Approve(opts *bind.TransactOpts, to common.Address, tokenId *big.Int) (*types.Transaction, error) {\n\treturn _Nft.contract.Transact(opts, \"approve\", to, tokenId)\n}", "title": "" }, { "docid": "43cafc5b3402cf5af46a941e6783538f", "score": "0.52975035", "text": "func (a *ERC20) Approve(spender web3.Address, amount *big.Int) *contract.Txn {\n\treturn a.c.Txn(\"approve\", spender, amount)\n}", "title": "" }, { "docid": "aa7bbd9f3b02d206ee4863fad3db22fe", "score": "0.52966607", "text": "func (_CSAI *CSAISession) Approve(spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _CSAI.Contract.Approve(&_CSAI.TransactOpts, spender, amount)\n}", "title": "" }, { "docid": "82aec2d798a016ea71a7709735641fcb", "score": "0.5282936", "text": "func (_Identity *IdentitySession) Approved(arg0 *big.Int, arg1 *big.Int) (common.Address, error) {\n\treturn _Identity.Contract.Approved(&_Identity.CallOpts, arg0, arg1)\n}", "title": "" }, { "docid": "ea423f0b1b0f3a36388558f433d1ef26", "score": "0.527434", "text": "func (_PaletteToken *PaletteTokenTransactor) Approve(opts *bind.TransactOpts, spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _PaletteToken.contract.Transact(opts, \"approve\", spender, amount)\n}", "title": "" }, { "docid": "b59d229125ad0dfe51aef578528177db", "score": "0.5274123", "text": "func (a *API) ApproveActivity(ctx *app.Context, w http.ResponseWriter, r *http.Request) error {\n\tif ctx.User.Group != \"inst_group\" {\n\t\treturn fmt.Errorf(\"giveBadge must be called by admin only\")\n\t}\n\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer r.Body.Close()\n\n\tvar activityRequest *model.ApproveActivityRequest\n\tif err := json.Unmarshal(body, &activityRequest); err != nil {\n\t\treturn err\n\t}\n\n\tactivities := make([]string, 0)\n\tfor _, a := range activityRequest.Activities {\n\t\tactivities = append(activities, fmt.Sprintf(\"%d%s\", a.ActivityID, a.StudentID))\n\t}\n\tmessageBytes := strings.Join(activities, \"\")\n\tctx.Logger.Infof(\"payload: %s\\n\", messageBytes)\n\n\thashed := sha256.Sum256([]byte(messageBytes))\n\n\tpublickey, err := ctx.Database.GetStaffPublicKey(ctx.User.UserID)\n\tif err != nil {\n\t\tctx.Logger.Errorln(\"error when requesting publickey\")\n\t\treturn err\n\t}\n\n\t// Verify step\n\tisVerified, err := ctx.VerifySignature(hashed[:], activityRequest.Signature, publickey)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif !isVerified {\n\t\treturn errors.New(\"unauthenticated\")\n\t}\n\n\ttxs := make([]string, 0)\n\tfor _, activity := range activityRequest.Activities {\n\t\tactivity.Approver = publickey\n\t\ttx, currentIndex, err := ctx.ApproveActivity(&activity, a.App.CurrentPeerIndex, a.Config.Peers, a.App.SK)\n\t\ta.App.CurrentPeerIndex = currentIndex\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\ttxs = append(txs, tx)\n\t}\n\n\ttxsBytes, err := json.Marshal(txs)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tw.Write(txsBytes)\n\n\treturn nil\n}", "title": "" }, { "docid": "7aa5d0ac439c429429d108aea78e4f67", "score": "0.5272184", "text": "func (_LoanManager *LoanManagerTransactor) ApproveRequest(opts *bind.TransactOpts, _id [32]byte) (*types.Transaction, error) {\n\treturn _LoanManager.contract.Transact(opts, 
\"approveRequest\", _id)\n}", "title": "" }, { "docid": "d675a65f384ee29406252789bf4f2cd8", "score": "0.5270333", "text": "func (h GetShipmentHandler) Handle(params shipmentop.GetShipmentParams) middleware.Responder {\n\tctx := params.HTTPRequest.Context()\n\n\tlogger := logging.FromContext(ctx).(Logger)\n\n\tsession := auth.SessionFromContext(ctx)\n\n\t// #nosec UUID is pattern matched by swagger and will be ok\n\tshipmentID, _ := uuid.FromString(params.ShipmentID.String())\n\n\tshipment, err := models.FetchShipment(h.DB(), session, shipmentID)\n\tif err != nil {\n\t\treturn handlers.ResponseForError(logger, err)\n\t}\n\n\tshipmentPayload, err := payloadForShipmentModel(*shipment)\n\tif err != nil {\n\t\tlogger.Error(\"Error in shipment payload: \", zap.Error(err))\n\t}\n\n\treturn shipmentop.NewGetShipmentOK().WithPayload(shipmentPayload)\n}", "title": "" }, { "docid": "e6bb22bda04066c1c63ab3ee3464500b", "score": "0.5250978", "text": "func (_IOTX *IOTXTransactorSession) Approve(_spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _IOTX.Contract.Approve(&_IOTX.TransactOpts, _spender, _value)\n}", "title": "" }, { "docid": "53c794fd629e418a5b72c2b23580d865", "score": "0.52508336", "text": "func (_Erc721 *Erc721Transactor) Approve(opts *bind.TransactOpts, to common.Address, tokenId *big.Int) (*types.Transaction, error) {\n\treturn _Erc721.contract.Transact(opts, \"approve\", to, tokenId)\n}", "title": "" }, { "docid": "85e573950c04df6d427bf7922717fa44", "score": "0.52485543", "text": "func (h *Hook) AfterDeliverTx(ctx sdk.Context, req abci.RequestDeliverTx, res abci.ResponseDeliverTx) {\n\n}", "title": "" }, { "docid": "69958d04fbea3a140d4477b5949672ea", "score": "0.52469444", "text": "func (a *AskPassCredentialHelper) Approve(_ Creds) error { return nil }", "title": "" }, { "docid": "dc66a291419e92d8d410172df9ebcb56", "score": "0.52337676", "text": "func (_WizardPresale *WizardPresaleSession) Approve(to common.Address, tokenId *big.Int) (*types.Transaction, error) {\n\treturn _WizardPresale.Contract.Approve(&_WizardPresale.TransactOpts, to, tokenId)\n}", "title": "" }, { "docid": "73dbd713ab671f1e87792989d573670e", "score": "0.523314", "text": "func (_IdFedPair *IdFedPairTransactorSession) Approve(spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _IdFedPair.Contract.Approve(&_IdFedPair.TransactOpts, spender, value)\n}", "title": "" }, { "docid": "73dbd713ab671f1e87792989d573670e", "score": "0.523314", "text": "func (_IdFedPair *IdFedPairTransactorSession) Approve(spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _IdFedPair.Contract.Approve(&_IdFedPair.TransactOpts, spender, value)\n}", "title": "" }, { "docid": "8bb3b3217e4282818690885efca0b32c", "score": "0.52304703", "text": "func (_IdFedPair *IdFedPairSession) Approve(spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _IdFedPair.Contract.Approve(&_IdFedPair.TransactOpts, spender, value)\n}", "title": "" }, { "docid": "8bb3b3217e4282818690885efca0b32c", "score": "0.52304703", "text": "func (_IdFedPair *IdFedPairSession) Approve(spender common.Address, value *big.Int) (*types.Transaction, error) {\n\treturn _IdFedPair.Contract.Approve(&_IdFedPair.TransactOpts, spender, value)\n}", "title": "" }, { "docid": "faaefc5946ddd503563b635e11ccc4cc", "score": "0.52291256", "text": "func (_ERC20Initializable *ERC20InitializableSession) Approve(spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn 
_ERC20Initializable.Contract.Approve(&_ERC20Initializable.TransactOpts, spender, amount)\n}", "title": "" }, { "docid": "d2e22ddf82ef2ae14330c70ad8e7299e", "score": "0.5226473", "text": "func Approve(runtime settings.Runtime) error {\n\tkep, err := keps.Open(runtime.TargetDir())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = kep.SetState(states.Implementable)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = kep.Persist()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// TODO add mechanics for creating PR\n\n\treturn nil\n}", "title": "" }, { "docid": "6c55cbd9ca521626b1d9ac8f1f23efed", "score": "0.52259874", "text": "func (_IOTX *IOTXSession) Approve(_spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _IOTX.Contract.Approve(&_IOTX.TransactOpts, _spender, _value)\n}", "title": "" }, { "docid": "36fae89c899c2ea2e25b18cda6315d3d", "score": "0.5210874", "text": "func BuildPPMShipmentThatNeedsPaymentApproval(db *pop.Connection, userUploader *uploader.UserUploader, customs []Customization) models.PPMShipment {\n\t// It's easier to use some of the data from other downstream\n\t// functions if we have them go first and then make our changes on\n\t// top of those changes.\n\tppmShipment := buildPPMShipmentReadyForFinalCustomerCloseOutWithCustoms(db, userUploader, customs)\n\n\tmove := ppmShipment.Shipment.MoveTaskOrder\n\tcertType := models.SignedCertificationTypePPMPAYMENT\n\n\tsignedCert := BuildSignedCertification(db, []Customization{\n\t\t{\n\t\t\tModel: move,\n\t\t\tLinkOnly: true,\n\t\t},\n\t\t{\n\t\t\tModel: models.SignedCertification{\n\t\t\t\tPpmID: &ppmShipment.ID,\n\t\t\t\tCertificationType: &certType,\n\t\t\t},\n\t\t},\n\t}, nil)\n\n\tppmShipment.SignedCertification = &signedCert\n\n\tppmShipment.Status = models.PPMShipmentStatusNeedsPaymentApproval\n\tif ppmShipment.SubmittedAt == nil {\n\t\tppmShipment.SubmittedAt = models.TimePointer(time.Now())\n\t}\n\n\tif db != nil {\n\t\tmustSave(db, &ppmShipment)\n\t}\n\n\t// Because of the way we're working with the PPMShipment, the\n\t// changes we've made to it aren't reflected in the pointer\n\t// reference that the MTOShipment has, so we'll need to update it\n\t// to point at the latest version.\n\tppmShipment.Shipment.PPMShipment = &ppmShipment\n\n\treturn ppmShipment\n}", "title": "" }, { "docid": "d60cc57ade4e64f6d4186b2ac2a8aaba", "score": "0.52089024", "text": "func (_IERC20 *IERC20Session) Approve(spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _IERC20.Contract.Approve(&_IERC20.TransactOpts, spender, amount)\n}", "title": "" }, { "docid": "d60cc57ade4e64f6d4186b2ac2a8aaba", "score": "0.52089024", "text": "func (_IERC20 *IERC20Session) Approve(spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _IERC20.Contract.Approve(&_IERC20.TransactOpts, spender, amount)\n}", "title": "" }, { "docid": "d951b28baf852dbb849a533440e503d5", "score": "0.520817", "text": "func (router moveRouter) ApproveAmendedOrders(moveID uuid.UUID, ordersID uuid.UUID) (models.Move, error) {\n\tvar move models.Move\n\terr := router.db.EagerPreload(\"MTOServiceItems\").\n\t\tWhere(\"moves.id = ?\", moveID).\n\t\tFirst(&move)\n\n\tif err != nil {\n\t\trouter.logger.Error(\"failure encountered querying for move associated with orders\", zap.Error(err))\n\t\treturn models.Move{}, fmt.Errorf(\"failure encountered querying for move associated with orders, %s, id: %s\", err.Error(), ordersID)\n\t}\n\n\tif move.Status != models.MoveStatusAPPROVALSREQUESTED {\n\t\treturn 
models.Move{}, errors.Wrap(\n\t\t\tmodels.ErrInvalidTransition,\n\t\t\t\"Cannot approve move with amended orders because the move status is not APPROVALS REQUESTED\",\n\t\t)\n\t}\n\n\tvar hasRequestedServiceItems bool\n\tfor _, serviceItem := range move.MTOServiceItems {\n\t\tif serviceItem.Status == models.MTOServiceItemStatusSubmitted {\n\t\t\thasRequestedServiceItems = true\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif !hasRequestedServiceItems {\n\t\tapproveErr := router.Approve(&move)\n\t\tif approveErr != nil {\n\t\t\treturn models.Move{}, approveErr\n\t\t}\n\t}\n\n\treturn move, nil\n}", "title": "" }, { "docid": "18cc4e9929730ce41db575d2df087a6e", "score": "0.519997", "text": "func (_IERC20 *IERC20TransactorSession) Approve(spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _IERC20.Contract.Approve(&_IERC20.TransactOpts, spender, amount)\n}", "title": "" }, { "docid": "18cc4e9929730ce41db575d2df087a6e", "score": "0.519997", "text": "func (_IERC20 *IERC20TransactorSession) Approve(spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _IERC20.Contract.Approve(&_IERC20.TransactOpts, spender, amount)\n}", "title": "" }, { "docid": "40a7155a5965a89a2c8e18729ea7a6fd", "score": "0.51997083", "text": "func (_ReserveToken *ReserveTokenTransactor) Approve(opts *bind.TransactOpts, _spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _ReserveToken.contract.Transact(opts, \"approve\", _spender, _value)\n}", "title": "" }, { "docid": "1f2f1d8d62e3e7f157908bbbfe983f80", "score": "0.5198389", "text": "func (_OMC *OMCTransactorSession) Approve(_spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _OMC.Contract.Approve(&_OMC.TransactOpts, _spender, _value)\n}", "title": "" }, { "docid": "e01a45bfdc5fd6fd9444fbb41c8f3744", "score": "0.518291", "text": "func (_Identity *IdentityCallerSession) Approved(arg0 *big.Int, arg1 *big.Int) (common.Address, error) {\n\treturn _Identity.Contract.Approved(&_Identity.CallOpts, arg0, arg1)\n}", "title": "" }, { "docid": "e0df59ea7dc60d9f09d9acac72379c07", "score": "0.5177465", "text": "func (h ApprovePPMHandler) Handle(params officeop.ApprovePPMParams) middleware.Responder {\n\tsession := auth.SessionFromRequestContext(params.HTTPRequest)\n\n\t// #nosec UUID is pattern matched by swagger and will be ok\n\tppmID, _ := uuid.FromString(params.PersonallyProcuredMoveID.String())\n\n\tppm, err := models.FetchPersonallyProcuredMove(h.db, session, ppmID)\n\tif err != nil {\n\t\treturn responseForError(h.logger, err)\n\t}\n\n\tmoveID := ppm.MoveID\n\tppm.Status = models.PPMStatusAPPROVED\n\n\tverrs, err := h.db.ValidateAndUpdate(ppm)\n\tif err != nil || verrs.HasAny() {\n\t\treturn responseForVErrors(h.logger, verrs, err)\n\t}\n\n\terr = notifications.SendNotification(\n\t\tnotifications.NewMoveApproved(h.db, h.logger, session, moveID),\n\t\th.sesService,\n\t)\n\tif err != nil {\n\t\th.logger.Error(\"problem sending email to user\", zap.Error(err))\n\t\t// TODO how should we handle this error?\n\t\t// return newErrResponse(500)\n\t}\n\n\tppmPayload, err := payloadForPPMModel(h.storage, *ppm)\n\tif err != nil {\n\t\treturn responseForError(h.logger, err)\n\t}\n\treturn officeop.NewApprovePPMOK().WithPayload(ppmPayload)\n}", "title": "" }, { "docid": "fa5c3a8f3401ccd89303e74065435697", "score": "0.51730686", "text": "func (_FAIL *FAILSession) Approve(_spender common.Address, _value *big.Int) (*types.Transaction, error) {\n\treturn _FAIL.Contract.Approve(&_FAIL.TransactOpts, 
_spender, _value)\n}", "title": "" }, { "docid": "ef4a68003766abbd594396910862e8ee", "score": "0.5172554", "text": "func (h IndexShipmentsHandler) Handle(p shipmentop.IndexShipmentsParams) middleware.Responder {\n\tvar response middleware.Responder\n\n\tshipments, err := models.FetchPossiblyAwardedShipments(h.db)\n\n\tif err != nil {\n\t\th.logger.Error(\"DB Query\", zap.Error(err))\n\t\tresponse = shipmentop.NewIndexShipmentsBadRequest()\n\t} else {\n\t\tisp := make(internalmessages.IndexShipmentsPayload, len(shipments))\n\t\tfor i, s := range shipments {\n\t\t\tisp[i] = payloadForShipmentModel(s)\n\t\t}\n\t\tresponse = shipmentop.NewIndexShipmentsOK().WithPayload(isp)\n\t}\n\treturn response\n}", "title": "" }, { "docid": "a2516a7e9c46912719ec2afc5a860f5e", "score": "0.51714426", "text": "func (_ERC20Initializable *ERC20InitializableTransactorSession) Approve(spender common.Address, amount *big.Int) (*types.Transaction, error) {\n\treturn _ERC20Initializable.Contract.Approve(&_ERC20Initializable.TransactOpts, spender, amount)\n}", "title": "" } ]
82ae24420db82761d7191d2378749c58
NewAddOperatingSystemKernelDefault creates an AddOperatingSystemKernelDefault with default header values
[ { "docid": "b1bb547eddad52d68e2336a1987910c5", "score": "0.75933355", "text": "func NewAddOperatingSystemKernelDefault(code int) *AddOperatingSystemKernelDefault {\n\treturn &AddOperatingSystemKernelDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" } ]
[ { "docid": "3751e017f8f9b31fadfbbdb4f1e4e7dd", "score": "0.5970262", "text": "func NewAddOperatingSystemVersionDefault(code int) *AddOperatingSystemVersionDefault {\n\treturn &AddOperatingSystemVersionDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "f16507ae885083588edae223664a3c3d", "score": "0.53301144", "text": "func initPlatformDefaults() {\n\t// Linux does not have special defaults\n}", "title": "" }, { "docid": "49e32bf9fe1a64cb231bfddad600204e", "score": "0.52790123", "text": "func NewAddOperatingSystemKernelOK() *AddOperatingSystemKernelOK {\n\treturn &AddOperatingSystemKernelOK{}\n}", "title": "" }, { "docid": "2a24c4501694facaaef3abcb2fa987f0", "score": "0.52561307", "text": "func CreateModifyLaunchTemplateDefaultVersionRequest() (request *ModifyLaunchTemplateDefaultVersionRequest) {\n\trequest = &ModifyLaunchTemplateDefaultVersionRequest{\n\t\tRpcRequest: &requests.RpcRequest{},\n\t}\n\trequest.InitWithApiInfo(\"Ecs\", \"2014-05-26\", \"ModifyLaunchTemplateDefaultVersion\", \"ecs\", \"openAPI\")\n\trequest.Method = requests.POST\n\treturn\n}", "title": "" }, { "docid": "267a990e20a1f6815dca676ad327d881", "score": "0.5241927", "text": "func (r *Provider) Default() {\n\tproviderlog.Info(\"default\", \"name\", r.Name)\n\tif r.Labels == nil {\n\t\tr.Labels = make(map[string]string)\n\t}\n\tr.Labels[meta.LabelUndistro] = \"\"\n\tr.Labels[meta.LabelUndistroClusterName] = \"\"\n\tr.Labels[meta.LabelUndistroClusterType] = \"management\"\n\tif r.Spec.Repository.URL == \"\" {\n\t\tr.Spec.Repository.URL = defaultRepo\n\t}\n\tif r.Spec.ProviderName == \"undistro\" && r.Spec.Repository.URL == defaultRepo {\n\t\tr.Labels[meta.LabelProviderType] = \"core\"\n\t}\n}", "title": "" }, { "docid": "ea7095aee2bdaf31a01d353c02bd7a2d", "score": "0.5186137", "text": "func (e *EKSCluster) AddDefaultsToUpdate(r *pkgCluster.UpdateClusterRequest) {\n\tdefaultImage := pkgEks.DefaultImages[e.modelCluster.Location]\n\n\t// add default node image(s) if needed\n\tif r != nil && r.Eks != nil && r.Eks.NodePools != nil {\n\t\tfor _, np := range r.Eks.NodePools {\n\t\t\tif len(np.Image) == 0 {\n\t\t\t\tnp.Image = defaultImage\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "42db6a8a7f70ed4a59788ce504299009", "score": "0.51800287", "text": "func NewAddOperatingSystemKernelBadRequest() *AddOperatingSystemKernelBadRequest {\n\treturn &AddOperatingSystemKernelBadRequest{}\n}", "title": "" }, { "docid": "8ca885831bc86bf879e6f20f38a0e6b5", "score": "0.512985", "text": "func withDefault() oci.SpecOpts {\n\treturn oci.Compose(\n\t\toci.WithSelinuxLabel(\"system_u:system_r:control_t:s0-s0:c0.c1023\"),\n\t\t// Non-privileged containers only have access to a subset of the devices\n\t\toci.WithDefaultUnixDevices,\n\t\t// No additional capabilities required for non-privileged containers\n\t\tseccomp.WithDefaultProfile(),\n\t)\n}", "title": "" }, { "docid": "a369fc057c01753d14ad710735c831e8", "score": "0.5105876", "text": "func NewPutMachineDefault(code int) *PutMachineDefault {\n\treturn &PutMachineDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "a1485ba6140a79c367aea19b1c1ee340", "score": "0.5105316", "text": "func (p *provider) AddDefaults(spec clusterv1alpha1.MachineSpec) (clusterv1alpha1.MachineSpec, error) {\n\treturn spec, nil\n}", "title": "" }, { "docid": "52ed671cd3db6b9383897981d4e27fe0", "score": "0.5101197", "text": "func DefaultPlaneVariablesKernel() map[string]string {\n\treturn map[string]string{\n\t\tcommon.DataplaneMetricsEnabledKey: \"false\",\n\t}\n}", "title": 
"" }, { "docid": "e72da779c321897c1e69a6c394960abf", "score": "0.5083062", "text": "func (v *AlterClientQuotasRequestEntryOp) Default() {\n}", "title": "" }, { "docid": "abe9cf2754359e745d87de54e5d102fd", "score": "0.5061936", "text": "func initPlatformDefaults() {\n\t// TODO: should darwin default to running SyOS? (\"syos\" flag)\n\t// hide this flag from the help so that users don't try to turn it off\n\tactionVMFlag.DefaultValue = true\n\tactionVMFlag.Hidden = true\n}", "title": "" }, { "docid": "5b0b8cc560fed02d7c544d6e2debb865", "score": "0.5057801", "text": "func (v *DescribeClusterRequest) Default() {\n}", "title": "" }, { "docid": "30c5d0e0e57708e49809b30ac9a78ab8", "score": "0.50521755", "text": "func (v *ApiVersionsResponseSupportedFeature) Default() {\n}", "title": "" }, { "docid": "36921db532f5f9f6b7403a0ec2185130", "score": "0.5051463", "text": "func GetPlatformDefaultConfig() OSConfig {\n\treturn OSConfig{\n\t\tOpen: `start \"\" {{filename}}`,\n\t\tOpenLink: `start \"\" {{link}}`,\n\t}\n}", "title": "" }, { "docid": "22b43635b6cf63981c9d56114bd25e5f", "score": "0.50355357", "text": "func CreateDefaultESX(ctx *Context, f *Folder) {\n\tdc := NewDatacenter(ctx, &f.Folder)\n\n\thost := NewHostSystem(esx.HostSystem)\n\n\tsummary := new(types.ComputeResourceSummary)\n\taddComputeResource(summary, host)\n\n\tcr := &mo.ComputeResource{\n\t\tSummary: summary,\n\t\tNetwork: esx.Datacenter.Network,\n\t}\n\tcr.EnvironmentBrowser = newEnvironmentBrowser()\n\tcr.Self = *host.Parent\n\tcr.Name = host.Name\n\tcr.Host = append(cr.Host, host.Reference())\n\thost.Network = cr.Network\n\tctx.Map.PutEntity(cr, host)\n\n\tpool := NewResourcePool()\n\tcr.ResourcePool = &pool.Self\n\tctx.Map.PutEntity(cr, pool)\n\tpool.Owner = cr.Self\n\n\tfolderPutChild(ctx, &ctx.Map.Get(dc.HostFolder).(*Folder).Folder, cr)\n}", "title": "" }, { "docid": "12ac239ef637d9297c1333d64f42258d", "score": "0.50295174", "text": "func (v *UpdateFeaturesRequestFeatureUpdate) Default() {\n}", "title": "" }, { "docid": "2f07fea423adccc974dddfea36bf4fcb", "score": "0.50175256", "text": "func (v *ApiVersionsRequest) Default() {\n}", "title": "" }, { "docid": "4e096ce7ce1aa096ea7084c0bde708e0", "score": "0.49886054", "text": "func (v *ApiVersionsResponseApiKey) Default() {\n}", "title": "" }, { "docid": "4f2a07b1bebc6c57d5cd8400ffa442a0", "score": "0.49707508", "text": "func (v *AlterISRRequestTopicPartition) Default() {\n}", "title": "" }, { "docid": "8ce9fd7b426ae70f72715c251a304db1", "score": "0.49634656", "text": "func NewPatchMachineConfigurationDefault(code int) *PatchMachineConfigurationDefault {\n\treturn &PatchMachineConfigurationDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "809a600a2d0352f5d6bc22c9d89f11a3", "score": "0.49538", "text": "func NewKernel(ctx context.Context) (*Kernel, error) {\n\t// Context is required to use gocloud.dev functions\n\n\tkernelConfig := new(Kernel)\n\n\t// Init config\n\tviper.SetConfigName(\"alexandria-config\")\n\tviper.SetConfigType(\"yaml\")\n\tviper.AddConfigPath(\"./config/\")\n\tviper.AddConfigPath(\"/etc/alexandria/\")\n\tviper.AddConfigPath(\"$HOME/.alexandria\")\n\tviper.AddConfigPath(\".\")\n\n\t// Open config\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tif _, ok := err.(viper.ConfigFileNotFoundError); ok {\n\t\t\t// Config file not found; ignore error if desired\n\t\t\t_ = viper.SafeWriteConfig()\n\t\t}\n\n\t\t// Config file was found but another error was produced, use default values\n\t}\n\n\t// Map kernel configuration\n\tkernelConfig.Transport 
= newTransportConfig()\n\tkernelConfig.Tracing = newTracingConfig()\n\tkernelConfig.EventBus = newEventBusConfig()\n\tkernelConfig.Docstore = newDocstoreConfig()\n\tkernelConfig.DBMS = newDBMSConfig()\n\tkernelConfig.InMemory = newInMemoryConfig()\n\tkernelConfig.AWS = newAWSConfig()\n\tkernelConfig.Auth = newAuthConfig()\n\n\tkernelConfig.Version = viper.GetString(\"alexandria.info.version\")\n\tkernelConfig.Service = viper.GetString(\"alexandria.info.service\")\n\n\t// Prefer AWS KMS/Hashicorp Vault/Key Parameter Store over local, replace default or local config\n\t// TODO: Implement Hashicorp Vault or AWS KMS key/secret fetching\n\n\treturn kernelConfig, nil\n}", "title": "" }, { "docid": "0ab21752032d0523a9e05028ed440d43", "score": "0.49425593", "text": "func (v *AddPartitionsToTxnRequest) Default() {\n}", "title": "" }, { "docid": "3945560021e4c1baa998dcb7cfc74246", "score": "0.49311656", "text": "func (sys *SystemRPCs) SetDefaultBootOrder(ctx iris.Context) {\n\tdefer ctx.Next()\n\tctxt := ctx.Request().Context()\n\tvar req systemsproto.DefaultBootOrderRequest\n\treq.SystemID = ctx.Params().Get(\"id\")\n\treq.SessionToken = ctx.Request().Header.Get(AuthTokenHeader)\n\tif req.SessionToken == \"\" {\n\t\terrorMessage := invalidAuthTokenErrorMsg\n\t\tcommon.SendInvalidSessionResponse(ctx, errorMessage)\n\t}\n\tl.LogWithFields(ctxt).Debugf(\"Incoming request received for setting default boot order with request id %s\", req.SystemID)\n\tresp, err := sys.SetDefaultBootOrderRPC(ctxt, req)\n\tif err != nil {\n\t\terrorMessage := rpcFailedErrMsg + err.Error()\n\t\tl.LogWithFields(ctxt).Error(errorMessage)\n\t\tcommon.SendFailedRPCCallResponse(ctx, errorMessage)\n\t}\n\tl.LogWithFields(ctxt).Debugf(\"Outgoing response for setting default boot order is %s with status code %d\", string(resp.Body), int(resp.StatusCode))\n\tsendSystemsResponse(ctx, resp)\n}", "title": "" }, { "docid": "3a36635fb60c706c5b59b88db6903467", "score": "0.49292356", "text": "func (v *ApiVersionsResponseFinalizedFeature) Default() {\n}", "title": "" }, { "docid": "206900a4e2a7a8d4703cca88eb13272b", "score": "0.49287826", "text": "func (v *AlterISRRequestTopic) Default() {\n}", "title": "" }, { "docid": "a111d128a50b93e22d41b94cca86498d", "score": "0.49192315", "text": "func (app *application) addDefaultData(td *templateData, r *http.Request) *templateData {\n\tif td == nil {\n\t\ttd = &templateData{}\n\t}\n\n\ttd.CSRFToken = nosurf.Token(r)\n\ttd.AuthenticatedUser = app.authenticatedUser(r)\n\ttd.CurrentYear = time.Now().Year()\n\ttd.Notice = app.session.PopString(r, \"notice\")\n\n\thosts := app.inventoryHost.GetCountUnapprovedHosts()\n\tservices := app.inventoryService.GetCountUnapprovedServices()\n\ttd.CountUnapproved = hosts + services\n\ttd.Version = \"web:0.0.2 api:0.0.1 agent:2:0.0.4-2\"\n\n\treturn td\n}", "title": "" }, { "docid": "59b405fad944eb5890c6ff492d8e4eba", "score": "0.4913183", "text": "func NewOsWithDefaults() *Os {\n\tthis := Os{}\n\treturn &this\n}", "title": "" }, { "docid": "771a72055cf59882ed29b2ebb504eab9", "score": "0.48848063", "text": "func Default() *fvdpb.VirtualDevice {\n\treturn &fvdpb.VirtualDevice{\n\t\tName: \"default\",\n\t\tKernel: \"qemu-kernel\",\n\t\tInitrd: \"zircon-a\",\n\t\tDrive: &fvdpb.Drive{\n\t\t\tId: \"maindisk\",\n\t\t\tImage: \"storage-full\",\n\t\t},\n\t\tHw: &fvdpb.HardwareProfile{\n\t\t\tArch: \"x64\",\n\t\t\tCpuCount: 1,\n\t\t\tRam: \"1M\",\n\t\t\tMac: \"52:54:00:63:5e:7a\",\n\t\t},\n\t}\n}", "title": "" }, { "docid": "1c1c71c242e81916e933963060806a1d", "score": 
"0.4856896", "text": "func NewKnoxRequestDefault(code int) *KnoxRequestDefault {\n\treturn &KnoxRequestDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "9dcc6cb1ce378c1e4fa7bd1f1386ee72", "score": "0.4856888", "text": "func (config *Config) GenerateDefault(options []string) error {\n\tif config.UvmTimeoutSeconds < 0 {\n\t\treturn fmt.Errorf(\"opengcs: cannot generate a config when supplied a negative utility VM timeout\")\n\t}\n\n\tenvTimeoutSeconds := 0\n\toptTimeoutSeconds := 0\n\n\tif config.UvmTimeoutSeconds != 0 {\n\t\tenvTimeout := os.Getenv(\"OPENGCS_UVM_TIMEOUT_SECONDS\")\n\t\tif len(envTimeout) > 0 {\n\t\t\tvar err error\n\t\t\tif envTimeoutSeconds, err = strconv.Atoi(envTimeout); err != nil {\n\t\t\t\treturn fmt.Errorf(\"opengcs: OPENGCS_UVM_TIMEOUT_SECONDS could not be interpreted as an integer\")\n\t\t\t}\n\t\t\tif envTimeoutSeconds < 0 {\n\t\t\t\treturn fmt.Errorf(\"opengcs: OPENGCS_UVM_TIMEOUT_SECONDS cannot be negative\")\n\t\t\t}\n\t\t}\n\t}\n\n\tfor _, v := range options {\n\t\topt := strings.SplitN(v, \"=\", 2)\n\t\tif len(opt) == 2 {\n\t\t\tswitch strings.ToLower(opt[0]) {\n\t\t\tcase \"opengcskirdpath\":\n\t\t\t\tconfig.KirdPath = opt[1]\n\t\t\tcase \"opengcskernel\":\n\t\t\t\tconfig.KernelFile = opt[1]\n\t\t\tcase \"opengcsinitrd\":\n\t\t\t\tconfig.InitrdFile = opt[1]\n\t\t\tcase \"opengcsvhdx\":\n\t\t\t\tconfig.Vhdx = opt[1]\n\t\t\tcase \"opengcstimeoutsecs\":\n\t\t\t\tvar err error\n\t\t\t\tif optTimeoutSeconds, err = strconv.Atoi(opt[1]); err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"opengcs: opengcstimeoutsecs option could not be interpreted as an integer\")\n\t\t\t\t}\n\t\t\t\tif optTimeoutSeconds < 0 {\n\t\t\t\t\treturn fmt.Errorf(\"opengcs: opengcstimeoutsecs option cannot be negative\")\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif config.KirdPath == \"\" {\n\t\tconfig.KirdPath = filepath.Join(os.Getenv(\"ProgramFiles\"), \"Linux Containers\")\n\t}\n\n\tif config.Vhdx == \"\" {\n\t\tconfig.Vhdx = filepath.Join(config.KirdPath, `uvm.vhdx`)\n\t}\n\tif config.KernelFile == \"\" {\n\t\tconfig.KernelFile = `bootx64.efi`\n\t}\n\tif config.InitrdFile == \"\" {\n\t\tconfig.InitrdFile = `initrd.img`\n\t}\n\n\t// Which timeout are we going to take? If not through option or environment,\n\t// then use the default constant, otherwise the maximum of the option or\n\t// environment supplied setting. 
A requested on in the config supplied\n\t// overrides all of this.\n\tif config.UvmTimeoutSeconds == 0 {\n\t\tconfig.UvmTimeoutSeconds = defaultUvmTimeoutSeconds\n\t\tif optTimeoutSeconds != 0 || envTimeoutSeconds != 0 {\n\t\t\tconfig.UvmTimeoutSeconds = optTimeoutSeconds\n\t\t\tif envTimeoutSeconds > optTimeoutSeconds {\n\t\t\t\tconfig.UvmTimeoutSeconds = envTimeoutSeconds\n\t\t\t}\n\t\t}\n\t}\n\n\tconfig.MappedVirtualDisks = nil\n\n\treturn nil\n}", "title": "" }, { "docid": "81df40478f8024632439574b115035a0", "score": "0.48520845", "text": "func (v *BrokerRegistrationRequestFeature) Default() {\n}", "title": "" }, { "docid": "d2309384ca31de8008165a185fd1561d", "score": "0.48236772", "text": "func (v *AlterConfigsRequest) Default() {\n}", "title": "" }, { "docid": "a9ce0544320ed9ee846e7f5ba0ff8562", "score": "0.48229283", "text": "func (v *LeaderAndISRRequestTopicPartition) Default() {\n}", "title": "" }, { "docid": "8337794dd59be99ee89d1235f8c0afd3", "score": "0.48178494", "text": "func NewUpdateVMDiskDefault(code int) *UpdateVMDiskDefault {\n\tif code <= 0 {\n\t\tcode = 500\n\t}\n\n\treturn &UpdateVMDiskDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "6c8a9d347cd36c39308243879efbce20", "score": "0.48158672", "text": "func (o *AddCPUArchitectureParams) SetDefaults() {\n\t// no default values defined for this parameter\n}", "title": "" }, { "docid": "b2a618e5bb63ff63adfe3010a559bdc7", "score": "0.48105067", "text": "func (v *IncrementalAlterConfigsRequest) Default() {\n}", "title": "" }, { "docid": "42d748f387e233d204ce91c7f75006be", "score": "0.48089823", "text": "func (v *AlterISRResponseTopicPartition) Default() {\n}", "title": "" }, { "docid": "f0bfbff6a00e3046889e39c5516b2300", "score": "0.48075452", "text": "func (v *AlterConfigsRequestResourceConfig) Default() {\n}", "title": "" }, { "docid": "4b9fc8233f77f3d097f8fd4c4bfb0020", "score": "0.48049656", "text": "func CreateDefault() error {\n\tconfigTemplate := `\n---\nwithTimestamps: false\n\ncarriers:\n - type: slack\n token: \"xoxp-XXXXXX\"\n channels: \"@user1, #general\"\n`\n\treturn ioutil.WriteFile(DefaultConfigPath, []byte(configTemplate), 0644)\n}", "title": "" }, { "docid": "340de7b9ac0fc4d9f449bdfa2763f9f4", "score": "0.48030034", "text": "func DefaultKubernetesUserAgent() string {\n\treturn buildUserAgent(\n\t\tadjustCommand(os.Args[0]),\n\t\tadjustVersion(version.Get().GitVersion),\n\t\tgruntime.GOOS,\n\t\tgruntime.GOARCH,\n\t\tadjustCommit(version.Get().GitCommit))\n}", "title": "" }, { "docid": "1d609152253cb518e1532016ee25fe57", "score": "0.47989574", "text": "func (s *CloneStackInput) SetDefaultOs(v string) *CloneStackInput {\n\ts.DefaultOs = &v\n\treturn s\n}", "title": "" }, { "docid": "be444dffd25e4dd04d498eb5682a43a9", "score": "0.47925508", "text": "func (nsOS) Kernel(value string) fld.Field {\n\treturn ecsString(\"os.kernel\", value)\n}", "title": "" }, { "docid": "9524cd79e102178a18a4b0cbbe1c7a1a", "score": "0.47843924", "text": "func (k *KKCluster) Default() {\n\tkkclusterlog.Info(\"default\", \"name\", k.Name)\n\n\tdefaultDistribution(&k.Spec)\n\tdefaultAuth(&k.Spec.Nodes.Auth)\n\tdefaultInstance(&k.Spec)\n\tdefaultInPlaceUpgradeAnnotation(k.GetAnnotations())\n}", "title": "" }, { "docid": "049a9df100530e67fc5f5c027d45cfca", "score": "0.47815445", "text": "func NewDefaultSpec() *Spec {\n\treturn &Spec{\n\t\tName: \"gke-cluster\",\n\t\tK8SVersion: DefaultK8SVersion,\n\t\tNodePools: []NodePool{\n\t\t\tNodePool{\n\t\t\t\tName: \"np-1\",\n\t\t\t\tSize: 1,\n\t\t\t\tMachineType: 
DefaultMachineType,\n\t\t\t\tImageType: DefaultImageType,\n\t\t\t},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "156692d44893dc766b98c864b4c6ba47", "score": "0.47780693", "text": "func (v *AddPartitionsToTxnResponse) Default() {\n}", "title": "" }, { "docid": "09665bd571ba6fcdd31716cf42fed279", "score": "0.4773431", "text": "func (v *AlterClientQuotasRequestEntryEntity) Default() {\n}", "title": "" }, { "docid": "f13238bec42c0e9db63a96b733d0976a", "score": "0.47730896", "text": "func DefaultOSLocatorParams() OSLocatorParams {\n\treturn NewOSLocatorParams(DefaultMaxURILength)\n}", "title": "" }, { "docid": "ab8d7ace14e874db523c993d792d42c0", "score": "0.47563833", "text": "func (v *DescribeConfigsRequest) Default() {\n}", "title": "" }, { "docid": "ed47c2f14c15058bd6d3a6d16e2b37ba", "score": "0.47470754", "text": "func (b *Box) DefaultAddSpace() {\n\t(*b).Content.AddSpace = boxAddSpc\n}", "title": "" }, { "docid": "2baa0b57fa4343bb59198437ff27f6db", "score": "0.47457397", "text": "func NewGetHclOperatingSystemVendorsMoidDefault(code int) *GetHclOperatingSystemVendorsMoidDefault {\n\treturn &GetHclOperatingSystemVendorsMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "41e77314bd1d97cbb216d7c65d143de1", "score": "0.47454235", "text": "func (v *IncrementalAlterConfigsRequestResourceConfig) Default() {\n}", "title": "" }, { "docid": "c9850589bf6bb373ef6095089d965c6a", "score": "0.47450683", "text": "func newFakeInstanceDefault(config group.Spec, logicalID *instance.LogicalID) instance.Spec {\n\treturn newFakeInstance(config, logicalID, provisionTagsDefault(config, logicalID))\n}", "title": "" }, { "docid": "ed6f5f29745e344ecd29531f5db1a2c9", "score": "0.47447035", "text": "func (o *CreateElfImageParams) SetDefaults() {\n\t// no default values defined for this parameter\n}", "title": "" }, { "docid": "fb165aa7cb01dd5220fab618fab52bb7", "score": "0.4735894", "text": "func (v *AlterConfigsRequestResource) Default() {\n}", "title": "" }, { "docid": "7a7f95511eecf3ce5adad8ca4aa9c54b", "score": "0.47247037", "text": "func NewDefaultSystemConfig(server bool) (*Config, error) {\n\tc := newConfig()\n\tc.InitiatedByServer = server\n\n\terr := c.normalize()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn c, nil\n}", "title": "" }, { "docid": "0262b90c64e1553b28ab2db6a5f1a390", "score": "0.4720482", "text": "func (v *AddPartitionsToTxnRequestTopic) Default() {\n}", "title": "" }, { "docid": "8ed7996b63b548d55e8cb6aa4569f9cb", "score": "0.4719281", "text": "func (v *CreateACLsRequest) Default() {\n}", "title": "" }, { "docid": "fcaa3b82cb70bec8a845c4e1fb654486", "score": "0.47151327", "text": "func (v *AlterISRResponseTopic) Default() {\n}", "title": "" }, { "docid": "5c946631e773b9f5b09135ba029daae7", "score": "0.47014016", "text": "func (c *InMemoryClusterTemplate) Default() {\n\n}", "title": "" }, { "docid": "1fd1373ca00c954160d88d2bcc7c4395", "score": "0.46955946", "text": "func (s *UpdateStackInput) SetDefaultOs(v string) *UpdateStackInput {\n\ts.DefaultOs = &v\n\treturn s\n}", "title": "" }, { "docid": "f78aa50427e19747ee9c0f7a7fee0985", "score": "0.4686278", "text": "func (v *IncrementalAlterConfigsRequestResource) Default() {\n}", "title": "" }, { "docid": "5b6bd12bc4c6f1949f8fa0f193455ecf", "score": "0.46861857", "text": "func setDefaultConfig() {\n\t// Handling the built-in flavor\n\tviper.SetDefault(FLAVORS+\".default.use_default\", true) // All containers inherit from default\n\tviper.SetDefault(FLAVORS+\".default.memory_size\", 
\"512MB\")\n\tviper.SetDefault(FLAVORS+\".default.cpu_count\", int64(1))\n\tviper.SetDefault(FLAVORS+\".default.privileged\", false)\n\tviper.SetDefault(FLAVORS+\".default.data\", \"\")\n\tviper.SetDefault(FLAVORS+\".default.size\", \"\")\n\tviper.SetDefault(FLAVORS+\".default.work_directory\", DEFAULTWORKDIRECTORY)\n\tviper.SetDefault(FLAVORS+\".medium.memory_size\", \"768MB\")\n\tviper.SetDefault(FLAVORS+\".large.memory_size\", \"1GB\")\n\tviper.SetDefault(FLAVORS+\".huge.memory_size\", \"4GB\")\n\tviper.SetDefault(FLAVORS+\".huge.cpu_count\", int64(2))\n\n\t// Handling the built-in image aliases\n\tviper.SetDefault(IMAGES+\".default.use_default\", true) // All containers inherit from default\n\tviper.SetDefault(IMAGES+\".default.image_name\", DEFAULTIMAGE)\n\t// Setting up the aliases to be reported in 'image show-aliases' command\n\tviper.SetDefault(IMAGES+\".mimic.image_name\", LATESTIMAGE+\"mimic\")\n\tviper.SetDefault(IMAGES+\".luminous.image_name\", LATESTIMAGE+\"luminous\")\n\tviper.SetDefault(IMAGES+\".redhat.image_name\", \"registry.access.redhat.com/rhceph/rhceph-3-rhel7\")\n\n\t// Setting up the default update notification configuration\n\tviper.SetDefault(UPDATE+\".config.want_update_notification\", true)\n\tviper.SetDefault(UPDATE+\".config.reminder_wait_period_in_hours\", 24)\n}", "title": "" }, { "docid": "34879317e8601192e9e8de7967f7bbd8", "score": "0.46860114", "text": "func NewDefaultHeader() Header {\n\treturn NewHeader(\n\t\t[]Card{},\n\t\tIMAGE_HDU,\n\t\t8,\n\t\t[]int64{},\n\t)\n}", "title": "" }, { "docid": "3443c21b7e4f566e86f4f39ffc6613c3", "score": "0.46835357", "text": "func (env *Environment) InitDefault() error {\n\tdefaultConf := Config{}\n\tdefaultConf.OccupyAll = false\n\tdefaultConf.ReserveCores = append(defaultConf.ReserveCores, 0)\n\n\treturn env.Init(defaultConf)\n}", "title": "" }, { "docid": "7e8eb1cbef7505cc6812f54cd6fbe83c", "score": "0.4682748", "text": "func NewDefaultVM(c *Concierge, enableGPU bool, diskSize uint64) *VM {\n\treturn &VM{\n\t\tConcierge: c,\n\t\tname: DefaultVMName,\n\t\tContextID: -1, // not populated until VM is started.\n\t\tseneschalHandle: 0, // not populated until VM is started.\n\t\tEnableGPU: enableGPU, // enable the gpu if set.\n\t\tdiskSize: 0, // not populated until VM is started.\n\t\ttargetDiskSize: diskSize,\n\t}\n}", "title": "" }, { "docid": "144443eacc70c91af083318383f7d9b4", "score": "0.4673195", "text": "func (v *AlterClientQuotasRequestEntry) Default() {\n}", "title": "" }, { "docid": "1b0e706c4108793b3bf10b417c04c3e4", "score": "0.46684834", "text": "func setDefaultLabels(target *ServiceConfig) {\n\ttarget.Labels.Add(config.LabelWorkloadImagePullPolicy, config.DefaultImagePullPolicy)\n\ttarget.Labels.Add(config.LabelWorkloadServiceAccountName, config.DefaultServiceAccountName)\n}", "title": "" }, { "docid": "3b98a3ff422b1f2ae503a93f1d39c2ae", "score": "0.46675256", "text": "func (s *CreateStackInput) SetDefaultOs(v string) *CreateStackInput {\n\ts.DefaultOs = &v\n\treturn s\n}", "title": "" }, { "docid": "c540cdac0ddfe19dce13793311c6438b", "score": "0.4665704", "text": "func (v *ApiVersionsResponse) Default() {\n\tv.FinalizedFeaturesEpoch = -1\n}", "title": "" }, { "docid": "f5b1f26cab1c5125714af78bfdbea58a", "score": "0.4663661", "text": "func (v *AddPartitionsToTxnResponseTopic) Default() {\n}", "title": "" }, { "docid": "74381bfb62d0dabe43c2bca9aa4005a6", "score": "0.4660421", "text": "func (pool *WorkspacesBigDataPool) Default() {\n\tpool.defaultImpl()\n\tvar temp any = pool\n\tif runtimeDefaulter, ok := 
temp.(genruntime.Defaulter); ok {\n\t\truntimeDefaulter.CustomDefault()\n\t}\n}", "title": "" }, { "docid": "05221217bdf402d58176cab704d8bc09", "score": "0.46569186", "text": "func NewOsValidationInformationWithDefaults() *OsValidationInformation {\n\tthis := OsValidationInformation{}\n\tvar classId string = \"os.ValidationInformation\"\n\tthis.ClassId = classId\n\tvar objectType string = \"os.ValidationInformation\"\n\tthis.ObjectType = objectType\n\treturn &this\n}", "title": "" }, { "docid": "444dd93cc43493a8d417ae3d52f65f4f", "score": "0.4648454", "text": "func (v *DescribeQuorumRequest) Default() {\n}", "title": "" }, { "docid": "ac4f12518f061cfad59dbcdba200b6c2", "score": "0.4640647", "text": "func (v *LeaderAndISRResponseTopicPartition) Default() {\n}", "title": "" }, { "docid": "a95424f81170d2aec5b69a0c1530581f", "score": "0.46399137", "text": "func DefaultPlatform() *Platform {\n\treturn platform\n}", "title": "" }, { "docid": "6c7d697e3fc79183a619c6f7ed025d5d", "score": "0.46340907", "text": "func (v *CreateTopicsRequestTopicConfig) Default() {\n}", "title": "" }, { "docid": "2d2b2cc4f183ce1fb40a483583cc57b8", "score": "0.46239612", "text": "func (v *UpdateFeaturesResponse) Default() {\n}", "title": "" }, { "docid": "37847ab719ee120a121b6752927e197e", "score": "0.46155968", "text": "func NewUpdateFirmwareDefault(code int) *UpdateFirmwareDefault {\n\treturn &UpdateFirmwareDefault{\n\t\t_statusCode: code,\n\t}\n}", "title": "" }, { "docid": "2f0c545f8ba488d8930451c92cf0761d", "score": "0.46083006", "text": "func (v *DescribeConfigsRequestResource) Default() {\n}", "title": "" }, { "docid": "5b760a461edf1a49e162d7cb95357898", "score": "0.4604198", "text": "func SetPlatformDefaults(p *aws.Platform) {\n}", "title": "" }, { "docid": "d2db0f919f7ef61a2f0ca94d31a7dcad", "score": "0.46012735", "text": "func KfSystemWithDefaults(name string, kfo ...KfSystemOption) *v1alpha1.KfSystem {\n\toptions := append([]KfSystemOption{WithDefaults}, kfo...)\n\treturn KfSystem(name, options...)\n}", "title": "" }, { "docid": "d7da54c82e3f34a384078d09337ea739", "score": "0.45962083", "text": "func newDefaultConf() (*wordsConf, error) {\n\tif defaultConfErr != nil {\n\t\treturn nil, defaultConfErr\n\t}\n\treturn newConf(defaultConf)\n}", "title": "" }, { "docid": "65087406663798685944502b8ae7f36c", "score": "0.4594891", "text": "func DefaultNewNode(config *cfg.Config, logger log.Logger, misbehaviors map[int64]cs.Misbehavior) (*Node, error) {\n\tnodeKey, err := p2p.LoadOrGenNodeKey(config.NodeKeyFile())\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to load or gen node key %s, err: %w\", config.NodeKeyFile(), err)\n\t}\n\n\treturn NewNode(config,\n\t\tLoadOrGenFilePV(config.PrivValidatorKeyFile(), config.PrivValidatorStateFile()),\n\t\tnodeKey,\n\t\tproxy.DefaultClientCreator(config.ProxyApp, config.ABCI, config.DBDir()),\n\t\tDefaultGenesisDocProviderFunc(config),\n\t\tDefaultDBProvider,\n\t\tDefaultMetricsProvider(config.Instrumentation),\n\t\tlogger,\n\t\tmisbehaviors,\n\t)\n}", "title": "" }, { "docid": "df2a3cd5288b2bdba775d7f9ca7b2de9", "score": "0.45841035", "text": "func DefaultUserAgent() string {\n\treturn fmt.Sprintf(\"godatahub/%s golang/%s %s\", DATAHUB_SDK_VERSION, runtime.Version(), runtime.GOOS)\n}", "title": "" }, { "docid": "85e02332f13b2db5f716cbecf3e813b8", "score": "0.45838445", "text": "func (v *AlterClientQuotasResponseEntryEntity) Default() {\n}", "title": "" }, { "docid": "09e9fbdc33070edeada2a31590b4e581", "score": "0.45780313", "text": "func NewKernel(app inter.App) 
http.Kernel {\n\treturn http.Kernel{App: &app}\n}", "title": "" }, { "docid": "361e8ea767d26f6ce047861f0d4ba753", "score": "0.45740607", "text": "func (v *DescribeDelegationTokenRequestOwner) Default() {\n}", "title": "" }, { "docid": "8c676ef20e02e260a21045c11c22bb7f", "score": "0.45705643", "text": "func (v *CreatePartitionsRequestTopic) Default() {\n}", "title": "" }, { "docid": "2403c0082e44ca53741c694d06d59cfc", "score": "0.45576093", "text": "func (s *Stack) SetDefaultOs(v string) *Stack {\n\ts.DefaultOs = &v\n\treturn s\n}", "title": "" }, { "docid": "d3db599790581dee77a80b51d237f422", "score": "0.45557344", "text": "func (v *IncrementalAlterConfigsResponse) Default() {\n}", "title": "" }, { "docid": "f1f381e98a55fb68cbf69cdfde87dae9", "score": "0.45548287", "text": "func (r *AzureManagedControlPlane) Default() {\n\tazuremanagedcontrolplanelog.Info(\"default\", \"name\", r.Name)\n\n\tif r.Spec.NetworkPlugin == nil {\n\t\tnetworkPlugin := \"azure\"\n\t\tr.Spec.NetworkPlugin = &networkPlugin\n\t}\n\tif r.Spec.LoadBalancerSKU == nil {\n\t\tloadBalancerSKU := \"standard\"\n\t\tr.Spec.LoadBalancerSKU = &loadBalancerSKU\n\t}\n\tif r.Spec.NetworkPolicy == nil {\n\t\tNetworkPolicy := \"calico\"\n\t\tr.Spec.NetworkPolicy = &NetworkPolicy\n\t}\n}", "title": "" }, { "docid": "da83be4dd54841a1d6ef0d6414913c0b", "score": "0.45533484", "text": "func (v *AddPartitionsToTxnResponseTopicPartition) Default() {\n}", "title": "" }, { "docid": "4edfefd829886116768b1f7c2666f390", "score": "0.45532376", "text": "func (v *AlterISRResponse) Default() {\n}", "title": "" }, { "docid": "fc8c7866fb096e4bd7d0ca93c4d6cf32", "score": "0.454798", "text": "func (o BareMetalNodePoolConfigOutput) OperatingSystem() BareMetalNodePoolConfigOperatingSystemPtrOutput {\n\treturn o.ApplyT(func(v BareMetalNodePoolConfig) *BareMetalNodePoolConfigOperatingSystem { return v.OperatingSystem }).(BareMetalNodePoolConfigOperatingSystemPtrOutput)\n}", "title": "" }, { "docid": "b92646d07e4a8c12a5eb040537780713", "score": "0.45473453", "text": "func (v *AlterConfigsResponse) Default() {\n}", "title": "" }, { "docid": "272d6548be9b72bdefbe863344f336b9", "score": "0.4546281", "text": "func (v *DescribeClientQuotasRequest) Default() {\n}", "title": "" }, { "docid": "6049f61b1f58599cfa273414dea72bad", "score": "0.4544332", "text": "func (s *Service) GetDefaultWindowsImage(ctx context.Context, location, k8sVersion, runtime, osAndVersion string) (*infrav1.Image, error) {\n\tv122 := semver.MustParse(\"1.22.0\")\n\tv, err := semver.ParseTolerant(k8sVersion)\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"unable to parse Kubernetes version \\\"%s\\\"\", k8sVersion)\n\t}\n\n\t// If containerd is specified we don't currently support less than 1.22\n\tif v.LE(v122) && runtime == \"containerd\" {\n\t\treturn nil, errors.New(\"containerd image only supported in 1.22+\")\n\t}\n\n\tif osAndVersion == \"\" {\n\t\tosAndVersion = azure.DefaultWindowsOsAndVersion\n\t}\n\n\t// Starting with 1.22 we default to containerd for Windows unless the runtime flag is set.\n\tif v.GE(v122) && runtime != \"dockershim\" && !strings.HasSuffix(osAndVersion, \"-containerd\") {\n\t\tosAndVersion += \"-containerd\"\n\t}\n\n\tpublisher, offer := azure.DefaultImagePublisherID, azure.DefaultWindowsImageOfferID\n\tskuID, version, err := s.getSKUAndVersion(\n\t\tctx, location, publisher, offer, k8sVersion, osAndVersion)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to get default image\")\n\t}\n\n\tdefaultImage := &infrav1.Image{\n\t\tMarketplace: 
&infrav1.AzureMarketplaceImage{\n\t\t\tImagePlan: infrav1.ImagePlan{\n\t\t\t\tPublisher: publisher,\n\t\t\t\tOffer: offer,\n\t\t\t\tSKU: skuID,\n\t\t\t},\n\t\t\tVersion: version,\n\t\t},\n\t}\n\n\treturn defaultImage, nil\n}", "title": "" }, { "docid": "2ed54a0ae2306a05836ccb1e2e2a9e14", "score": "0.45406744", "text": "func (o *AddOperatingSystemKernelDefault) Code() int {\n\treturn o._statusCode\n}", "title": "" } ]
cd305395760746ff2097d6784791b216
fixDate converts dates from DD/MM/YYYY to YYYYMMDD.
[ { "docid": "33db072842e549dad14c39ed38136feb", "score": "0.7825216", "text": "func fixDate(date string) string {\n\tif len(date) != len(\"26/04/2021\") || strings.Count(date, \"/\") != 2 {\n\t\treturn date\n\t}\n\n\treturn date[6:10] + \"-\" + date[3:5] + \"-\" + date[0:2]\n}", "title": "" } ]
[ { "docid": "d0d56a741b38e601e86a4b7e7ff2e7d6", "score": "0.6059644", "text": "func ReformatDate(date string) string {\r\n\tmonthMap := map[string]string{\r\n\t\t\"Jan\": \"1\",\r\n\t\t\"Feb\": \"2\",\r\n\t\t\"Mar\": \"3\",\r\n\t\t\"Apr\": \"4\",\r\n\t\t\"May\": \"5\",\r\n\t\t\"Jun\": \"6\",\r\n\t\t\"Jul\": \"7\",\r\n\t\t\"Aug\": \"8\",\r\n\t\t\"Sep\": \"9\",\r\n\t\t\"Oct\": \"10\",\r\n\t\t\"Nov\": \"11\",\r\n\t\t\"Dec\": \"12\"}\r\n\tarr := strings.Fields(date)\r\n\tvar day, month, year = arr[0], arr[1], arr[2]\r\n\tdayReg := regexp.MustCompile(\"[0-9]{1,2}\")\r\n\tdayString := dayReg.Find([]byte(day))\r\n\toutputList := []string{year, monthMap[month], string(dayString)}\r\n\toutput := strings.Join(outputList, \"-\")\r\n\treturn output\r\n}", "title": "" }, { "docid": "3b2eeabecd5efab691594f1a59fb7786", "score": "0.6040901", "text": "func ConvertDate(date string) (string, error) {\n\t// expected date format YYYY-MM-DDT00:00:00Z\n\t// therefore we want to get the YYYY-MM-DD only\n\tsplitDateTime := strings.Split(date, \"T\")\n\n\tif len(splitDateTime) == 2 {\n\t\tformat := \"2006-01-02\"\n\t\t// get the YYYY-MM-DD\n\t\tgetYMD := splitDateTime[0]\n\n\t\tsplittedDate := strings.Split(getYMD, \"-\")\n\n\t\tif len(splittedDate) == 3 {\n\t\t\tdateTime, err := time.Parse(format, getYMD)\n\n\t\t\tif err != nil {\n\t\t\t\treturn date, errors.New(\"Parsing error\")\n\t\t\t}\n\n\t\t\t// Format data 20 June 2020\n\t\t\tnewDateFormat := strconv.Itoa(dateTime.Day()) + \" \" + dateTime.Month().String() + \" \" + strconv.Itoa(dateTime.Year())\n\n\t\t\treturn newDateFormat, nil\n\t\t}\n\n\t\treturn date, errors.New(\"Database date format is not as expected\")\n\t}\n\n\treturn date, errors.New(\"Database date format is not as expected\")\n}", "title": "" }, { "docid": "951507102b58e6e9a382960efb63e7ca", "score": "0.60147697", "text": "func (tr Row) ForceDate(nn int) (val Date) {\n\tval, _ = tr.DateErr(nn)\n\treturn\n}", "title": "" }, { "docid": "2c60bcf71434780958b4a8c9a454ae95", "score": "0.5937199", "text": "func DateReformat(dt1 string, currentFormat, newFormat string) string {\n\tif dx := StringAsDateTime(dt1, currentFormat); !dx.IsZero() {\n\t\treturn DateTimeAsString(dx, newFormat)\n\t}\n\n\treturn \"\"\n}", "title": "" }, { "docid": "776c816096989a9f9afe973c7caac0eb", "score": "0.55484045", "text": "func CondensedDate(rawDate string) string {\n\treturn strings.Replace(rawDate, \"-\", \"\", -1)\n}", "title": "" }, { "docid": "16abece521339f77eaf8a3a41207dd3c", "score": "0.54563546", "text": "func Date(date string) string {\n\n\tvar newDate string\n\tsplitDate := strings.Split(date, \" \")\n\tmonths := [12]string{\"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\", \"Jun\", \"Jul\", \"Aug\", \"Sep\", \"Oct\", \"Nov\", \"Dec\"}\n\n\tfor i, month := range months {\n\t\tif month == splitDate[0] {\n\t\t\tnewDate = strconv.Itoa(i)\n\t\t}\n\t}\n\tnewDate += \"/\" + string(splitDate[1][:len(splitDate[1])-1]) + \"/\" + splitDate[2]\n\treturn newDate\n}", "title": "" }, { "docid": "3d95594f3c47198aa640131458f9e24f", "score": "0.53649557", "text": "func DateStrAddSlash (dateStrShort string)(dateStr string){\r\n\tdateByte := []byte(dateStrShort)\r\n\tbyteTmp := [][]byte{ dateByte[0:4], []byte(\"-\"), dateByte[4:6], []byte(\"-\"), dateByte[6:8] }\r\n\tfor _, v := range byteTmp{\r\n \tstrTmp := string(v)\r\n\t\t//fmt.Printf(\"##, v: %v, strTmp: %v \\n \", v, strTmp)\r\n\t\tdateStr = dateStr + strTmp\r\n\t}\r\n\t//fmt.Print(\"## dateStr:\", dateStr,)\r\n\treturn\r\n}", "title": "" }, { "docid": 
"88506e06836ec8ee117adde8a4c87bb2", "score": "0.53640515", "text": "func sortDate(testdates []string) ([]string, error) {\n\t//var date string\n\n\tfor num1, i := range testdates {\n\t\tfor num2, j := range testdates {\n\t\t\tcheck := false\n\n\t\t\tyearI, err := strconv.Atoi(i[0:4])\n\t\t\tif err != nil {\n\t\t\t\treturn testdates, err\n\t\t\t}\n\n\t\t\tyearJ, err := strconv.Atoi(j[0:4])\n\t\t\tif err != nil {\n\t\t\t\treturn testdates, err\n\t\t\t}\n\n\t\t\tmonthI, err := strconv.Atoi(i[5:7])\n\t\t\tif err != nil {\n\t\t\t\treturn testdates, err\n\t\t\t}\n\n\t\t\tmonthJ, err := strconv.Atoi(j[5:7])\n\t\t\tif err != nil {\n\t\t\t\treturn testdates, err\n\t\t\t}\n\n\t\t\tdayI, err := strconv.Atoi(i[8:10])\n\t\t\tif err != nil {\n\t\t\t\treturn testdates, err\n\t\t\t}\n\n\t\t\tdayJ, err := strconv.Atoi(j[8:10])\n\t\t\tif err != nil {\n\t\t\t\treturn testdates, err\n\t\t\t}\n\n\t\t\tif yearI < yearJ {\n\t\t\t\tcheck = true\n\t\t\t} else if monthI < monthJ && yearI == yearJ {\n\t\t\t\tcheck = true\n\t\t\t} else if dayI < dayJ && monthI == monthJ && yearI == yearJ {\n\t\t\t\tcheck = true\n\t\t\t}\n\n\t\t\tif check {\n\t\t\t\ttestdates[num1], testdates[num2] = testdates[num2], testdates[num1]\n\t\t\t}\n\t\t}\n\t}\n\n\treturn testdates, nil\n}", "title": "" }, { "docid": "9291a36bdf13d7cfbdd36eb525c11a51", "score": "0.5278999", "text": "func (query *DateQuery) Reformat() {\n\tfor i, v := range query.Dates {\n\t\ttemp := strings.Split(v, \"T\")\n\t\ttemp = append(temp[0:1], \"T00:00:00.000\")\n\t\tquery.Dates[i] = strings.Join(temp, \"\")\n\t}\n}", "title": "" }, { "docid": "38ea13f2ef4c8a2e3d6694891ff6bc7e", "score": "0.5237361", "text": "func DateStrRmvSlash (dateStr_ string)(dateStr string){\r\n dateByte := []byte(dateStr_)\r\n byteTmp := [][]byte{ dateByte[0:4], dateByte[5:7], dateByte[8:10] }\r\n for _, v := range byteTmp{\r\n strTmp := string(v)\r\n //fmt.Printf(\"##, v: %v, strTmp: %v \\n \", v, strTmp)\r\n dateStr = dateStr + strTmp\r\n }\r\n //fmt.Print(\"## dateStr:\", dateStr,)\r\n return\r\n}", "title": "" }, { "docid": "512cdfb538ebb3071e158a37b7dda40c", "score": "0.5197794", "text": "func (j *Journals) convertDates() error {\n\tvar err error\n\tfor n := len(j.Journals) - 1; n >= 0; n-- {\n\t\tj.Journals[n].JournalDate, err = helpers.DotNetJSONTimeToRFC3339(j.Journals[n].JournalDate, false)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tj.Journals[n].CreatedDateUTC, err = helpers.DotNetJSONTimeToRFC3339(j.Journals[n].CreatedDateUTC, true)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "463380ebd6dc08e71903d6c23b8f6b76", "score": "0.5175275", "text": "func (c *Contacts) convertDates() error {\n\tvar err error\n\tfor n := len(c.Contacts) - 1; n >= 0; n-- {\n\t\tc.Contacts[n].UpdatedDateUTC, err = helpers.DotNetJSONTimeToRFC3339(c.Contacts[n].UpdatedDateUTC, true)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "0b2af4ae8aaf62ed6f9cd5fd3b24c95b", "score": "0.51106316", "text": "func convertDate(input string) (strfmt.Date, error) {\n\ttemp, err := formats.Parse(\"date\", input)\n\tif err != nil {\n\t\treturn strfmt.Date{}, err\n\t}\n\treturn *temp.(*strfmt.Date), nil\n}", "title": "" }, { "docid": "9556229a06ba8209bc5f3c78dfa6f439", "score": "0.51099086", "text": "func ConvertDateSearchByGroupType(form *QueryForm) (error, string) {\n\n\tswitch form.GroupType {\n\tcase GROUP_TYPE_MONTH:\n\t\t// convert YYYY/MM to YYYY/MM/DD\n\t\tif strings.TrimSpace(form.MonthFrom) == \"\" 
{\n\t\t\tform.DateFrom = \"\"\n\t\t} else {\n\t\t\tmonthFromTime, err := time.Parse(Common.DATE_FORMAT_YM_SLASH, form.MonthFrom)\n\t\t\tif err != nil {\n\t\t\t\treturn err, RPComon.REPORT_ERROR_MONTH_FORMAT\n\t\t\t}\n\t\t\tform.DateFrom = monthFromTime.Format(Common.DATE_FORMAT_YMD_SLASH)\n\t\t}\n\n\t\tif strings.TrimSpace(form.MonthTo) == \"\" {\n\t\t\tform.DateTo = \"\"\n\t\t} else {\n\t\t\tmonthToTime, err := time.Parse(Common.DATE_FORMAT_YM_SLASH, form.MonthTo)\n\t\t\tif err != nil {\n\t\t\t\treturn err, RPComon.REPORT_ERROR_MONTH_FORMAT\n\t\t\t}\n\t\t\tform.DateTo = monthToTime.AddDate(0, 1, -1).Format(Common.DATE_FORMAT_YMD_SLASH)\n\t\t}\n\tcase GROUP_TYPE_WEEK:\n\t\t// convert YYYY/MM/DD~MM/DD to YYYY/MM/DD\n\t\tarrWF := strings.Split(form.WeekFrom, \"~\")\n\t\tif strings.TrimSpace(form.WeekFrom) == \"\" || len(arrWF) != 2 {\n\t\t\tform.DateFrom = \"\"\n\t\t} else {\n\t\t\tif len(arrWF) <= 0 {\n\t\t\t\treturn nil, RPComon.REPORT_ERROR_WEEK_FORMAT\n\t\t\t}\n\t\t\tweekFromTime, err := time.Parse(Common.DATE_FORMAT_YMD_SLASH, arrWF[0])\n\t\t\tif err != nil {\n\t\t\t\treturn err, RPComon.REPORT_ERROR_WEEK_FORMAT\n\t\t\t}\n\t\t\tform.DateFrom = weekFromTime.Format(Common.DATE_FORMAT_YMD_SLASH)\n\t\t}\n\n\t\tarrWT := strings.Split(form.WeekTo, \"~\")\n\t\tif strings.TrimSpace(form.WeekTo) == \"\" || len(arrWT) != 2 {\n\t\t\tform.DateTo = \"\"\n\t\t} else {\n\t\t\tif len(arrWT) <= 0 {\n\t\t\t\treturn nil, RPComon.REPORT_ERROR_WEEK_FORMAT\n\t\t\t}\n\t\t\tweekToTime, err := time.Parse(Common.DATE_FORMAT_YMD_SLASH, arrWT[0])\n\t\t\tif err != nil {\n\t\t\t\treturn err, RPComon.REPORT_ERROR_WEEK_FORMAT\n\t\t\t}\n\t\t\tweekToTimeFull, err := time.Parse(Common.DATE_FORMAT_YMD_SLASH, weekToTime.Format(\"2006\")+\"/\"+arrWT[1])\n\t\t\tif err != nil {\n\t\t\t\treturn err, RPComon.REPORT_ERROR_WEEK_FORMAT\n\t\t\t}\n\t\t\tform.DateTo = weekToTimeFull.Format(Common.DATE_FORMAT_YMD_SLASH)\n\t\t}\n\t}\n\treturn nil, \"\"\n}", "title": "" }, { "docid": "024037484167cda87c109fd84264b74a", "score": "0.5054348", "text": "func ParseDate(currentdate string, intonly bool) (returndate string) {\n\tnewdate := strings.TrimSpace(currentdate)\n\tnewdatesplit := strings.SplitAfter(newdate, \",\")\n\tdate, year := newdatesplit[0], newdatesplit[1]\n\tnewyearsplit := strings.SplitAfter(year, \"at\")\n\tyear = strings.TrimSuffix(newyearsplit[0], \" at\")\n\tyear = strings.TrimPrefix(year, \" \")\n\n\tdatesplit2 := strings.SplitAfter(date, \" \")\n\tday, month := datesplit2[1], datesplit2[0]\n\tday = strings.TrimSuffix(day, \",\")\n\n\tswitch month {\n\tcase \"January \":\n\t\tmonth = \"1\"\n\tcase \"February \":\n\t\tmonth = \"2\"\n\tcase \"March \":\n\t\tmonth = \"3\"\n\tcase \"April \":\n\t\tmonth = \"4\"\n\tcase \"May \":\n\t\tmonth = \"5\"\n\tcase \"June \":\n\t\tmonth = \"6\"\n\tcase \"July \":\n\t\tmonth = \"7\"\n\tcase \"August \":\n\t\tmonth = \"8\"\n\tcase \"September \":\n\t\tmonth = \"9\"\n\tcase \"October \":\n\t\tmonth = \"10\"\n\tcase \"November \":\n\t\tmonth = \"11\"\n\tcase \"December \":\n\t\tmonth = \"12\"\n\t}\n\n\tnk := year + \"-\" + month + \"-\" + day\n\tif intonly == true {\n\t\tnk = year + month + day\n\t}\n\n\treturn nk\n}", "title": "" }, { "docid": "7943621fec200ef2583a136ce5819afd", "score": "0.4938656", "text": "func (date Date) Normalized(lang ...language.Code) (Date, error) {\n\tnormalized, _, err := normalizeAndCheckDate(string(date), getLangHint(lang))\n\treturn normalized, err\n}", "title": "" }, { "docid": "5546050444b6426a69ecc512d48aba6f", "score": "0.4926953", "text": "func 
formatDate(t time.Time) string {\n\tlayout := \"02/01/2006\"\n\treturn t.Format(layout)\n}", "title": "" }, { "docid": "b541425133acf5c08f013f55e7372246", "score": "0.4907135", "text": "func FormatIssuedDate(md *libgin.RepositoryMetadata) string {\n\tvar datestr string\n\tfor _, mddate := range md.Dates {\n\t\t// There should be only one, but we might add some other types of date\n\t\t// at some point, so best be safe.\n\t\tif mddate.Type == \"Issued\" {\n\t\t\tdatestr = mddate.Value\n\t\t\tbreak\n\t\t}\n\t}\n\n\tdate, err := time.Parse(\"2006-01-02\", datestr)\n\tif err != nil {\n\t\t// This will also occur if the date isn't found in 'md' and the string\n\t\t// remains empty\n\t\tlog.Printf(\"Failed to parse issued date: %s\", datestr)\n\t\treturn \"\"\n\t}\n\treturn date.Format(\"02 Jan. 2006\")\n}", "title": "" }, { "docid": "e9e5f2645791b4aec14b99fe85637eeb", "score": "0.49009135", "text": "func toDate(x string, layout string) int32 {\r\n\tt, err := time.Parse(layout, x)\r\n\tif err != nil {\r\n\t\tfmt.Fprintf(os.Stderr, \"Error with date '%v', not following format '%v'\\n\", x, layout)\r\n\t\treturn 0\r\n\t}\r\n\treturn int32(t.Unix() / 60 / 60 / 24)\r\n}", "title": "" }, { "docid": "12fe37cf29a4cd697f04ea55f4e2aa93", "score": "0.48918688", "text": "func appendDateToSlug(d string, sl string) string {\n\t// get the string date into RFC3339 format (Must have a Z at the end - WP doesn't)\n\tt, _ := time.Parse(time.RFC3339, d+\"Z\")\n\t// conver date to UK format seperated by -\n\tfd := strings.Replace(t.Format(shortFormUK), \" \", \"-\", -1)\n\t// join the strings with a -\n\treturn strings.Join([]string{fd, sl}, \"-\")\n}", "title": "" }, { "docid": "8b9a688fd610575540ac70cbf046fc2a", "score": "0.48716983", "text": "func CheckDateFormat(e *log.Logger, path string, sheet int, row int, column string, s string) (string, int) {\n\t//if date is empty, just return\n\tif s == \"\" {\n\n\t\treturn s, 2\n\t}\n\t// get rid of \"\\\\\",\";\"and \"@\"in the date strings\n\tvalue := strings.Replace(s, \"\\\\\", \"\", -1)\n\tvalue = strings.Replace(value, \";\", \"\", -1)\n\tvalue = strings.Replace(value, \"@\", \"\", -1)\n\n\t// YYYY-MM-DD\n\tmatched1, err := regexp.MatchString(\"^[0-9]{4}-(0?[1-9]|1[012])-(0?[1-9]|[12][0-9]|3[01])$\", value)\n\tCheckErr(e, err)\n\n\t// DD-MMM-YY\n\tmatched2, err := regexp.MatchString(\"^(0?[1-9]|[12][0-9]|3[01])-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-[0-9]{2}$\", value)\n\tCheckErr(e, err)\n\n\t// MM-DD-YY\n\tmatched3, err := regexp.MatchString(\"^(0?[1-9]|1[012])-(0?[1-9]|[12][0-9]|3[01])-[0-9]{2}$\", value)\n\tCheckErr(e, err)\n\n\t// YYYY/MM/DD\n\tmatched4, err := regexp.MatchString(\"^[0-9]{4}/(0?[1-9]|1[012])/(0?[1-9]|[12][0-9]|3[01])$\", value)\n\tCheckErr(e, err)\n\n\t// M/DD/YY HH:MM\n\tmatched5, err := regexp.MatchString(\"^(0?[1-9]|1[012])/(0?[1-9]|[12][0-9]|3[01])/[0-9]{2} ([0-9]|0[0-9]|1[0-9]|2[0-3]):[0-5][0-9]$\", value)\n\tCheckErr(e, err)\n\n\t// YYYY-M\n\tmatched6, err := regexp.MatchString(\"^[0-9]{4}-(0?[1-9]|1[012])$\", value)\n\tCheckErr(e, err)\n\n\t// YYYY\n\tmatched7, err := regexp.MatchString(\"^[0-9]{4}$\", value)\n\tCheckErr(e, err)\n\n\tif matched1 {\n\n\t\treturn value, 0\n\t} else if matched2 {\n\n\t\tt, err := time.Parse(\"02-Jan-06\", value)\n\t\tCheckErr(e, err)\n\t\treturn t.Format(\"2006-01-02\"), 0\n\t} else if matched3 {\n\n\t\tt, err := time.Parse(\"01-02-06\", value)\n\t\tCheckErr(e, err)\n\t\treturn t.Format(\"2006-01-02\"), 0\n\t} else if matched4 {\n\n\t\tt, err := time.Parse(\"2006/01/02\", 
value)\n\t\tCheckErr(e, err)\n\t\treturn t.Format(\"2006-01-02\"), 0\n\t} else if matched5 {\n\n\t\tt, err := time.Parse(\"1/2/06 15:04\", value)\n\t\tCheckErr(e, err)\n\t\treturn t.Format(\"2006-01-02\"), 0\n\t} else if matched6 {\n\n\t\tt, err := time.Parse(\"2006-1\", value)\n\t\tCheckErr(e, err)\n\t\tnewTime := t.Format(\"2006-01\")\n\t\tnewTime += \"-15\"\n\t\treturn newTime, 1\n\t} else if matched7 {\n\n\t\tnewTime := value + \"-07-01\"\n\t\treturn newTime, 1\n\t}\n\n\te.Println(path, \"Sheet#:\", sheet+1, \"Row#:\", row+2, \"Column:\", column, \"INFO: Invalid Format of Date:\", value)\n\n\t// return value\n\treturn value, 3\n}", "title": "" }, { "docid": "e521e1e5cf60a5308b60e7891e871109", "score": "0.4842186", "text": "func MysqlToYYYYMMDD(text string) (date string, err error) {\n\treturn ConvertMysql(text, func(t time.Time) string {\n\t\treturn fmt.Sprintf(\"%v/%v/%v\", t.Year(), int(t.Month()), t.Day())\n\t})\n}", "title": "" }, { "docid": "f631552f1bcc7d23617194e5e07b827d", "score": "0.4821478", "text": "func GetYNABDate(r1 string) string {\n\t// example 20161028 -> 28/10/2016\n\treturn r1[6:8] + \"/\" + r1[4:6] + \"/\" + r1[:4]\n}", "title": "" }, { "docid": "64724694136aad01b0484faf04be8ea3", "score": "0.48142985", "text": "func FormatDate(date string, format string, newformat string) string {\n\ttmpDate, _ := time.Parse(format, date)\n\n\treturn tmpDate.Format(newformat)\n}", "title": "" }, { "docid": "35f4e7bb516a746cd45891a7293e178a", "score": "0.48113963", "text": "func (l *Logger) LogDirectoryCleanup(baseFilePath string, daysToKeep int) {\n\n\tl.Startedf(\"LogDirectoryCleanup\", \"BaseFilePath[%s] DaysToKeep[%d]\", baseFilePath, daysToKeep)\n\n\t// Get a list of existing directories.\n\tfileInfos, err := ioutil.ReadDir(baseFilePath)\n\tif err != nil {\n\t\tl.CompletedError(\"LogDirectoryCleanup\", err)\n\t\treturn\n\t}\n\n\t// Create the date to compare for directories to remove.\n\tcurrentDate := time.Now().UTC()\n\tcompareDate := time.Date(currentDate.Year(), currentDate.Month(), currentDate.Day()-daysToKeep, 0, 0, 0, 0, time.UTC)\n\n\tl.Debug(\"LogDirectoryCleanup\", \"CompareDate[%v]\", compareDate)\n\n\tfor _, fileInfo := range fileInfos {\n\t\tif fileInfo.IsDir() == false {\n\t\t\tcontinue\n\t\t}\n\n\t\t// The file name look like: YYYY-MM-DD\n\t\tparts := strings.Split(fileInfo.Name(), \"-\")\n\n\t\tyear, err := strconv.Atoi(parts[0])\n\t\tif err != nil {\n\t\t\tl.Errorf(\"LogDirectoryCleanup\", err, \"Attempting To Convert Directory [%s]\", fileInfo.Name())\n\t\t\tcontinue\n\t\t}\n\n\t\tmonth, err := strconv.Atoi(parts[1])\n\t\tif err != nil {\n\t\t\tl.Errorf(\"LogDirectoryCleanup\", err, \"Attempting To Convert Directory [%s]\", fileInfo.Name())\n\t\t\tcontinue\n\t\t}\n\n\t\tday, err := strconv.Atoi(parts[2])\n\t\tif err != nil {\n\t\t\tl.Errorf(\"LogDirectoryCleanup\", err, \"Attempting To Convert Directory [%s]\", fileInfo.Name())\n\t\t\tcontinue\n\t\t}\n\n\t\t// The directory to check.\n\t\tfullFileName := fmt.Sprintf(\"%s/%s\", baseFilePath, fileInfo.Name())\n\n\t\t// Create a time type from the directory name.\n\t\tdirectoryDate := time.Date(year, time.Month(month), day, 0, 0, 0, 0, time.UTC)\n\n\t\t// Compare the dates and convert to days.\n\t\tdaysOld := int(compareDate.Sub(directoryDate).Hours() / 24)\n\n\t\tl.Debug(\"LogDirectoryCleanup\", \"Checking Directory[%s] DaysOld[%d]\", fullFileName, daysOld)\n\n\t\tif daysOld >= 0 {\n\t\t\tl.Debug(\"LogDirectoryCleanup\", \"Removing Directory[%s]\", fullFileName)\n\n\t\t\terr = 
os.RemoveAll(fullFileName)\n\t\t\tif err != nil {\n\t\t\t\tl.Debug(\"LogDirectoryCleanup\", \"Attempting To Remove Directory [%s]\", fullFileName)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tl.Debug(\"LogDirectoryCleanup\", \"Directory Removed [%s]\", fullFileName)\n\t\t}\n\t}\n\n\t// We don't need the catch handler to log any errors.\n\terr = nil\n\n\tl.Completed(\"LogDirectoryCleanup\")\n\treturn\n}", "title": "" }, { "docid": "69a5e1d215c50167d532ae2d3298e0dd", "score": "0.47640568", "text": "func MakeDateStyle(font *Font, fill *Fill, alignment *Alignment, border *Border) StreamStyle {\n\treturn MakeStyle(DateFormat_dd_mm_yy, font, fill, alignment, border)\n}", "title": "" }, { "docid": "b520700380380aa8bffb1ae07ea99e88", "score": "0.47117734", "text": "func convertDate(timeStamp time.Time) string {\n\treturn timeStamp.Format(\"2006-01-02 15:04:05\")\n}", "title": "" }, { "docid": "e20e565b848d8b1cbf48aa59390c20e1", "score": "0.4696594", "text": "func FmtDatePtr(date *time.Time) *strfmt.Date {\n\tif date == nil || date.IsZero() {\n\t\treturn nil\n\t}\n\treturn (*strfmt.Date)(date)\n}", "title": "" }, { "docid": "85bb3181622148beb47f4f2ad2c3bf4e", "score": "0.4675275", "text": "func (rd *ReturnDetail) ForwardBundleDateField() string {\n\treturn rd.formatYYYYMMDDDate(rd.ForwardBundleDate)\n}", "title": "" }, { "docid": "f2114b914dafef95a990db82fb875014", "score": "0.46689245", "text": "func processDate(date string) time.Time {\n\n\t// hmmmmm\n\t// month in golang is a renamed int type\n\tyear, _ := strconv.Atoi(date[0:4])\n\tmonth, _ := strconv.Atoi(date[4:6])\n\tday, _ := strconv.Atoi(date[6:])\n\treturn time.Date(year, time.Month(month), day, 0, 0, 0, 0, time.UTC)\n}", "title": "" }, { "docid": "3292c5be55186fea1f94c394c8bbb8ca", "score": "0.46628237", "text": "func (o *BeaconUserCreateEmbeddedReport) SetFraudDate(v string) {\n\to.FraudDate = v\n}", "title": "" }, { "docid": "89a79a7b176ecbc61cdfc5feb91f222a", "score": "0.46443677", "text": "func MakeDate(year int, month time.Month, day int) Date {\n\treturn Date(time.Date(year, month, day, 0, 0, 0, 0, time.UTC))\n}", "title": "" }, { "docid": "c63998e0bfe7cf4528b34aa909ce237d", "score": "0.4625173", "text": "func absDate(abs uint64, full bool) (year int, month time.Month, day int, yday int)", "title": "" }, { "docid": "ba9c68392271a6253d803afdf36152e5", "score": "0.46200898", "text": "func (o *BeaconReportCreateRequest) SetFraudDate(v string) {\n\to.FraudDate = v\n}", "title": "" }, { "docid": "10b7f8f189bf0a331d5730b361564177", "score": "0.4619124", "text": "func FmtDate(date time.Time) *strfmt.Date {\n\tif date.IsZero() {\n\t\treturn nil\n\t}\n\n\tfmtDate := strfmt.Date(date)\n\treturn &fmtDate\n}", "title": "" }, { "docid": "5aa7102ebef6a4ed2b342cefb6be788b", "score": "0.46131393", "text": "func (m Message) IssueDate() (*field.IssueDateField, quickfix.MessageRejectError) {\n\tf := &field.IssueDateField{}\n\terr := m.Body.Get(f)\n\treturn f, err\n}", "title": "" }, { "docid": "72bf16427a98699dbcff91a13f6d1bab", "score": "0.46070114", "text": "func Date(layout string) DateRule {\n\treturn DateRule{\n\t\tlayout: layout,\n\t\terr: ErrDateInvalid,\n\t\trangeErr: ErrDateOutOfRange,\n\t}\n}", "title": "" }, { "docid": "e698df2ff097a3d5c7b2ef9915e8956d", "score": "0.4602064", "text": "func (r *CrashReport) parseDate() {\n\tif r.Data.Time == 0 {\n\t\tr.Data.Time = float64(time.Now().Unix())\n\t}\n\tr.ReportDate = time.Unix(int64(r.Data.Time), 0)\n}", "title": "" }, { "docid": "f60eba719339d220f63ab156ebb5b6b0", "score": "0.45554784", "text": "func 
BuildURLFromDate(date SimpleDate) string {\n\tstr := buildDateString(date)\n\treturn \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports/\" + str\n}", "title": "" }, { "docid": "2ea41ffd55c330d7b2c662330afe0598", "score": "0.4542199", "text": "func (o *BeaconReportCreateResponse) SetFraudDate(v string) {\n\to.FraudDate = v\n}", "title": "" }, { "docid": "26ca64c17666ca24a9b0c6596d97cb1b", "score": "0.45408174", "text": "func (m *ManualJournals) convertManualJournalDates() error {\n\tvar err error\n\tfor n := len(m.ManualJournals) - 1; n >= 0; n-- {\n\t\tm.ManualJournals[n].Date, err = helpers.DotNetJSONTimeToRFC3339(m.ManualJournals[n].Date, false)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tm.ManualJournals[n].UpdatedDateUTC, err = helpers.DotNetJSONTimeToRFC3339(m.ManualJournals[n].UpdatedDateUTC, true)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "a151318547a2a4082a7799090e8aab20", "score": "0.4533527", "text": "func IsDate(date string) bool {\n\tif len(date) != len(\"2021-04-26\") || strings.Count(date, \"-\") != 2 {\n\t\treturn false\n\t}\n\n\ty, errY := strconv.Atoi(date[0:4])\n\tm, errM := strconv.Atoi(date[5:7])\n\td, errD := strconv.Atoi(date[8:10])\n\tif errY != nil || errM != nil || errD != nil {\n\t\treturn false\n\t}\n\n\t// Ok, we'll still be using this in 2200 :)\n\tif y < 1970 || y > 2200 {\n\t\treturn false\n\t}\n\tif m < 1 || m > 12 {\n\t\treturn false\n\t}\n\tnDays := [13]int{0, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}\n\tif d < 1 || d > nDays[m] {\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" }, { "docid": "abd896ea98e22741bd92761362ab0977", "score": "0.45095685", "text": "func myTruncateDate(t time.Time) string {\n\n\t// convert time to string\n\ts := t.String()\n\n\t// lop off time (00:44:20 +0000 UTC) and only leave date (2019-04-11)\n\tif len(s) > 10 {\n\t\treturn s[:10]\n\t}\n\treturn s\n}", "title": "" }, { "docid": "2d7899fd3e5b6943f79da612ca8b8e97", "score": "0.45025805", "text": "func (m Message) DatedDate() (*field.DatedDateField, quickfix.MessageRejectError) {\n\tf := &field.DatedDateField{}\n\terr := m.Body.Get(f)\n\treturn f, err\n}", "title": "" }, { "docid": "fd1a871b59980fcbbe043c0e854b0648", "score": "0.44740152", "text": "func FixOutOfDateTransform(sub, pre *OTransform) {\n\t// Get insertion lengths (codepoints)\n\tsubInsert, preInsert := bytes.Runes([]byte(sub.Insert)), bytes.Runes([]byte(pre.Insert))\n\tsubLength, preLength := len(subInsert), len(preInsert)\n\n\tif pre.Position <= sub.Position {\n\t\tif preLength > 0 && pre.Delete == 0 {\n\t\t\tsub.Position += preLength\n\t\t} else if pre.Delete > 0 && (pre.Position+pre.Delete) <= sub.Position {\n\t\t\tsub.Position += (preLength - pre.Delete)\n\t\t} else if pre.Delete > 0 && (pre.Position+pre.Delete) > sub.Position {\n\t\t\toverhang := intMin(sub.Delete, (pre.Position+pre.Delete)-sub.Position)\n\t\t\tsub.Delete -= overhang\n\t\t\tsub.Position = pre.Position + preLength\n\t\t}\n\t} else if sub.Delete > 0 && (sub.Position+sub.Delete) > pre.Position {\n\t\tposGap := pre.Position - sub.Position\n\t\texcess := intMax(0, (sub.Delete - posGap))\n\n\t\tif excess > pre.Delete {\n\t\t\tsub.Delete += (preLength - pre.Delete)\n\n\t\t\tnewInsert := make([]rune, subLength+preLength)\n\t\t\tcopy(newInsert[:], subInsert)\n\t\t\tcopy(newInsert[subLength:], preInsert)\n\n\t\t\tsub.Insert = string(newInsert)\n\t\t} else {\n\t\t\tsub.Delete = posGap\n\t\t}\n\t}\n}", "title": "" }, { 
"docid": "523caacfd0ca5507b5f0d06b273675e8", "score": "0.44673717", "text": "func (s *StationboardService) formatDate(date time.Time) (string, error) {\n\t// check if the date is zero\n\tif date.IsZero() {\n\t\treturn \"\", fmt.Errorf(\"provided date is zero: please provide a valid time.Time as date\")\n\t}\n\treturn date.Format(\"2006-01-02 15:04\"), nil\n}", "title": "" }, { "docid": "367d31ea6bc5e0203f1e223521b4d73d", "score": "0.44637328", "text": "func ToyyyyMMdd(time time.Time) string {\n\treturn FormatTime(\"yyyy-MM-dd\", time)\n}", "title": "" }, { "docid": "4378c30207cf55da04fbfd88b050f942", "score": "0.44571188", "text": "func schdlDropDate(timestamp uint32) uint32 {\n\treturn timestamp - schdlDropTime(timestamp)\n}", "title": "" }, { "docid": "ab263f519503288abdc7fe3cdd0900aa", "score": "0.4446166", "text": "func ToDateFromCivilDate(date civil.Date) time.Time {\n\tformatedDate, _ := time.Parse(dateFormat, strconv.Itoa(date.Year)+\"-\"+strconv.Itoa(int(date.Month))+\"-\"+strconv.Itoa(date.Day))\n\treturn formatedDate\n}", "title": "" }, { "docid": "12ac8f268e004b07711f728dc5eca5c9", "score": "0.44421008", "text": "func toDate(date string) (t time.Time) {\r\n\tif date == \"\" {\r\n\t\treturn time.Time{}\r\n\t}\r\n\tlayout := \"2006-01-02\"\r\n\tt, err := time.ParseInLocation(layout, date, time.Local)\r\n\tif err != nil {\r\n\t\tpanic(err)\r\n\t}\r\n\treturn t\r\n}", "title": "" }, { "docid": "8cca049e41da3d65b0faba02dde6553e", "score": "0.44351193", "text": "func (f *File) SetDate(fields []string) {\n\tvar dateValue string\n\tvar dateField string\n\tlayout := \"2006:01:02 15:04:05\"\n\tlayoutTZ := \"2006:01:02 15:04:05-07:00\"\n\n\t// Try exif meta data\n\tfor _, field := range fields {\n\t\tif value, ok := f.ExifMetaData[field]; ok {\n\t\t\tdateField = field\n\t\t\tdateValue = value.(string)\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif dateValue != \"\" {\n\t\tvar dateLayout string\n\t\tif strings.Contains(dateValue, \"-\") || strings.Contains(dateValue, \"+\") {\n\t\t\tdateLayout = layoutTZ\n\t\t} else {\n\t\t\tdateLayout = layout\n\t\t}\n\t\tif date, err := time.Parse(dateLayout, dateValue); err != nil {\n\t\t\tlogrus.WithError(err).Error(\"could not parse time\")\n\t\t} else {\n\t\t\tf.Date = date\n\t\t\tf.DateField = dateField\n\t\t}\n\t}\n\n\t// Try birth time\n\tif f.Date.IsZero() {\n\t\tif t, err := times.Stat(f.Path); err == nil && t.HasBirthTime() {\n\t\t\tf.Date = t.BirthTime()\n\t\t\tf.DateField = \"BirthTime\"\n\t\t}\n\t}\n\n\tif f.Date.IsZero() {\n\t\tf.Date = f.Info.ModTime()\n\t\tf.DateField = \"ModTime\"\n\t}\n}", "title": "" }, { "docid": "9456b08f4a193a7cec886739f74b589d", "score": "0.44170967", "text": "func (o *BeaconUserCreateEmbeddedReport) GetFraudDate() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.FraudDate\n}", "title": "" }, { "docid": "bd09a2e0a21c4fad15fbbfd9b49cb390", "score": "0.44130275", "text": "func CreateDateDir(basePath string) error {\n\tif _, err := os.Stat(basePath); os.IsNotExist(err) {\n\t\tif err := os.MkdirAll(basePath, 0777); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif err := os.Chmod(basePath, 0777); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "02d95c7210621ff7a369cdc4adaf3f6d", "score": "0.4404643", "text": "func (field *Field) unpackDate() (err error) {\n\tvar logp = `unpackDate`\n\n\tif len(field.Value) == 0 {\n\t\treturn fmt.Errorf(`%s: empty date`, logp)\n\t}\n\n\tvar (\n\t\tvalue = sanitize([]byte(field.Value))\n\t\tparser = libbytes.NewParser(value, 
[]byte{',', ' '})\n\n\t\tvstr string\n\t\ttoken []byte\n\t\tc byte\n\t\tok bool\n\t)\n\n\ttoken, c = parser.ReadNoSpace()\n\tparser.RemoveDelimiters([]byte{','})\n\tif c == ',' {\n\t\tvar dow = string(token)\n\t\tfor _, vstr = range libtime.ShortDayNames {\n\t\t\tif vstr == dow {\n\t\t\t\tok = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(`%s: invalid day of week %s`, logp, dow)\n\t\t}\n\t\ttoken, _ = parser.ReadNoSpace()\n\t}\n\n\t// Get day ...\n\tvar day int64\n\tday, err = strconv.ParseInt(string(token), 10, 64)\n\tif err != nil {\n\t\treturn fmt.Errorf(`%s: invalid or missing day %s`, logp, token)\n\t}\n\n\t// Get month ...\n\tvar month time.Month\n\ttoken, _ = parser.ReadNoSpace()\n\tmonth, ok = libtime.ShortMonths[string(token)]\n\tif !ok {\n\t\treturn fmt.Errorf(`%s: invalid or missing month %s`, logp, token)\n\t}\n\n\t// Get year ...\n\tvar year int64\n\ttoken, _ = parser.ReadNoSpace()\n\tyear, err = strconv.ParseInt(string(token), 10, 64)\n\tif err != nil {\n\t\treturn fmt.Errorf(`%s: invalid or missing year %s`, logp, token)\n\t}\n\n\t// Obsolete year format allow two or three digits.\n\tswitch {\n\tcase year < 50:\n\t\tyear += 2000\n\tcase year >= 50 && year < 1000:\n\t\tyear += 1900\n\t}\n\n\tparser.AddDelimiters([]byte{':'})\n\n\t// Get hour ...\n\tvar hour int64\n\ttoken, c = parser.ReadNoSpace()\n\thour, err = strconv.ParseInt(string(token), 10, 64)\n\tif err != nil {\n\t\treturn fmt.Errorf(`%s: invalid or missing hour %s`, logp, token)\n\t}\n\tif hour < 0 || hour > 23 {\n\t\treturn fmt.Errorf(`%s: invalid hour %d`, logp, hour)\n\t}\n\tif c == ' ' {\n\t\t_, c = parser.SkipSpaces()\n\t}\n\tif c != ':' {\n\t\treturn fmt.Errorf(`%s: invalid or missing time separator`, logp)\n\t}\n\n\t// Get minute ...\n\tvar min int64\n\ttoken, c = parser.ReadNoSpace()\n\tmin, err = strconv.ParseInt(string(token), 10, 64)\n\tif err != nil {\n\t\treturn fmt.Errorf(`%s: invalid or missing minute %s`, logp, token)\n\t}\n\tif min < 0 || min > 59 {\n\t\treturn fmt.Errorf(`%s: invalid minute %d`, logp, min)\n\t}\n\ttoken = nil\n\n\tif c == ' ' {\n\t\ttoken, c = parser.ReadNoSpace()\n\t\tif c == ':' && len(token) != 0 {\n\t\t\treturn fmt.Errorf(`%s: unknown token after minute %q`, logp, token)\n\t\t}\n\t\t// At this point the date may have second and token may be a\n\t\t// zone.\n\t\t// We check again later if token is nil after parsing the\n\t\t// second part.\n\t}\n\n\tparser.RemoveDelimiters([]byte{':'})\n\n\t// Get second ...\n\tvar sec int64\n\tif c == ':' {\n\t\ttoken, _ = parser.ReadNoSpace()\n\t\tsec, err = strconv.ParseInt(string(token), 10, 64)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(`%s: invalid second %s`, logp, token)\n\t\t}\n\t\tif sec < 0 || sec > 59 {\n\t\t\treturn fmt.Errorf(`%s: invalid second %d`, logp, sec)\n\t\t}\n\t\ttoken = nil\n\t}\n\n\t// Get zone offset.\n\tvar (\n\t\toff int64\n\t\tzone string\n\t)\n\tif token == nil { // The data contains second.\n\t\ttoken, _ = parser.ReadNoSpace()\n\t\tif len(token) == 0 {\n\t\t\treturn fmt.Errorf(`%s: invalid or missing zone %s`, logp, token)\n\t\t}\n\t}\n\tif len(token) != 0 {\n\t\tif token[0] == '+' || token[0] == '-' {\n\t\t\toff, err = strconv.ParseInt(string(token), 10, 64)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(`%s: invalid or missing zone offset %s`, logp, token)\n\t\t\t}\n\t\t\tzone = `UTC`\n\t\t} else {\n\t\t\tzone = string(token)\n\t\t}\n\t}\n\n\tvar (\n\t\tloc = time.FixedZone(zone, computeOffSeconds(off))\n\t\ttd = time.Date(int(year), month, int(day), int(hour), int(min), 
int(sec), 0, loc)\n\t)\n\n\tfield.date = &td\n\tfield.unpacked = true\n\n\treturn nil\n}", "title": "" }, { "docid": "1e4b56bceb8b6b481bb9fc6c58e3f76e", "score": "0.4402338", "text": "func parseDate(item *gofeed.Item) string {\n\tt := item.Title\n\treturn string(t[len(t)-10:])\n}", "title": "" }, { "docid": "d5d4958f2bcc596ff9b071d616055bb4", "score": "0.44009572", "text": "func TestPdfDateBuild(t *testing.T) {\n\t// Case 1. Test everything.\n\tdateStr1 := \"D:20080313232937+01'00'\"\n\tdate, err := NewPdfDate(dateStr1)\n\tif err != nil {\n\t\tt.Errorf(\"Fail: %s\", err)\n\t\treturn\n\t}\n\n\tobj := date.ToPdfObject()\n\tstrObj, ok := obj.(*core.PdfObjectString)\n\tif !ok {\n\t\tt.Errorf(\"Date PDF object should be a string\")\n\t\treturn\n\t}\n\tif strObj.Str() != dateStr1 {\n\t\tt.Errorf(\"Built date string does not match original (%s)\", strObj)\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "ee147490008249e953762f9b95a82e09", "score": "0.4399088", "text": "func (c *Config) validateDate() error {\n\tif c.DataSettings.DatabaseData != nil {\n\t\tif c.DataSettings.DatabaseData.StartDate.IsZero() ||\n\t\t\tc.DataSettings.DatabaseData.EndDate.IsZero() {\n\t\t\treturn errStartEndUnset\n\t\t}\n\t\tif c.DataSettings.DatabaseData.StartDate.After(c.DataSettings.DatabaseData.EndDate) ||\n\t\t\tc.DataSettings.DatabaseData.StartDate.Equal(c.DataSettings.DatabaseData.EndDate) {\n\t\t\treturn errBadDate\n\t\t}\n\t}\n\tif c.DataSettings.APIData != nil {\n\t\tif c.DataSettings.APIData.StartDate.IsZero() ||\n\t\t\tc.DataSettings.APIData.EndDate.IsZero() {\n\t\t\treturn errStartEndUnset\n\t\t}\n\t\tif c.DataSettings.APIData.StartDate.After(c.DataSettings.APIData.EndDate) ||\n\t\t\tc.DataSettings.APIData.StartDate.Equal(c.DataSettings.APIData.EndDate) {\n\t\t\treturn errBadDate\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "4f0d9e544f93fd38c57dd7d8ff3ddc99", "score": "0.4386545", "text": "func MysqlToMMDD(text string) (date string, err error) {\n\treturn ConvertMysql(text, func(t time.Time) string {\n\t\treturn fmt.Sprintf(\"%v/%v\", int(t.Month()), t.Day())\n\t})\n}", "title": "" }, { "docid": "87803edcd03c5daf0bab488fc0fce87b", "score": "0.43837702", "text": "func parseFullDate(buff []byte, cursor *int, l int) (fullDate, error) {\n\tvar fd fullDate\n\n\tyear, err := parseYear(buff, cursor, l)\n\tif err != nil {\n\t\treturn fd, err\n\t}\n\n\tif *cursor >= l || buff[*cursor] != '-' {\n\t\treturn fd, syslogparser.ErrTimestampUnknownFormat\n\t}\n\n\t*cursor++\n\n\tmonth, err := parseMonth(buff, cursor, l)\n\tif err != nil {\n\t\treturn fd, err\n\t}\n\n\tif *cursor >= l || buff[*cursor] != '-' {\n\t\treturn fd, syslogparser.ErrTimestampUnknownFormat\n\t}\n\n\t*cursor++\n\n\tday, err := parseDay(buff, cursor, l)\n\tif err != nil {\n\t\treturn fd, err\n\t}\n\n\tfd = fullDate{\n\t\tyear: year,\n\t\tmonth: month,\n\t\tday: day,\n\t}\n\n\treturn fd, nil\n}", "title": "" }, { "docid": "ac5d36cfd18c41db698a0b696409d231", "score": "0.43715212", "text": "func SliceSortDate(slice [][]string, fmtDate string, dateCol, secDateCol int, ascendant bool) [][]string {\n\tfieldsCount := len(slice[0]) // Get nb of columns\n\tvar firstLine int\n\tvar previous, after string\n\tvar positiveidx, negativeidx int\n\t// compute unix date using given column numbers\n\tfor idx := firstLine; idx < len(slice); idx++ {\n\t\tdateStr := glte.FindDate(slice[idx][dateCol], fmtDate)\n\t\tif dateStr != nil { // search for 1st column\n\t\t\tslice[idx] = append(slice[idx], fmt.Sprintf(\"%d\", glte.FormatDate(fmtDate, 
dateStr[0]).Unix()))\n\t\t} else if secDateCol != -1 { // Check for second column if it was given\n\t\t\tdateStr = glte.FindDate(slice[idx][secDateCol], fmtDate)\n\t\t\tif dateStr != nil { // If date was not found in 1st column, search for 2nd column\n\t\t\t\tslice[idx] = append(slice[idx], fmt.Sprintf(\"%d\", glte.FormatDate(fmtDate, slice[idx][secDateCol]).Unix()))\n\t\t\t} else { // in case where none of the columns given contain date field, put null string if there is no way to find a date\n\t\t\t\tslice[idx] = append(slice[idx], ``)\n\t\t\t}\n\t\t} else { // put null string if there is no way to find a date\n\t\t\tslice[idx] = append(slice[idx], ``)\n\t\t}\n\t}\n\t// Ensure we always have a value in sorting field (get previous or next closer)\n\tfor idx := firstLine; idx < len(slice); idx++ {\n\t\tif slice[idx][fieldsCount] == `` {\n\t\t\tfor idxFind := firstLine + 1; idxFind < len(slice); idxFind++ {\n\t\t\t\tpositiveidx = idx + idxFind\n\t\t\t\tnegativeidx = idx - idxFind\n\t\t\t\tif positiveidx >= len(slice) { // Check index to avoiding 'out of range'\n\t\t\t\t\tpositiveidx = len(slice) - 1\n\t\t\t\t}\n\t\t\t\tif negativeidx <= 0 {\n\t\t\t\t\tnegativeidx = 0\n\t\t\t\t}\n\t\t\t\tafter = slice[positiveidx][fieldsCount] // Get previous or next value\n\t\t\t\tprevious = slice[negativeidx][fieldsCount]\n\t\t\t\tif previous != `` { // Set value, prioritise the previous one.\n\t\t\t\t\tslice[idx][fieldsCount] = previous\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tif after != `` {\n\t\t\t\t\tslice[idx][fieldsCount] = after\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\ttmpLines := make([][]string, 0)\n\tif ascendant != true {\n\t\t// Sort by unix date preserving order descendant\n\t\tsort.SliceStable(slice, func(i, j int) bool { return slice[i][len(slice[i])-1] > slice[j][len(slice[i])-1] })\n\t\tfor idx := firstLine; idx < len(slice); idx++ { // Store row count elements - 1\n\t\t\ttmpLines = append(tmpLines, slice[idx][:len(slice[idx])-1])\n\t\t}\n\t} else {\n\t\t// Sort by unix date preserving order ascendant\n\t\tsort.SliceStable(slice, func(i, j int) bool { return slice[i][len(slice[i])-1] < slice[j][len(slice[i])-1] })\n\t\tfor idx := firstLine; idx < len(slice); idx++ { // Store row count elements - 1\n\t\t\ttmpLines = append(tmpLines, slice[idx][:len(slice[idx])-1])\n\t\t}\n\t}\n\treturn tmpLines\n}", "title": "" }, { "docid": "14743fb1f802023f4a2efbf0b4b8bade", "score": "0.43700585", "text": "func (u utilsCollection) ToDate(val interface{}) string {\n\tswitch v := val.(type) {\n\tcase time.Time, *time.Time:\n\t\tt, ok := v.(time.Time)\n\t\tif !ok {\n\t\t\ttptr, _ := v.(*time.Time)\n\t\t\tt = *tptr\n\t\t}\n\t\ty, m, d := t.Date()\n\t\tdate := time.Date(y, m, d, 0, 0, 0, 0, t.Location())\n\t\treturn date.Format(\"2006-01-02\")\n\tdefault:\n\t\treturn u.ToString(val)\n\t}\n}", "title": "" }, { "docid": "45ad2ba1310f5eda6a8b2044680fe94e", "score": "0.43699014", "text": "func (ctx *Context) ExtractDate(s string) (Date, Span, error) {\n\n\tfor _, pat := range dateCrackers {\n\t\tfd := Date{}\n\t\tspan := Span{}\n\t\tnames := pat.SubexpNames()\n\t\tmatchSpans := pat.FindStringSubmatchIndex(s)\n\t\tif matchSpans == nil {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar fail bool\n\n\t\tunknowns := make([]int, 0, 3) // for ambiguous components\n\t\tfor i, name := range names {\n\t\t\tstart, end := matchSpans[i*2], matchSpans[(i*2)+1]\n\t\t\tvar sub string\n\t\t\tif start >= 0 && end >= 0 {\n\t\t\t\tsub = strings.ToLower(s[start:end])\n\t\t\t}\n\n\t\t\tswitch name {\n\t\t\tcase \"year\":\n\t\t\t\tyear, e := 
strconv.Atoi(sub)\n\t\t\t\tif e == nil {\n\t\t\t\t\tyear = ExtendYear(year)\n\t\t\t\t\tfd.SetYear(year)\n\t\t\t\t} else {\n\t\t\t\t\tfail = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\tcase \"month\":\n\t\t\t\tmonth, e := strconv.Atoi(sub)\n\t\t\t\tif e == nil {\n\t\t\t\t\t// it was a number\n\t\t\t\t\tif month < 1 || month > 12 {\n\t\t\t\t\t\tfail = true\n\t\t\t\t\t\tbreak // month out of range\n\t\t\t\t\t}\n\t\t\t\t\tfd.SetMonth(month)\n\t\t\t\t} else {\n\t\t\t\t\t// try month name\n\t\t\t\t\tmonth, ok := monthLookup[sub]\n\t\t\t\t\tif !ok {\n\t\t\t\t\t\tfail = true\n\t\t\t\t\t\tbreak // nope.\n\t\t\t\t\t}\n\t\t\t\t\tfd.SetMonth(month)\n\t\t\t\t}\n\t\t\tcase \"cruftmonth\":\n\t\t\t\t// special case to handle \"Jan/Feb 2010\"...\n\t\t\t\t// we'll make sure the first month is valid, then ignore it\n\t\t\t\t_, ok := monthLookup[sub]\n\t\t\t\tif !ok {\n\t\t\t\t\tfail = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\tcase \"day\":\n\t\t\t\tday, e := strconv.Atoi(sub)\n\t\t\t\tif e != nil {\n\t\t\t\t\tfail = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tif day < 1 || day > 31 {\n\t\t\t\t\tfail = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tfd.SetDay(day)\n\t\t\tcase \"x1\", \"x2\", \"x3\":\n\t\t\t\t// could be day, month or year...\n\t\t\t\tx, e := strconv.Atoi(sub)\n\t\t\t\tif e != nil {\n\t\t\t\t\tfail = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tunknowns = append(unknowns, x)\n\t\t\t}\n\t\t}\n\n\t\tif fail {\n\t\t\t// regexp matched, but values sucked.\n\t\t\tcontinue\n\t\t}\n\n\t\t// got enough?\n\t\tif (fd.HasYear() && fd.HasMonth()) || (fd.HasMonth() && fd.HasDay()) {\n\t\t\tif fd.sane() {\n\t\t\t\tspan.Begin, span.End = matchSpans[0], matchSpans[1]\n\t\t\t\treturn fd, span, nil\n\t\t\t}\n\t\t} else {\n\t\t\t// got some ambiguous components to try?\n\t\t\tif len(unknowns) == 2 && fd.HasYear() {\n\t\t\t\tunknowns = append(unknowns, fd.Year())\n\t\t\t}\n\t\t\tif len(unknowns) == 3 {\n\t\t\t\tvar err error\n\t\t\t\tfd, err = ctx.DateResolver(unknowns[0], unknowns[1], unknowns[2])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn Date{}, Span{}, err\n\t\t\t\t}\n\n\t\t\t\tif fd.HasYear() && fd.HasMonth() && fd.HasDay() && fd.sane() {\n\t\t\t\t\t// resolved.\n\t\t\t\t\tspan.Begin, span.End = matchSpans[0], matchSpans[1]\n\t\t\t\t\treturn fd, span, nil\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// nothing. 
Just return an empty date and span\n\treturn Date{}, Span{}, nil\n}", "title": "" }, { "docid": "905c9f53e99301a895ba782b9a088e2d", "score": "0.43607113", "text": "func FixturesDate(date string, include string, page int, allPages bool) ([]byte, error) {\n\treturn Get(fmt.Sprintf(\"fixtures/date/%v\", date), include, page, allPages)\n}", "title": "" }, { "docid": "ae7def062e73768a63548fa09295663b", "score": "0.43565458", "text": "func Normalize(str string, lang ...language.Code) (Date, error) {\n\treturn Date(str).Normalized(lang...)\n}", "title": "" }, { "docid": "0df7f05072b5eec327bdcda8e3ef6e65", "score": "0.4350024", "text": "func ParseDate(rawDate string) Date {\n\textractDateInfo := regexp.MustCompile(`(?P<Day>\\d{2})(?P<Month>[A-Z][a-z]{2})(?P<Year>\\d{4})\\((?P<WeekDay>\\w{3,4})\\)`)\n\tmatches := extractDateInfo.FindStringSubmatch(rawDate)\n\n\tnamedMatches := GetNamedMatches(matches, extractDateInfo.SubexpNames())\n\treturn Date{Day: namedMatches[\"Day\"], Month: namedMatches[\"Month\"], Year: namedMatches[\"Year\"], WeekDay: namedMatches[\"WeekDay\"]}\n}", "title": "" }, { "docid": "41f57effeaf1ce57dc1dcb4ddf14a028", "score": "0.43426913", "text": "func JulianToDate(jdn int) (year int, month time.Month, day int) {\n\t// Richards, E. G. (2013) pp. 585–624\n\n\te := 4*(jdn+1401+(4*jdn+274277)/146097*3/4-38) + 3\n\th := e%1461/4*5 + 2\n\n\tday = h%153/5 + 1\n\tmonth = time.Month((h/153+2)%12 + 1)\n\tyear = e/1461 - 4716 + (14-int(month))/12\n\n\treturn year, month, day\n}", "title": "" }, { "docid": "4b5d8c96de0893788e5b246fba122034", "score": "0.43406558", "text": "func (tr Row) Date(nn int) (val Date) {\n\tval, err := tr.DateErr(nn)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn\n}", "title": "" }, { "docid": "f93ca606429f9b223ba86b5ab9ad3dcc", "score": "0.43400264", "text": "func JulianDate(year int, month int, day int) float64 {\n\tif month <= 2 {\n\t\tyear -= 1\n\t\tmonth += 12\n\t}\n\tyearf, monthf, dayf := float64(year), float64(month), float64(day)\n\n\tA := math.Floor(yearf / 100.0)\n\tB := 2 - A + math.Floor(A/4)\n\n\tJD := math.Floor(365.25*(yearf+4716.0)) + math.Floor(30.6001*(monthf+1.0)) + dayf + B - 1524.5\n\treturn JD\n}", "title": "" }, { "docid": "fbeff08a62dd68fb8b6647e4a21fc4d7", "score": "0.43375006", "text": "func builtInToDays(parameters []*vector.Vector, result vector.FunctionResultWrapper, proc *process.Process, length int) error {\n\tdateParams := vector.GenerateFunctionFixedTypeParameter[types.Datetime](parameters[0])\n\trs := vector.MustFunctionResult[int64](result)\n\tfor i := uint64(0); i < uint64(length); i++ {\n\t\tdatetimeValue, isNull := dateParams.GetValue(i)\n\t\tif isNull {\n\t\t\tif err := rs.Append(0, true); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\t\trs.Append(DateTimeDiff(intervalUnitDAY, types.ZeroDatetime, datetimeValue)+ADZeroDays, false)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "45d0803173fc8d02c325672a76776349", "score": "0.4335912", "text": "func _815dateFunc(tls crt.TLS, _context uintptr /* *Tsqlite3_context = Ssqlite3_context */, _argc int32, _argv uintptr /* **Tsqlite3_value = Ssqlite3_value */) {\n\tesc := crt.MustMalloc(148)\n\tvar (\n\t\t_x = esc // *TDateTime = SDateTime\n\t\t_zBuf = esc + 48 // *[100]int8\n\t)\n\tdefer crt.Free(esc)\n\tif _1105isDate(tls, _context, _argc, _argv, _x) != int32(0) {\n\t\tgoto _1\n\t}\n\n\t_1107computeYMD(tls, _x)\n\tXsqlite3_snprintf(tls, int32(100), _zBuf, ts+21257 /* \"%04d-%02d-%02d\" */, *(*int32)(unsafe.Pointer(_x + 8)), *(*int32)(unsafe.Pointer(_x + 12)), 
*(*int32)(unsafe.Pointer(_x + 16)))\n\tXsqlite3_result_text(tls, _context, _zBuf, int32(-1), uintptr(18446744073709551615))\n_1:\n}", "title": "" }, { "docid": "05f117bc9f66c6b516ff4c045eb89dd1", "score": "0.43356544", "text": "func (t *JiraIssueProgress) DBUpdate(ctx context.Context, db DB) (sql.Result, error) {\n\tchecksum := t.CalculateChecksum()\n\tif t.GetChecksum() == checksum {\n\t\treturn nil, nil\n\t}\n\tt.Checksum = &checksum\n\tq := \"UPDATE `jira_issue_progress` SET `checksum`=?,`user_id`=?,`issue_id`=?,`start_date`=?,`end_date`=?,`duration`=?,`customer_id`=?,`ref_id`=? WHERE `id`=?\"\n\treturn db.ExecContext(ctx, q,\n\t\torm.ToSQLString(t.Checksum),\n\t\torm.ToSQLString(t.UserID),\n\t\torm.ToSQLString(t.IssueID),\n\t\torm.ToSQLInt64(t.StartDate),\n\t\torm.ToSQLInt64(t.EndDate),\n\t\torm.ToSQLInt64(t.Duration),\n\t\torm.ToSQLString(t.CustomerID),\n\t\torm.ToSQLString(t.RefID),\n\t\torm.ToSQLString(t.ID),\n\t)\n}", "title": "" }, { "docid": "1b4c8f538feb5ff555e2577b48f81cbf", "score": "0.43351185", "text": "func (i *Invoice) SetDueDate(dueDate string) (*InvoiceCell, *InvoiceCell) {\n\ti.dueDate[1].Value = dueDate\n\treturn i.dueDate[0], i.dueDate[1]\n}", "title": "" }, { "docid": "e3e7b9ffbce58c79f6326b2d9a6522a6", "score": "0.43345326", "text": "func (obj Dater) UpdateDates() {\n\tnow := time.Now().Unix()\n\tif obj.CreatedAt == 0 {\n\t\tobj.CreatedAt = now\n\t}\n\tobj.ModifiedAt = now\n}", "title": "" }, { "docid": "3a24d303cd8f07f4ec6236d11b016d71", "score": "0.43319082", "text": "func YMDasDate(yyyymmdd string) (time.Time, error) {\n\treturn YMDasDateUTC(yyyymmdd, false)\n}", "title": "" }, { "docid": "2e32c7ce0395be3e9158bd069375df1a", "score": "0.4325949", "text": "func (e *DateExternal) ToDate(db *bolt.DB) (*Date, error) {\n\tvar ret Date\n\n\t// Short-circuit if we have no target\n\tif e.Target == \"\" {\n\t\treturn nil, ErrNilTarget\n\t}\n\n\t// 0: group\n\tret.Group = e.Group\n\tif ret.Group == \"\" {\n\t\treturn nil, fmt.Errorf(\"Group is mandatory\")\n\t}\n\tg, err := getGroup(db, e.Group)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Failed to find group: %s\", err.Error())\n\t}\n\tloc, err := g.GetLocation()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Failed to get group location: %s\", err.Error())\n\t}\n\n\t// 1: Date (yyyy-mm-dddd)\n\tdate, err := parseDate(e.Date, loc)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Failed to parse date: %s\", err.Error())\n\t}\n\tret.Date = date\n\n\t// 2: Start time\n\tstart, err := parseTime(e.Start)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Failed to parse time range: %s\", err.Error())\n\t}\n\n\t// 3: Stop time\n\tstop, err := parseTime(e.Stop)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Failed to parse time range: %s\", err.Error())\n\t}\n\n\tdiff := stop - start\n\tif diff < 0 {\n\t\t// If the time is negative, we wrapped to the next day\n\t\tdiff += 24 * time.Hour\n\t}\n\n\tret.Date = ret.Date.Add(start)\n\tret.Time = diff\n\n\tret.Target = e.Target\n\n\treturn &ret, nil\n\n}", "title": "" }, { "docid": "4202fa6aeeebbdd65ed7c3c4ba8a7c31", "score": "0.43209344", "text": "func TestDateParse(t *testing.T) {\n\t// Case 1. 
Test everything.\n\tstr := \"D:20080313232937+01'00'\"\n\tdate, err := NewPdfDate(str)\n\tif err != nil {\n\t\tt.Errorf(\"Fail: %s\", err)\n\t\treturn\n\t}\n\tif date.year != 2008 {\n\t\tt.Errorf(\"Year != 2008\")\n\t\treturn\n\t}\n\tif date.month != 3 {\n\t\tt.Errorf(\"month != 3\")\n\t\treturn\n\t}\n\tif date.day != 13 {\n\t\tt.Errorf(\"Day != 13\")\n\t\treturn\n\t}\n\tif date.hour != 23 {\n\t\tt.Errorf(\"Hour != 23 (%d)\", date.hour)\n\t\treturn\n\t}\n\tif date.minute != 29 {\n\t\tt.Errorf(\"Minute != 29 (%d)\", date.minute)\n\t}\n\tif date.second != 37 {\n\t\tt.Errorf(\"Second != 37 (%d)\", date.second)\n\t\treturn\n\t}\n\tif date.utOffsetSign != '+' {\n\t\tt.Errorf(\"Invalid offset sign\")\n\t\treturn\n\t}\n\tif date.utOffsetHours != 1 {\n\t\tt.Errorf(\"Invalid offset hours\")\n\t\treturn\n\t}\n\tif date.utOffsetMins != 0 {\n\t\tt.Errorf(\"Invalid offset minutes\")\n\t\treturn\n\t}\n\n\tdateFromTime, err := NewPdfDateFromTime(date.ToGoTime())\n\tif err != nil {\n\t\tt.Errorf(\"Fail: %s\", err)\n\t\treturn\n\t}\n\tif dateFromTime.ToPdfObject().String() != date.ToPdfObject().String() {\n\t\tt.Errorf(\"Convert to and from time failed\")\n\t\treturn\n\t}\n\n\t// Case 2: Negative sign.\n\tstr = \"D:20150811050933-07'00'\"\n\tdate, err = NewPdfDate(str)\n\tif err != nil {\n\t\tt.Errorf(\"Fail: %s\", err)\n\t\treturn\n\t}\n\tif date.utOffsetSign != '-' {\n\t\tt.Errorf(\"Invalid offset sign\")\n\t\treturn\n\t}\n\tif date.utOffsetHours != 7 {\n\t\tt.Errorf(\"Invalid offset hours\")\n\t\treturn\n\t}\n\tdateFromTime, err = NewPdfDateFromTime(date.ToGoTime())\n\tif err != nil {\n\t\tt.Errorf(\"Fail: %s\", err)\n\t\treturn\n\t}\n\tif dateFromTime.ToPdfObject().String() != date.ToPdfObject().String() {\n\t\tt.Errorf(\"Convert to and from time failed\")\n\t\treturn\n\t}\n\n\t// Case 3. Offset minutes.\n\tstr = \"D:20110807220047+09'30'\"\n\tdate, err = NewPdfDate(str)\n\tif err != nil {\n\t\tt.Errorf(\"Fail: %s\", err)\n\t\treturn\n\t}\n\tif date.utOffsetMins != 30 {\n\t\tt.Errorf(\"Offset mins != 30\")\n\t\treturn\n\t}\n\tdateFromTime, err = NewPdfDateFromTime(date.ToGoTime())\n\tif err != nil {\n\t\tt.Errorf(\"Fail: %s\", err)\n\t\treturn\n\t}\n\tif dateFromTime.ToPdfObject().String() != date.ToPdfObject().String() {\n\t\tt.Errorf(\"Convert to and from time failed\")\n\t\treturn\n\t}\n\n\t// Case 4. 
Another test from failed file.\n\t// Minutes not specified at end (assume is 0).\n\tstr = \"D:20061023115457-04'\"\n\tdate, err = NewPdfDate(str)\n\tif err != nil {\n\t\tt.Errorf(\"Fail: %s\", err)\n\t\treturn\n\t}\n\tif date.year != 2006 {\n\t\tt.Errorf(\"Year != 2006\")\n\t\treturn\n\t}\n\tif date.month != 10 {\n\t\tt.Errorf(\"month != 10\")\n\t\treturn\n\t}\n\tif date.day != 23 {\n\t\tt.Errorf(\"Day != 23\")\n\t\treturn\n\t}\n\tif date.hour != 11 {\n\t\tt.Errorf(\"Hour != 11 (%d)\", date.hour)\n\t\treturn\n\t}\n\tif date.minute != 54 {\n\t\tt.Errorf(\"Minute != 29 (%d)\", date.minute)\n\t}\n\tif date.second != 57 {\n\t\tt.Errorf(\"Second != 37 (%d)\", date.second)\n\t\treturn\n\t}\n\tif date.utOffsetSign != '-' {\n\t\tt.Errorf(\"Invalid offset sign\")\n\t\treturn\n\t}\n\tif date.utOffsetHours != 4 {\n\t\tt.Errorf(\"Invalid offset hours\")\n\t\treturn\n\t}\n\tif date.utOffsetMins != 0 {\n\t\tt.Errorf(\"Invalid offset minutes\")\n\t\treturn\n\t}\n\tdateFromTime, err = NewPdfDateFromTime(date.ToGoTime())\n\tif err != nil {\n\t\tt.Errorf(\"Fail: %s\", err)\n\t\treturn\n\t}\n\tif dateFromTime.ToPdfObject().String() != date.ToPdfObject().String() {\n\t\tt.Errorf(\"Convert to and from time failed\")\n\t\treturn\n\t}\n\n\t// Case 5: Missing some more parameters.\n\t// Seems that many implementations consider some stuff optional...\n\t// Not following the standard, but we need to handle it.\n\t// D:20050823042205\n\tstr = \"D:20050823042205\"\n\tdate, err = NewPdfDate(str)\n\tif err != nil {\n\t\tt.Errorf(\"Fail: %s\", err)\n\t\treturn\n\t}\n\tif date.year != 2005 {\n\t\tt.Errorf(\"Year != 2005\")\n\t\treturn\n\t}\n\tif date.month != 8 {\n\t\tt.Errorf(\"month != 8\")\n\t\treturn\n\t}\n\tif date.day != 23 {\n\t\tt.Errorf(\"Day != 23\")\n\t\treturn\n\t}\n\tif date.hour != 04 {\n\t\tt.Errorf(\"Hour != 11 (%d)\", date.hour)\n\t\treturn\n\t}\n\tif date.minute != 22 {\n\t\tt.Errorf(\"Minute != 29 (%d)\", date.minute)\n\t}\n\tif date.second != 05 {\n\t\tt.Errorf(\"Second != 37 (%d)\", date.second)\n\t\treturn\n\t}\n\tif date.utOffsetHours != 0 {\n\t\tt.Errorf(\"Invalid offset hours\")\n\t\treturn\n\t}\n\tif date.utOffsetMins != 0 {\n\t\tt.Errorf(\"Invalid offset minutes\")\n\t\treturn\n\t}\n\tdateFromTime, err = NewPdfDateFromTime(date.ToGoTime())\n\tif err != nil {\n\t\tt.Errorf(\"Fail: %s\", err)\n\t\treturn\n\t}\n\tif dateFromTime.ToPdfObject().String() != date.ToPdfObject().String() {\n\t\tt.Errorf(\"Convert to and from time failed\")\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "21a6bce2cfce40939c12cc01ea59d6fd", "score": "0.43159112", "text": "func DateBR(date string, hour bool) (formatted string, isValid bool) {\n\tisValid, dt := valid.IsDate(date)\n\n\tif isValid {\n\t\tformatted = fmt.Sprintf(\"%02d/%02d/%04d\", dt.Day(), dt.Month(), dt.Year())\n\n\t\tif hour {\n\t\t\tformatted = fmt.Sprintf(\"%s %02d:%02d:%02d\", formatted, dt.Hour(), dt.Minute(), dt.Second())\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "a67487c3d00b0cead4adbd26a48fb899", "score": "0.4300404", "text": "func (m NoRelatedSym) SetIssueDate(v string) {\n\tm.Set(field.NewIssueDate(v))\n}", "title": "" }, { "docid": "a67487c3d00b0cead4adbd26a48fb899", "score": "0.4300404", "text": "func (m NoRelatedSym) SetIssueDate(v string) {\n\tm.Set(field.NewIssueDate(v))\n}", "title": "" }, { "docid": "b0087882f7070ffaaaeb2f50881c9431", "score": "0.42908025", "text": "func (du *DiagnosisUpdate) SetDiagnosisDate(s string) *DiagnosisUpdate {\n\tdu.mutation.SetDiagnosisDate(s)\n\treturn du\n}", "title": "" }, { "docid": 
"7097814c925fca99630a74be85be3ea7", "score": "0.42887276", "text": "func (duo *DiagnosisUpdateOne) SetDiagnosisDate(s string) *DiagnosisUpdateOne {\n\tduo.mutation.SetDiagnosisDate(s)\n\treturn duo\n}", "title": "" }, { "docid": "9b2deafbbc18638163c6278afa9a85d9", "score": "0.42851877", "text": "func chkArgDate(dFlag string, lFlag int) error {\n\tvar isValid bool = true\n\n\tif lFlag == 0 && dFlag != \"\" && dFlag != \"today\" && dFlag != \"current\" {\n\t\tif len(dFlag) != 10 && len(dFlag) != 21 {\n\t\t\tisValid = false\n\t\t} else if len(dFlag) == 10 {\n\t\t\tre10 := regexp.MustCompile(`\\d{4}-\\d{2}-\\d{2}`)\n\t\t\tisValid = re10.MatchString(dFlag)\n\t\t} else if len(dFlag) == 21 {\n\t\t\tre21 := regexp.MustCompile(`\\d{4}-\\d{2}-\\d{2}\\:\\d{4}-\\d{2}-\\d{2}`)\n\t\t\tisValid = re21.MatchString(dFlag)\n\t\t}\n\t\tif !isValid {\n\t\t\treturn errors.New(\"invalid --date parameter value, allowed values: 'today', 'current', 'YYYY-MM-DD' or 'YYYY-MM-DD:YYYY-MM-DD'\")\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "388310891cfce6b609524770d99d0876", "score": "0.42817274", "text": "func (m Message) RedemptionDate() (*field.RedemptionDateField, quickfix.MessageRejectError) {\n\tf := &field.RedemptionDateField{}\n\terr := m.Body.Get(f)\n\treturn f, err\n}", "title": "" }, { "docid": "0d4efa69b9b697a018a6d291be5b075c", "score": "0.42809615", "text": "func (o *BeaconReportCreateRequest) GetFraudDate() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.FraudDate\n}", "title": "" }, { "docid": "a9481a7eb5496e85fb5cb2c7f864c5d2", "score": "0.42681172", "text": "func (dateRange DateRange) Normalise() DateRange {\n\tif dateRange.days < 0 {\n\t\treturn DateRange{dateRange.mark.Add(dateRange.days), -dateRange.days}\n\t}\n\treturn dateRange\n}", "title": "" }, { "docid": "c6e307ebb4a5ef959ad37177c7c41870", "score": "0.42657763", "text": "func (i *DateIter) Date() string {\n\treturn i.day.Format(\"20060102\")\n}", "title": "" }, { "docid": "cab5831423176f246f4d5c5297214fe7", "score": "0.4249652", "text": "func ConvertToFormatDay(excelDaysString string)string{\n\t// 2006-01-02 距离 1900-01-01的天数\n\tbaseDiffDay := 38719 //在网上工具计算的天数需要加2天,什么原因没弄清楚\n\tcurDiffDay := excelDaysString\n\tb,_ := strconv.Atoi(curDiffDay)\n\t// 获取excel的日期距离2006-01-02的天数\n\trealDiffDay := b - baseDiffDay\n\t//fmt.Println(\"realDiffDay:\",realDiffDay)\n\t// 距离2006-01-02 秒数\n\trealDiffSecond := realDiffDay * 24 * 3600\n\t//fmt.Println(\"realDiffSecond:\",realDiffSecond)\n\t// 2006-01-02 15:04:05距离1970-01-01 08:00:00的秒数 网上工具可查出\n\tbaseOriginSecond := 1136185445\n\tresultTime := time.Unix(int64(baseOriginSecond + realDiffSecond), 0).Format(\"2006-01-02\")\n\treturn resultTime\n}", "title": "" }, { "docid": "78eb2ae61b814df9ea6778dee892b257", "score": "0.42428988", "text": "func DateCheck(d string) error {\n\t_, err := time.Parse(\"2006.01.02\", d)\n\treturn err\n}", "title": "" }, { "docid": "aa34123b779926895f2bb2e3120b0177", "score": "0.42344522", "text": "func CheckDateYMD(yyyymmdd string) bool {\n\treturn CheckDate(\"yyyymmdd\", yyyymmdd)\n}", "title": "" }, { "docid": "42fdcbe725a15f155e68d2fde748e3ac", "score": "0.4232277", "text": "func dateParseCheck(d string) (valid bool, today bool, diff int) {\n\tlayout := \"2006-01-02\"\n\ttoday = false\n\tvalid = true\n\tcheck, err := time.Parse(layout, d)\n\tif err != nil {\n\t\tlog.Printf(\"Failed to parse date format: %v\", err)\n\t\tdiff = 0\n\t\tvalid = false\n\t\treturn\n\t}\n\tcurrent := time.Now()\n\tdiff = int(current.Sub(check).Hours() / 
24)\n\tif diff < 1 && diff != 0 {\n\t\tlog.Printf(\"Birthday date not correct, difference fromt today: %v\", diff)\n\t\tvalid = false\n\t\treturn\n\t} else if diff == 0 {\n\t\tvalid = false\n\t\ttoday = true\n\t\treturn\n\t}\n\treturn\n\n}", "title": "" }, { "docid": "250e9946e72e02ff1a27fe12c3880030", "score": "0.42307574", "text": "func (S *SkippyScraper) AToBNearDate(A, B string, date time.Time) {\n\tS.AToBDuring(A, B, date, date.AddDate(0, 0, 10 /*days ahead*/))\n}", "title": "" }, { "docid": "c1e112d603151237c13964ef231b56a7", "score": "0.42220265", "text": "func (o *BeaconReportCreateResponse) GetFraudDate() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.FraudDate\n}", "title": "" }, { "docid": "e6518aead110f88dfa0a0deabd04558b", "score": "0.42211026", "text": "func fakeDate(y, m, d int) time.Time {\n\tif y == 0 && m == 0 && d == 0 {\n\t\treturn midnight(time.Time{})\n\t}\n\treturn time.Date(y, time.Month(m), d, 0, 0, 0, 0, time.UTC)\n}", "title": "" }, { "docid": "62b984b0bbca6de928db206296ec3be1", "score": "0.42016876", "text": "func (tr Row) DateErr(nn int) (val Date, err error) {\n\tswitch data := tr[nn].(type) {\n\tcase nil:\n\t\t// nop\n\tcase Date:\n\t\tval = data\n\tcase []byte:\n\t\tval, err = ParseDate(string(data))\n\t}\n\treturn\n}", "title": "" }, { "docid": "64a743e591ea31d5d02d43c25bccd482", "score": "0.4193301", "text": "func ParseDate(date string) (d Date, err error) {\n\treturn parseDate(date, fullDate)\n}", "title": "" }, { "docid": "ab658b659a6913630f4ac86819ba5b70", "score": "0.41875532", "text": "func Ɀ_Date_(year int, month int, day int) Date {\n\tdate, err := NewDate(year, month, day)\n\tif err != nil {\n\t\tpanic(\"Operation failed!\")\n\t}\n\treturn date\n}", "title": "" }, { "docid": "28fbea29738e26c327b93076bd28d2ac", "score": "0.4185818", "text": "func schdlParseDate(atDate, atTime string) (uint32, error) {\n\tstampUtc := fmt.Sprintf(\"%sT%s.000Z\", atDate, atTime)\n\tstamp, fail := time.Parse(time.RFC3339, stampUtc)\n\tif fail != nil {\n\t\treturn 0, fmt.Errorf(\"Failed to parse time: %s\", stampUtc)\n\t}\n\treturn uint32(stamp.Unix()), nil\n}", "title": "" }, { "docid": "8c84e2cbaa8bc98d1d53c00d7b2d9757", "score": "0.41814846", "text": "func (t Int16) ToDate64Checked() (Date64, Boolean) {\n\treturn t.ToDate64(), true\n}", "title": "" }, { "docid": "485333c534d6392d4660cf524909d0a4", "score": "0.4179765", "text": "func FormatDate(format cty.Value, timestamp cty.Value) (cty.Value, error) {\n\treturn FormatDateFunc.Call([]cty.Value{format, timestamp})\n}", "title": "" }, { "docid": "e00811893d21d237d2386bef2ecedffe", "score": "0.41792047", "text": "func IssueDateEdition(rawDate string, edition int) string {\n\treturn fmt.Sprintf(\"%s%02d\", CondensedDate(rawDate), edition)\n}", "title": "" } ]
ae23383aa04ebd602ceb4229e342cc8a
NewY returns a YNode at edge e
[ { "docid": "4137f8f4e0e8e102211bd9ff7ff8dfd6", "score": "0.81627446", "text": "func NewY(e geom.FullEdge) *Node {\n\treturn &Node{\n\t\tquery: yQuery,\n\t\tpayload: e,\n\t}\n}", "title": "" } ]
[ { "docid": "fb3303c2d1c0c836b34fbe02ad97611d", "score": "0.59208244", "text": "func newEdge() (e Edge) {\n\tdocInit(&e.Entry)\n\te.vertex = make(map[EdgeDirection]string)\n\treturn\n}", "title": "" }, { "docid": "b7415b9756f2c168128f5236aced462d", "score": "0.5616341", "text": "func newEdge(src node, dest node) *edgeImpl {\n\te := &edgeImpl{\n\t\ts: src,\n\t\td: dest,\n\t\tnotMarked: false,\n\t}\n\tsrc.addOut(e)\n\tdest.addIn(e)\n\treturn e\n}", "title": "" }, { "docid": "91fbb0b69497b2325f04ef12acd9b200", "score": "0.5427093", "text": "func NewEdge(cfg Config) (*Edge, error) {\n\tklog.Infof(\"New Edge Cloud Manager\")\n\tcloud := Edge{}\n\treturn &cloud, nil\n}", "title": "" }, { "docid": "3db5789ebcdbb4d6b111b2191f99c37b", "score": "0.53878385", "text": "func NewYDetails(labels []string) (*YDetails, error) {\n\treturn nil, errors.New(\"not implemented\")\n}", "title": "" }, { "docid": "53b1763e970ef532770c786a95bc7b40", "score": "0.53304887", "text": "func NewEdgeUpgrade() *cobra.Command {\n\tupgradeOptions := newUpgradeOptions()\n\n\tcmd := &cobra.Command{\n\t\tUse: \"upgrade\",\n\t\tShort: \"Upgrade edge component. Upgrade the edge node to the desired version.\",\n\t\tLong: \"Upgrade edge component. Upgrade the edge node to the desired version.\",\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\t// upgrade edgecore\n\t\t\treturn upgradeOptions.upgrade()\n\t\t},\n\t}\n\n\taddUpgradeFlags(cmd, upgradeOptions)\n\treturn cmd\n}", "title": "" }, { "docid": "465d9c348693874105ff5008667ff8fa", "score": "0.5221712", "text": "func NewEdge(a, b Point) Edge {\n\t// Order the points in the edge\n\tif a.X > b.X {\n\t\ta, b = b, a\n\t} else if a.X == b.X {\n\t\tif a.Y > b.Y {\n\t\t\ta, b = b, a\n\t\t}\n\t}\n\n\treturn Edge{a, b}\n}", "title": "" }, { "docid": "175a1513c503f0cff64462a5f73487a6", "score": "0.5176423", "text": "func NewZyre(ctx context.Context) *Zyre {\n\tzyre := &Zyre{\n\t\tevents: make(chan *Event, 1000),\n\t\trequests: make(chan *Cmd),\n\t\treplies: make(chan *Cmd),\n\t}\n\n\t// Create backend node\n\tnewNode(ctx, zyre.requests, zyre.replies, zyre.events)\n\treturn zyre\n}", "title": "" }, { "docid": "9825d3531090e1efedd69201feb484c2", "score": "0.5155287", "text": "func newTestNode(lg *LicenseGraph, targetName string) *TargetNode {\n\tif _, ok := lg.targets[targetName]; !ok {\n\t\tlg.targets[targetName] = &TargetNode{name: targetName}\n\t}\n\treturn lg.targets[targetName]\n}", "title": "" }, { "docid": "32994b8b254402bd27126de4c7091f07", "score": "0.5123122", "text": "func createEdge(start, end *Node) *Edge {\n\tedge := new(Edge)\n\tedge = &Edge{\n\t\tStartKey: start.Key,\n\t\tEndKey: end.Key,\n\t\tStart: start,\n\t\tEnd: end,\n\t}\n\treturn edge\n}", "title": "" }, { "docid": "9044e13c8cf17d499e1b24d0eb5f9cf4", "score": "0.5120657", "text": "func NewEdge(ed *edge.Descriptor) *Edge {\n\tne := &Edge{\n\t\tTag: ed.Tag,\n\t\tType: ed.Type,\n\t\tName: ed.Name,\n\t\tField: ed.Field,\n\t\tUnique: ed.Unique,\n\t\tInverse: ed.Inverse,\n\t\tRequired: ed.Required,\n\t\tImmutable: ed.Immutable,\n\t\tRefName: ed.RefName,\n\t\tThrough: ed.Through,\n\t\tStorageKey: ed.StorageKey,\n\t\tComment: ed.Comment,\n\t\tAnnotations: make(map[string]any),\n\t}\n\tfor _, at := range ed.Annotations {\n\t\tne.addAnnotation(at)\n\t}\n\tif ref := ed.Ref; ref != nil {\n\t\tne.Ref = NewEdge(ref)\n\t\tne.StorageKey = ne.Ref.StorageKey\n\t}\n\treturn ne\n}", "title": "" }, { "docid": "d94fdcd7251eaae6226526059e3ba257", "score": "0.50933236", "text": "func NewEdge(v int, w int, weight int) *Edge 
{\n\treturn &Edge{v: v, w: w, weight: weight}\n}", "title": "" }, { "docid": "711dbf88f15b86c92c954d52cbe9b8db", "score": "0.50776863", "text": "func NewEdge(id, sourceNodeId, targetNodeId string) *Edge {\n\treturn &Edge{\n\t\tId: id,\n\t\tSourceNodeId: sourceNodeId,\n\t\tTargetNodeId: targetNodeId,\n\t}\n}", "title": "" }, { "docid": "206e2abc2404b77b8c69ad5b87155123", "score": "0.506129", "text": "func (E *ECP2) GetY() *FP2 {\n\tW := NewECP2()\n\tW.Copy(E)\n\tW.Affine()\n\treturn W.y\n}", "title": "" }, { "docid": "4a3038398084f4aeb746cc3efd7656f0", "score": "0.502623", "text": "func NewYDetails(cvsAr image.Rectangle, yp *YProperties) (*YDetails, error) {\n\tcvsWidth := cvsAr.Dx()\n\tcvsHeight := cvsAr.Dy()\n\tmaxWidth := cvsWidth - 1 // Reserve one column for the line chart itself.\n\tif req := RequiredWidth(yp.Min, yp.Max); maxWidth < req {\n\t\treturn nil, fmt.Errorf(\"the available maxWidth %d is smaller than the reported required width %d\", maxWidth, req)\n\t}\n\n\tgraphHeight := cvsHeight - yp.ReqXHeight\n\tscale, err := NewYScale(yp.Min, yp.Max, graphHeight, nonZeroDecimals, yp.ScaleMode)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// See how the labels would look like on the entire maxWidth.\n\tmaxLabelWidth := maxWidth - axisWidth\n\tlabels, err := yLabels(scale, maxLabelWidth)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar width int\n\t// Determine the largest label, which might be less than maxWidth.\n\t// Such case would allow us to save more space for the line chart itself.\n\twidest := longestLabel(labels)\n\tif widest < maxLabelWidth {\n\t\t// Save the space and recalculate the labels, since they need to be realigned.\n\t\tl, err := yLabels(scale, widest)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tlabels = l\n\t\twidth = widest + axisWidth // One for the axis itself.\n\t} else {\n\t\twidth = maxWidth\n\t}\n\n\treturn &YDetails{\n\t\tWidth: width,\n\t\tStart: image.Point{width - 1, 0},\n\t\tEnd: image.Point{width - 1, graphHeight},\n\t\tScale: scale,\n\t\tLabels: labels,\n\t}, nil\n}", "title": "" }, { "docid": "a65553389b2a6cd246bef525e75f3d63", "score": "0.50246567", "text": "func NewEdge(v, w int, weight float64) Edge {\n\treturn Edge{weight: weight, from: v, to: w}\n}", "title": "" }, { "docid": "e643ffd521dc2e92d32706c4f64b0db2", "score": "0.50119627", "text": "func Y(value float64) *SimpleElement {\n\treturn newSEFloat(\"y\", value)\n}", "title": "" }, { "docid": "88591f78adc4480cfe4a561a6861387b", "score": "0.49950173", "text": "func (s *DataMap) AllocNewY(y int) {\n\ts.lock.Lock()\n\ts.data[y] = make(map[int]Link)\n\ts.lock.Unlock()\n}", "title": "" }, { "docid": "1354bcd0ad5ceb1491124a4f04fe9a34", "score": "0.49854127", "text": "func (e *Entity) SetY(n float64) {\n\te.y = n\n}", "title": "" }, { "docid": "7fc24f27910b2468a6c9e6d0f208e155", "score": "0.4981488", "text": "func (e *Euler) SetY(v float64) *Euler {\n\treturn e.Set(e.X, v, e.Z, e.Order)\n}", "title": "" }, { "docid": "fb1f96433ba97489b52f319408cc053c", "score": "0.49775043", "text": "func (e *Entity) SetY(y int) {\n\te.y = y\n}", "title": "" }, { "docid": "bdd107497d94a96da2fc567edabed545", "score": "0.49734616", "text": "func (m *Model) NewRailEdge(f *RailNode, t *RailNode) *RailEdge {\n\tre := &RailEdge{\n\t\tBase: m.NewBase(RAILEDGE, f.O),\n\t\tPersistence: NewPersistence(),\n\t}\n\tre.Init(m)\n\tre.Resolve(f.O, f, t)\n\tre.Marshal()\n\tm.Add(re)\n\tre.O.ReRouting = true\n\treturn re\n}", "title": "" }, { "docid": "0e9b5994976da8d91866a72f00dfdf17", "score": "0.49517077", 
"text": "func (g *dotGraph) NewEdge(from, to graph.Node) graph.Edge {\n\te := g.WeightedUndirectedGraph.NewWeightedEdge(from, to, math.NaN()).(simple.WeightedEdge)\n\treturn &weightedEdge{WeightedEdge: e}\n}", "title": "" }, { "docid": "c5aac3ebd706b77ac7c3339e6f91dc0c", "score": "0.49172577", "text": "func (su *SettlementUpdate) AddY(i int) *SettlementUpdate {\n\tsu.mutation.AddY(i)\n\treturn su\n}", "title": "" }, { "docid": "d591855d23a0f76291a0b36639c31532", "score": "0.49010158", "text": "func DownY(child Renderer) Renderer { return &downY{child} }", "title": "" }, { "docid": "2a1af1e3d812a12983b5e58eed0059b3", "score": "0.48626864", "text": "func newHollowEdgeNodeCommand() *cobra.Command {\n\ts := &hollowEdgeNodeConfig{\n\t\tNodeLabels: make(map[string]string),\n\t}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"edgemark\",\n\t\tLong: \"edgemark\",\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\trun(s)\n\t\t},\n\t\tArgs: func(cmd *cobra.Command, args []string) error {\n\t\t\tfor _, arg := range args {\n\t\t\t\tif len(arg) > 0 {\n\t\t\t\t\treturn fmt.Errorf(\"%q does not take any arguments, got %q\", cmd.CommandPath(), args)\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn nil\n\t\t},\n\t}\n\n\tfs := cmd.Flags()\n\tfs.AddGoFlagSet(flag.CommandLine) // for flags like --docker-only\n\ts.addFlags(fs)\n\n\treturn cmd\n}", "title": "" }, { "docid": "4cfade67683fc1754f04bdd68ce1011f", "score": "0.48289654", "text": "func (suo *SettlementUpdateOne) AddY(i int) *SettlementUpdateOne {\n\tsuo.mutation.AddY(i)\n\treturn suo\n}", "title": "" }, { "docid": "9da974c27b495c7468c41067744a6f9d", "score": "0.48223874", "text": "func (g Graph) NewEdge(from, to graph.Node) graph.Edge {\n\treturn &dotEdge{Edge: g.DirectedGraph.NewEdge(from, to), attrs: make(map[string]string)}\n}", "title": "" }, { "docid": "17d3f4bcef0fe243b67eee64c1f6116a", "score": "0.48177087", "text": "func (g GridType) Y(dataY float64) float64 {\n\treturn g.ym*dataY + g.yb\n}", "title": "" }, { "docid": "777624612b367441eba6de12d4a0d67e", "score": "0.47993812", "text": "func (nestImpl *NestImpl) Y() int64 {\n\treturn nestImpl.yImpl\n}", "title": "" }, { "docid": "0d8619346b16031b639357b18b4cdb9c", "score": "0.47859064", "text": "func (E *ECP2) gety() *FP2 {\n\treturn E.y\n}", "title": "" }, { "docid": "cd80060abd9a58c69e72cc76b1d6e117", "score": "0.47619256", "text": "func NewGoExpr(expr string) goast.Expr {\n\te, err := parser.ParseExpr(expr)\n\tif err != nil {\n\t\tpanic(\"programming error: \" + expr)\n\t}\n\n\treturn e\n}", "title": "" }, { "docid": "b0e4c0a383f63ef144fa13e69b0e87ac", "score": "0.47542188", "text": "func NewEdge(g Handle, nh1, nh2 NodeHandle) (EdgeHandle, error) {\n\tgraphsLock.Lock()\n\tdefer graphsLock.Unlock()\n\n\tgidx, ok := graphHandles[g]\n\tif !ok {\n\t\treturn EdgeHandle(\"\"), fmt.Errorf(errGraphNotFound, g)\n\t}\n\n\tvar edgeIndex uint32\n\tvar found bool\n\n\t// do we have any items in our free index\n\tfor k := range graphs[gidx].freeEdgeSet {\n\t\tdelete(graphs[gidx].freeEdgeSet, k)\n\t\tfound = true\n\t\tedgeIndex = k\n\t}\n\n\tif !found {\n\t\tedgeIndex = uint32(len(graphs[gidx].Edges))\n\t\tgraphs[gidx].Edges = append(graphs[gidx].Edges, Edge{})\n\t}\n\n\t// resolve n1 and n2\n\tn1, ok := graphs[gidx].nodeHandles[nh1]\n\tif !ok {\n\t\treturn EdgeHandle(\"\"), fmt.Errorf(errNodeNotFound, nh1)\n\t}\n\n\tn2, ok := graphs[gidx].nodeHandles[nh2]\n\tif !ok {\n\t\treturn EdgeHandle(\"\"), fmt.Errorf(errNodeNotFound, nh2)\n\t}\n\n\t// set it\n\tgraphs[gidx].Edges[edgeIndex] = Edge{Node1Index: n1, Node2Index: 
n2}\n\n\t// get a handle\n\tvar handle = EdgeHandle(uuid.NewV4().String())\n\n\t// map it\n\tgraphs[gidx].edgeHandles[handle] = edgeIndex\n\n\t// index it\n\tindexEdge(gidx, handle, []NodeHandle{nh1, nh2})\n\n\treturn handle, nil\n}", "title": "" }, { "docid": "8781ac4e6ea0ed6b196745a2c4978ac7", "score": "0.47516537", "text": "func NewDependency(source, target Node, version string) []Edge {\n\treturn []Edge{\n\t\t{\n\t\t\tSource: source,\n\t\t\tTarget: target,\n\t\t\tLabels: Attributes{},\n\t\t\tVersion: version,\n\t\t\tRelationship: Dependency,\n\t\t},\n\t\t{\n\t\t\tSource: source,\n\t\t\tTarget: target,\n\t\t\tVersion: version,\n\t\t\tLabels: Attributes{},\n\t\t\tRelationship: Dependant,\n\t\t},\n\t}\n}", "title": "" }, { "docid": "155165089a4902d9d3e841837d832e2b", "score": "0.47474858", "text": "func (ntdmuo *NetTopologyDeviceMapUpdateOne) AddPositionY(i int) *NetTopologyDeviceMapUpdateOne {\n\tntdmuo.mutation.AddPositionY(i)\n\treturn ntdmuo\n}", "title": "" }, { "docid": "d486eb6df0f2d85e8367ad44341d763f", "score": "0.4744971", "text": "func NewNext(id, outV, inV int) *Next {\n\treturn &Next{\n\t\tEdge: Edge{\n\t\t\tElement: Element{\n\t\t\t\tID: id,\n\t\t\t\tType: ElementEdge,\n\t\t\t},\n\t\t\tLabel: EdgeNext,\n\t\t},\n\t\tOutV: outV,\n\t\tInV: inV,\n\t}\n}", "title": "" }, { "docid": "96c4e62676b931a11bdbb551b82510a8", "score": "0.47397262", "text": "func New(token string) *Egnyte {\n\treturn &Egnyte{\n\t\ttoken: token,\n\t}\n}", "title": "" }, { "docid": "8e6c6793c0e66774073b2dd97a409cb3", "score": "0.4729572", "text": "func (tc *TestContext) NewEdgeNode(opts ...EdgeNodeOption) *device.Ctx {\n\td := device.CreateEdgeNode()\n\tfor _, opt := range opts {\n\t\topt(d)\n\t}\n\tif tc.project == nil {\n\t\tlog.Fatal(\"You must setup project before add node\")\n\t}\n\ttc.ConfigSync(d)\n\treturn d\n}", "title": "" }, { "docid": "0cbcb4d2041b4cabdeb1f43fd52e85f0", "score": "0.47204575", "text": "func (e *Euler) SetY(value float64) *Euler {\n\te.p.Set(\"y\", value)\n\treturn e\n}", "title": "" }, { "docid": "b706667f1571de9e6af384d670779211", "score": "0.46903276", "text": "func NewEnemy() {\n\tx, y := enemyPos()\n\n\tenemyFrame := sheet[0][0].Copy()\n\tenemyR := render.NewSwitch(\"left\", map[string]render.Modifiable{\n\t\t\"left\": enemyFrame,\n\t\t\"right\": enemyFrame.Copy().Modify(mod.FlipX),\n\t})\n\tenemy := entities.NewSolid(x, y, 16, 16,\n\t\tenemyR,\n\t\tnil, 0)\n\n\trender.Draw(enemy.R, 1, 2)\n\n\tenemy.UpdateLabel(Enemy)\n\n\tenemy.Bind(func(id int, _ interface{}) int {\n\t\tenemy := event.GetEntity(id).(*entities.Solid)\n\t\t// move towards the player\n\t\tx, y := enemy.GetPos()\n\t\tpt := floatgeom.Point2{x, y}\n\t\tpt2 := floatgeom.Point2{playerPos.X(), playerPos.Y()}\n\t\tdelta := pt2.Sub(pt).Normalize().MulConst(EnemySpeed)\n\t\tenemy.ShiftPos(delta.X(), delta.Y())\n\n\t\t// update animation\n\t\tswtch := enemy.R.(*render.Switch)\n\t\tif delta.X() > 0 {\n\t\t\tif swtch.Get() == \"left\" {\n\t\t\t\tswtch.Set(\"right\")\n\t\t\t}\n\t\t} else if delta.X() < 0 {\n\t\t\tif swtch.Get() == \"right\" {\n\t\t\t\tswtch.Set(\"left\")\n\t\t\t}\n\t\t}\n\t\treturn 0\n\t}, event.Enter)\n\n\tenemy.Bind(func(id int, _ interface{}) int {\n\t\tenemy := event.GetEntity(id).(*entities.Solid)\n\t\tenemy.Destroy()\n\t\treturn 0\n\t}, \"Destroy\")\n}", "title": "" }, { "docid": "1b88ec4235d54d121e1e54ef3219aa14", "score": "0.46581557", "text": "func newEvent(b *Bot, ev *slack.MessageEvent) *event {\n\treturn &event{\n\t\tbot: b,\n\t\tevent: ev,\n\t}\n}", "title": "" }, { "docid": 
"0d2819c32f5580142bf2ea690f1cd6b9", "score": "0.46546608", "text": "func (fprpc *FloorPlanReferencePointCreate) SetY(i int) *FloorPlanReferencePointCreate {\n\tfprpc.mutation.SetY(i)\n\treturn fprpc\n}", "title": "" }, { "docid": "28e29aca06c5bb851647bf7bb4f43857", "score": "0.46529892", "text": "func (e *Entity) Y() int {\n\treturn e.y\n}", "title": "" }, { "docid": "18547aecf9c8f6223fa9930efe417ae4", "score": "0.46516207", "text": "func (fprpu *FloorPlanReferencePointUpdate) AddY(i int) *FloorPlanReferencePointUpdate {\n\tfprpu.mutation.AddY(i)\n\treturn fprpu\n}", "title": "" }, { "docid": "e72840451db574c786d4e14553838fa6", "score": "0.46398696", "text": "func (fprpuo *FloorPlanReferencePointUpdateOne) AddY(i int) *FloorPlanReferencePointUpdateOne {\n\tfprpuo.mutation.AddY(i)\n\treturn fprpuo\n}", "title": "" }, { "docid": "830ba5d578e8eab99a39540e29e37728", "score": "0.46306297", "text": "func (r *GoodrichAndTamassia) edge(x, y int) *edge {\n\tvar xy *edge\n\tfor _, e := range r.edges {\n\t\tif sameEdge(e, x, y) {\n\t\t\txy = e\n\t\t\tbreak\n\t\t}\n\t}\n\tif xy == nil {\n\t\tvx := r.vertex(x)\n\t\tvy := r.vertex(y)\n\t\txy = &edge{\n\t\t\tvertex: [2]*vertex{vx, vy},\n\t\t}\n\t\tr.edges = append(r.edges, xy)\n\t}\n\treturn xy\n}", "title": "" }, { "docid": "325980d86a6effb26c100c4bc1a5360a", "score": "0.46204615", "text": "func (ntdmu *NetTopologyDeviceMapUpdate) AddPositionY(i int) *NetTopologyDeviceMapUpdate {\n\tntdmu.mutation.AddPositionY(i)\n\treturn ntdmu\n}", "title": "" }, { "docid": "e02fb9a6e2b7368a27f05ee8010295d1", "score": "0.46179977", "text": "func New(name string) *Yec {\n\treturn &Yec{\n\t\tappName: name,\n\t\tconfigName: \"config\",\n\t\tenvKeyReplacer: strings.NewReplacer(\"_\", \"-\"),\n\t\tconfig: make(map[string]interface{}),\n\t\tenv: make(map[string][]string),\n\t}\n}", "title": "" }, { "docid": "dab253839bbf151649c3c1cc0b5baa70", "score": "0.4617271", "text": "func NewEWGraph() *EWGraph {\n\treturn &EWGraph{NewGraph(), make(map[edge]int)}\n}", "title": "" }, { "docid": "b63eb4a5a475e8889f375a2ea45deecd", "score": "0.4616325", "text": "func (e Entity) Y() float64 {\n\treturn e.y\n}", "title": "" }, { "docid": "5099ba810ff3c350c65d966a94cc22de", "score": "0.45966768", "text": "func (dc *DeathCertifier) ToEdge(order *DeathCertifierOrder) *DeathCertifierEdge {\n\tif order == nil {\n\t\torder = DefaultDeathCertifierOrder\n\t}\n\treturn &DeathCertifierEdge{\n\t\tNode: dc,\n\t\tCursor: order.Field.toCursor(dc),\n\t}\n}", "title": "" }, { "docid": "58ef6b0dc0a9f9b462ab2876e36ad7c0", "score": "0.45874834", "text": "func (dc *DeathCertificate) ToEdge(order *DeathCertificateOrder) *DeathCertificateEdge {\n\tif order == nil {\n\t\torder = DefaultDeathCertificateOrder\n\t}\n\treturn &DeathCertificateEdge{\n\t\tNode: dc,\n\t\tCursor: order.Field.toCursor(dc),\n\t}\n}", "title": "" }, { "docid": "ac1cf6fa18904074df47b399cbfc2248", "score": "0.4585712", "text": "func TestCreateEdgeReturnNew(t *testing.T) {\n\tctx := context.Background()\n\tc := createClient(t, nil)\n\tskipBelowVersion(c, \"3.4\", t) // See https://github.com/arangodb/arangodb/issues/2363\n\tdb := ensureDatabase(ctx, c, \"edge_test\", nil, t)\n\tprefix := \"create_edge_return_new_\"\n\tg := ensureGraph(ctx, db, prefix+\"graph\", nil, t)\n\tec := ensureEdgeCollection(ctx, g, prefix+\"citiesPerState\", []string{prefix + \"city\"}, []string{prefix + \"state\"}, t)\n\tcities := ensureCollection(ctx, db, prefix+\"city\", nil, t)\n\tstates := ensureCollection(ctx, db, prefix+\"state\", nil, t)\n\tfrom := 
createDocument(ctx, cities, map[string]interface{}{\"name\": \"Venlo\"}, t)\n\tto := createDocument(ctx, states, map[string]interface{}{\"name\": \"Limburg\"}, t)\n\n\tdoc := RouteEdge{\n\t\tFrom: from.ID.String(),\n\t\tTo: to.ID.String(),\n\t\tDistance: 7,\n\t}\n\tvar newDoc RouteEdge\n\tmeta, err := ec.CreateDocument(driver.WithReturnNew(ctx, &newDoc), doc)\n\tif err != nil {\n\t\tt.Fatalf(\"Failed to create new edge: %s\", describe(err))\n\t}\n\t// NewDoc must equal doc\n\tif !reflect.DeepEqual(doc, newDoc) {\n\t\tt.Errorf(\"Got wrong ReturnNew document. Expected %+v, got %+v\", doc, newDoc)\n\t}\n\t// Document must exists now\n\tvar readDoc RouteEdge\n\tif _, err := ec.ReadDocument(ctx, meta.Key, &readDoc); err != nil {\n\t\tt.Fatalf(\"Failed to read document '%s': %s\", meta.Key, describe(err))\n\t}\n\tif !reflect.DeepEqual(doc, readDoc) {\n\t\tt.Errorf(\"Got wrong document. Expected %+v, got %+v\", doc, readDoc)\n\t}\n}", "title": "" }, { "docid": "b336cdd79960adcedf932bda7f688e12", "score": "0.45780665", "text": "func (m Matrix4) Y(up Vector4) {\n\tm[4], m[5], m[6], m[7] = up[0], up[1], up[2], up[3]\n}", "title": "" }, { "docid": "bea352457b34337cec0868d2ac2ae2aa", "score": "0.45707285", "text": "func New(config config.Config) Edge {\n\treturn Edge{\n\t\tclient: http.NewClient(config),\n\t\tlogger: config.GetLogger(),\n\t}\n}", "title": "" }, { "docid": "594d698810917e2047d5530baa8d5742", "score": "0.45616326", "text": "func (_this *DOMPoint) SetY(value float64) {\n\tinput := value\n\t_this.Value_JS.Set(\"y\", input)\n}", "title": "" }, { "docid": "b4a528ae7238b0e8bca621f6b3f15e1d", "score": "0.45410404", "text": "func NewNodeDownEvent(index, duration int) *NodeDown {\n\treturn &NodeDown{\n\t\tindex: index,\n\t\tduration: duration,\n\t}\n}", "title": "" }, { "docid": "5dbe198b50638b630628fe36bcaf7c05", "score": "0.4539151", "text": "func ep2(buf *token.Buffer, tree ast.Node) (newTree ast.Node, err error) {\n\torigPos := buf.Pos()\n\tdefer func() {\n\t\tif err != nil {\n\t\t\tbuf.MustSeek(origPos)\n\t\t}\n\t}()\n\tnewTree = tree.Copy()\n\texpr := ast.New()\n\tnewTree.AddChild(expr)\n\tif err := termOpenParen(buf); err != nil {\n\t\treturn nil, err\n\t}\n\tif buf.Pos() >= buf.Len() {\n\t\treturn nil, io.EOF\n\t}\n\teTree, err := e(buf, tree.Copy())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\texpr.AddChildren(eTree.Children())\n\tif buf.Pos() >= buf.Len() {\n\t\treturn nil, io.EOF\n\t}\n\tif err := termCloseParen(buf); err != nil {\n\t\treturn nil, err\n\t}\n\treturn newTree, nil\n}", "title": "" }, { "docid": "c71ef8e0b52dafa41ba3f261cd5ca65d", "score": "0.45253295", "text": "func Y(m Matcher) Matcher {\n\treturn func(np *ast.Node, tx *Tx) bool {\n\t\tn := *np\n\t\tmatched := false\n\t\tswitch n := n.(type) {\n\t\tcase *ast.Expr:\n\t\t\tcopy := *n\n\t\t\tn = &copy\n\t\t\tmatched = m(&n.Y, tx)\n\t\tcase *ast.Paren:\n\t\t\tcopy := *n\n\t\t\tn = &copy\n\t\t\tmatched = m(&n.Y, tx)\n\t\tcase *ast.Set:\n\t\t\tcopy := *n\n\t\t\tn = &copy\n\t\t\tmatched = m(&n.Y, tx)\n\t\tcase *ast.Seq:\n\t\t\tcopy := *n\n\t\t\tn = &copy\n\t\t\tmatched = m(&n.Y, tx)\n\t\t}\n\t\tif matched {\n\t\t\t*np = n\n\t\t}\n\n\t\treturn matched\n\t}\n}", "title": "" }, { "docid": "c1cc1f39d2d50d12d0a47b47b98482f4", "score": "0.45230642", "text": "func createNode() *node {\n\treturn &node{\n\t\tendOfWord: false,\n\t}\n}", "title": "" }, { "docid": "7adc12f31457479111f27e52d55fe12e", "score": "0.4482144", "text": "func NewUpper(e sql.Expression) sql.Expression {\n\treturn &Upper{expression.UnaryExpression{Child: 
e}}\n}", "title": "" }, { "docid": "5754a3b68e9848bbc8020727de1dbfe6", "score": "0.44697404", "text": "func (e *Events) ToEdge(order *EventsOrder) *EventsEdge {\n\tif order == nil {\n\t\torder = DefaultEventsOrder\n\t}\n\treturn &EventsEdge{\n\t\tNode: e,\n\t\tCursor: order.Field.toCursor(e),\n\t}\n}", "title": "" }, { "docid": "403bbb59ff754ab4c6deb511eb3f6205", "score": "0.4462987", "text": "func (hc *HexCreate) SetY(i int) *HexCreate {\n\thc.mutation.SetY(i)\n\treturn hc\n}", "title": "" }, { "docid": "33372143e2a71023fcc4f322f61906e5", "score": "0.44597822", "text": "func NewNode(x, y float64) *Node {\n\tpos := vec.New(x, y)\n\tn := &Node{\n\t\tpos: pos,\n\t\taccl: vec.Zero(),\n\t}\n\treturn n\n}", "title": "" }, { "docid": "ce466f5cdee1d48cea0e5cc5738195ec", "score": "0.44544688", "text": "func newNode() *node {\n\treturn &node{}\n}", "title": "" }, { "docid": "4d5b5decdf0acec75830be5173ab76d7", "score": "0.44535214", "text": "func addReverseEdge(t *testing.T, attr string, src uint64,\n\tdst uint64) {\n\tedge := &taskp.DirectedEdge{\n\t\tValueId: dst,\n\t\tLabel: \"testing\",\n\t\tAttr: attr,\n\t\tEntity: src,\n\t\tOp: taskp.DirectedEdge_SET,\n\t}\n\taddReverseMutation(context.Background(), edge)\n}", "title": "" }, { "docid": "d47ff39085211ad1d758a4e793f9f4bc", "score": "0.44356692", "text": "func NewGeComparator(compiler *Compiler, x, y, r []*Wire) error {\n\treturn comparator(compiler, compiler.OneWire(), x, y, r)\n}", "title": "" }, { "docid": "810671352ff2dac2fe7c9e04869afec7", "score": "0.44229016", "text": "func NewYAGR(opts ...YAGROption) *yagr {\n\tyagr := yagr{\n\t\trouter: NewRouter(),\n\t}\n\n\tfor _, opt := range opts {\n\t\topt(&yagr)\n\t}\n\n\treturn &yagr\n}", "title": "" }, { "docid": "f673b958d42c31b84d9f7d4b6741161c", "score": "0.4422789", "text": "func (lvl *level) newEnergyLossAnimation() animation {\n\treturn &energyLossAnimation{hd: lvl.hd, ticks: 25}\n}", "title": "" }, { "docid": "63eed6038849f9ad6c4244de77d09928", "score": "0.44221622", "text": "func newLeaf() *node {\n\treturn &node{leaf: true}\n}", "title": "" }, { "docid": "299fb7fae5e50074664e76a1346c63d0", "score": "0.44208702", "text": "func (e *Euler) Y() float64 {\n\treturn e.p.Get(\"y\").Float()\n}", "title": "" }, { "docid": "e74f3a711de85b3d2fc3971537200dc8", "score": "0.44191667", "text": "func newKafkaOutNode(wants EdgeType, url string, topic string) *KafkaOutNode {\n\n\n\n\treturn &KafkaOutNode{\n\t\tchainnode: newBasicChainNode(\"kafkaOut\", wants, wants),\n\t\tTopic: topic,\n\t\tUrl: url,\n\t}\n}", "title": "" }, { "docid": "548b1e71915624726aa3c5a5cbacef2b", "score": "0.44172594", "text": "func NewNextMonikerEdge(id, outV, inV int) *NextMonikerEdge {\n\treturn &NextMonikerEdge{\n\t\tEdge: Edge{\n\t\t\tElement: Element{\n\t\t\t\tID: id,\n\t\t\t\tType: ElementEdge,\n\t\t\t},\n\t\t\tLabel: EdgeNextMoniker,\n\t\t},\n\t\tOutV: outV,\n\t\tInV: inV,\n\t}\n}", "title": "" }, { "docid": "75d2b3e366344218cfc216d0a4fefdc8", "score": "0.43992838", "text": "func (v *Vector2) SetY(y float64) {\n\tv.Y = y\n}", "title": "" }, { "docid": "1977b583c8f5ddd04879e51ab48a26d6", "score": "0.43971726", "text": "func New(ptype string, negatable bool, g PtypeGenerator) rql.ASTNode {\n\te := &expression{\n\t\tbase: base{},\n\t\tptype: ptype,\n\t\tg: g,\n\t\tnegatable: negatable,\n\t}\n\te.ValuePredicateBase = meta.NewValuePredicate(e)\n\treturn e\n}", "title": "" }, { "docid": "618d7b7f173aac44a64765ef00986980", "score": "0.43896735", "text": "func NewLast(e sql.Expression) *Last {\n\treturn 
&Last{expression.UnaryExpression{Child: e}}\n}", "title": "" }, { "docid": "1fd66856a8447873645dbfe4ba5fa05e", "score": "0.4386055", "text": "func NewE2ETest(e2eCfg *cfg.E2eCfg,\n\ttestcfg E2ETestCfg, naplesContainer AppEngine,\n\tsrcEp *cfg.Endpoint, dstEp *cfg.Endpoint) *E2ETest {\n\n\te2eTest := &E2ETest{\n\t\tE2eCfg: e2eCfg,\n\t\tTestCfg: testcfg,\n\t\tNaplesContainer: naplesContainer,\n\t}\n\n\tif srcEp != nil && dstEp != nil {\n\t\te2eTest.SrcEp = NewEndpoint(srcEp.EndpointMeta.Name,\n\t\t\t*srcEp, e2eCfg)\n\t\te2eTest.DstEp = NewEndpoint(dstEp.EndpointMeta.Name,\n\t\t\t*dstEp, e2eCfg)\n\t}\n\treturn e2eTest\n}", "title": "" }, { "docid": "9b9ae79414d124ae98bce0572bbb3d59", "score": "0.43789855", "text": "func (s *BaseVerilogParserListener) EnterEnd_edge_offset(ctx *End_edge_offsetContext) {}", "title": "" }, { "docid": "19375009d48c807fdd72c8b2f4b58318", "score": "0.4372814", "text": "func (c *BufferAttribute) Y(index int) {\n\tc.Call(\"getY\", index)\n}", "title": "" }, { "docid": "dda114998d9e433449fc10eca8236258", "score": "0.43603456", "text": "func NewStatsEdge(e Edge) StatsEdge {\n\tswitch e.Type() {\n\tcase pipeline.StreamEdge:\n\t\treturn &streamStatsEdge{\n\t\t\tstatsEdge: statsEdge{\n\t\t\t\tedge: e,\n\t\t\t\tgroupStats: make(map[models.GroupID]*GroupStats),\n\t\t\t\tcollected: new(expvar.Int),\n\t\t\t\temitted: new(expvar.Int),\n\t\t\t},\n\t\t}\n\tcase pipeline.BatchEdge:\n\t\treturn &batchStatsEdge{\n\t\t\tstatsEdge: statsEdge{\n\t\t\t\tedge: e,\n\t\t\t\tgroupStats: make(map[models.GroupID]*GroupStats),\n\t\t\t\tcollected: new(expvar.Int),\n\t\t\t\temitted: new(expvar.Int),\n\t\t\t},\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "3de667690ffe729322b14267dba49dfd", "score": "0.4359721", "text": "func (suo *SettlementUpdateOne) SetY(i int) *SettlementUpdateOne {\n\tsuo.mutation.ResetY()\n\tsuo.mutation.SetY(i)\n\treturn suo\n}", "title": "" }, { "docid": "8756e04b91f6359d638ca0283c2367ed", "score": "0.43574744", "text": "func NewEulerOp() Node {\n\treturn &constantExp{name: \"e\", t: FLOAT, value: math.E}\n}", "title": "" }, { "docid": "0fb5d26efd736025c37813fac45317fd", "score": "0.4351496", "text": "func (fprpuo *FloorPlanReferencePointUpdateOne) SetY(i int) *FloorPlanReferencePointUpdateOne {\n\tfprpuo.mutation.ResetY()\n\tfprpuo.mutation.SetY(i)\n\treturn fprpuo\n}", "title": "" }, { "docid": "0a0585282ab5ebffb0a9d42f1451d17a", "score": "0.4342718", "text": "func NewEdgeNodePhase() workflow.Phase {\n\treturn workflow.Phase{\n\t\tName: \"Join edge-node to OpenYurt cluster. 
\",\n\t\tShort: \"Join edge-node\",\n\t\tRun: runJoinEdgeNode,\n\t}\n}", "title": "" }, { "docid": "36c346d3d693cbb213e498569b00b401", "score": "0.43389258", "text": "func Y(y float64) Coord3D {\n\treturn Coord3D{Y: y}\n}", "title": "" }, { "docid": "a0fb90360c3fb06d311105b0cfd4c99f", "score": "0.43374836", "text": "func (self *AttributeImpl) addOut(e *edgeImpl) {\n\tif !e.marked() {\n\t\tpanic(\"badly constructed edge, not marked (OUT)\")\n\t}\n\tself.edge = append(self.edge, e)\n}", "title": "" }, { "docid": "bdf8541f96fd030c96cd6e0410b3373c", "score": "0.43367147", "text": "func generateGraphAndDerives(newEq string) {\n\t// Initialise the transform matrix with the identity matrix\n\ttransformMatrix = identityMatrix\n\n\t// Add the X/Y axes object to the world space\n\tworldSpace = []Object{}\n\tworldSpace = append(worldSpace, importObject(axes, 0.0, 0.0, 0.0))\n\n\t// Create a graph object with the main data points on it\n\tvar graph Object\n\tvar p Point\n\terrOccurred := false\n\tgraphLabeled := false\n\tevalState := eq.NewEvalState()\n\texpr := eq.Interp(fmt.Sprintf(\"f[x_] := %s\", newEq), evalState)\n\tresult := expr.Eval(evalState)\n\tresult = evalState.ProcessTopLevelResult(expr, result)\n\tfor x := -2.1; x <= 2.1; x += 0.05 {\n\t\texpr = eq.Interp(fmt.Sprintf(\"x=%.2f\", x), evalState)\n\t\tresult := expr.Eval(evalState)\n\t\tresult = evalState.ProcessTopLevelResult(expr, result)\n\t\texpr = eq.Interp(\"f[x]\", evalState)\n\t\tresult = expr.Eval(evalState)\n\t\tresult = evalState.ProcessTopLevelResult(expr, result)\n\t\ty, err := strconv.ParseFloat(result.StringForm(eq.ActualStringFormArgsFull(\"InputForm\", evalState)), 64)\n\t\tif err != nil {\n\t\t\ty = -1 // Set this to -1 to visually indicate something went wrong\n\t\t\terrOccurred = true\n\t\t\tfmt.Printf(\"Error: %v\\n\", err)\n\t\t}\n\t\tp = Point{X: x, Y: y}\n\t\tif !graphLabeled {\n\t\t\tp.Label = fmt.Sprintf(\" Equation: y = %s \", mathFormat(newEq))\n\t\t\tp.LabelAlign = \"right\"\n\t\t\tgraphLabeled = true\n\t\t}\n\t\tgraph.P = append(graph.P, p)\n\t}\n\tif errOccurred {\n\t\tgraph.C = \"red\" // Draw the line in red if an error occurred with the calculation\n\t} else {\n\t\tgraph.C = \"blue\"\n\t}\n\tgraph.Name = \"Equation\"\n\tgraph.Eq = fmt.Sprintf(\"y = %s\", mathFormat(newEq))\n\tworldSpace = append(worldSpace, importObject(graph, 0.0, 0.0, 0.0))\n\n\t// Graph the derivatives of the equation\n\tderivNum := 1\n\tstraightLine := true\n\tfor derivNum == 1 || straightLine != true { // Make sure at least one derivative gets calculated\n\t\tstraightLine = true // The slope check further on will toggle this back off if the derivative isn't a straight line\n\n\t\t// Retrieve the human readable string for the derivative\n\t\ttmpStr := fmt.Sprintf(\"D[%s, x]\", newEq)\n\t\ttmpState := eq.NewEvalState()\n\t\ttmpExpr := eq.Interp(tmpStr, tmpState)\n\t\ttmpResult := tmpExpr.Eval(tmpState)\n\t\ttmpResult = tmpState.ProcessTopLevelResult(tmpExpr, tmpResult)\n\t\tderivStr = tmpResult.StringForm(eq.ActualStringFormArgsFull(\"OutputForm\", tmpState))\n\n\t\t// Variables used to determine if the derivative is a straight line\n\t\tgotSlope := false\n\t\tgotFirstPoint := false\n\t\tvar slope, slope2, slopeP1, slopeP2 float64\n\n\t\t// Create a graph object with the derivative points on it\n\t\terrOccurred = false\n\t\tgraphLabeled = false\n\t\tderivState := eq.NewEvalState()\n\t\tvar deriv Object\n\t\tvar derivExpr, derivResult eq.Ex\n\t\tfor x := -2.1; x <= 2.1; x += pointStep {\n\t\t\tderivEq := fmt.Sprintf(\"D[%s,x] /. 
x -> %.2f\", newEq, x)\n\t\t\tderivExpr = eq.Interp(derivEq, derivState)\n\t\t\tderivResult = derivExpr.Eval(derivState)\n\t\t\tderivResult = derivState.ProcessTopLevelResult(derivExpr, derivResult)\n\t\t\ttmp := derivResult.StringForm(eq.ActualStringFormArgsFull(\"OutputForm\", derivState))\n\t\t\tif debug {\n\t\t\t\tfmt.Printf(\"Val: %0.2f Derivative String: %v Result: %v\\n\", x, derivStr, tmp)\n\t\t\t}\n\t\t\ty, err := strconv.ParseFloat(tmp, 64)\n\t\t\tif err != nil {\n\t\t\t\ty = -1 // Set this to -1 to visually indicate something went wrong\n\t\t\t\terrOccurred = true\n\t\t\t\tfmt.Printf(\"Error: %v\\n\", err)\n\t\t\t}\n\n\t\t\t// Determine if the derivative is a straight line\n\t\t\tif !gotSlope {\n\t\t\t\tif !gotFirstPoint {\n\t\t\t\t\tslopeP1 = math.Round(y*10000) / 10000 // Round off, but keep a few decimal places of precision\n\t\t\t\t\tgotFirstPoint = true\n\t\t\t\t} else {\n\t\t\t\t\tslopeP2 = math.Round(y*10000) / 10000\n\t\t\t\t\triseOverRun := (slopeP2 - slopeP1) / pointStep\n\t\t\t\t\tslope = math.Round(riseOverRun*10000) / 10000\n\t\t\t\t\tif debug {\n\t\t\t\t\t\tfmt.Printf(\"Slope: (%v - %v) / %v = %v\\n\", slopeP2, slopeP1, pointStep, slope)\n\t\t\t\t\t}\n\t\t\t\t\tslopeP1 = slopeP2\n\t\t\t\t\tgotSlope = true\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tslopeP2 = math.Round(y*10000) / 10000\n\t\t\t\triseOverRun := (slopeP2 - slopeP1) / pointStep\n\t\t\t\tslope2 = math.Round(riseOverRun*10000) / 10000\n\t\t\t\tif debug {\n\t\t\t\t\tfmt.Printf(\"Slope2: (%v - %v) / %v = %v\", slopeP2, slopeP1, pointStep, slope2)\n\t\t\t\t}\n\t\t\t\tslopeP1 = slopeP2\n\t\t\t\tif slope != slope2 {\n\t\t\t\t\tstraightLine = false\n\t\t\t\t}\n\t\t\t\tif debug {\n\t\t\t\t\tfmt.Printf(\" Straight line: %v\\n\", straightLine)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tp = Point{X: x, Y: y}\n\t\t\tif !graphLabeled {\n\t\t\t\tp.Label = fmt.Sprintf(\" %s order derivative: y = %s \", strDeriv(derivNum), mathFormat(derivStr))\n\t\t\t\tp.LabelAlign = \"right\"\n\t\t\t\tgraphLabeled = true\n\t\t\t}\n\t\t\tderiv.P = append(deriv.P, p)\n\t\t}\n\t\tif errOccurred {\n\t\t\tderiv.C = \"red\" // Draw the line in red if an error occurred with the calculation\n\t\t} else {\n\t\t\tderiv.C = colDeriv(derivNum)\n\t\t}\n\t\tderiv.Name = fmt.Sprintf(\"%s order derivative\", strDeriv(derivNum))\n\t\tderiv.Eq = fmt.Sprintf(\"y = %s\", mathFormat(derivStr))\n\t\tworldSpace = append(worldSpace, importObject(deriv, 0.0, 0.0, 0.0))\n\t\tnewEq = derivStr\n\t\tderivNum++\n\t}\n}", "title": "" }, { "docid": "77fc0e15640d7401c763f87fa4efa5e0", "score": "0.43355605", "text": "func NewAea(system *core.System, desc *core.OperationDescription) (core.IConvertLPToXY, error) {\n\top := &Aea{\n\t\tisLambert: false,\n\t}\n\top.System = system\n\n\terr := op.aeaSetup(system)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn op, nil\n}", "title": "" }, { "docid": "445d045e41a5f28944239d0ec57f88cd", "score": "0.43355367", "text": "func (su *SettlementUpdate) SetY(i int) *SettlementUpdate {\n\tsu.mutation.ResetY()\n\tsu.mutation.SetY(i)\n\treturn su\n}", "title": "" }, { "docid": "192a8fe609e49e8ce1fab49da6f62ed0", "score": "0.43319985", "text": "func (_this *DOMRect) SetY(value float64) {\n\tinput := value\n\t_this.Value_JS.Set(\"y\", input)\n}", "title": "" }, { "docid": "b60aa1c4049f0365251ffd6119c242c2", "score": "0.43300024", "text": "func NewEvent(etype, eselector string, efx EventHandler) *Event {\n\tex := Event{\n\t\tMeta: &guevents.EventMetable{EventType: etype, EventTarget: eselector},\n\t}\n\n\t// wireup the function to get the ev and 
tree.\n\tex.Fx = func(ev guevents.Event) {\n\t\tif efx != nil {\n\t\t\tefx(ev, ex.tree)\n\t\t}\n\t}\n\n\treturn &ex\n}", "title": "" }, { "docid": "d7fb2eb4012155845f62500bd9c54d97", "score": "0.43253025", "text": "func (dp *DeathPlace) ToEdge(order *DeathPlaceOrder) *DeathPlaceEdge {\n\tif order == nil {\n\t\torder = DefaultDeathPlaceOrder\n\t}\n\treturn &DeathPlaceEdge{\n\t\tNode: dp,\n\t\tCursor: order.Field.toCursor(dp),\n\t}\n}", "title": "" }, { "docid": "15495881187652845575c9448bd2d582", "score": "0.43229842", "text": "func (g *RemTopology) ObtainEdge(source, target uint32) *Edge {\n\tedge := g.GetEdgeFromID(source, target)\n\n\tg.WGr.Wait()\n\n\tif edge == nil {\n\t\ttryUUID := rand.Int63()\n\t\tfor g.GetEdgeFromUUID(tryUUID) != nil {\n\t\t\ttryUUID = rand.Int63()\n\t\t}\n\n\t\tedge = &Edge{\n\t\t\tSource: g.ObtainNode(source),\n\t\t\tTarget: g.ObtainNode(target),\n\t\t\tlastUpdated: time.Now().Unix(),\n\t\t\tUUID: tryUUID,\n\t\t\tisOld: false,\n\t\t}\n\n\t\t// edge.lastUpdated = time.Now().Unix()\n\t\t// edge.Source.lastUpdated = edge.lastUpdated\n\t\t// edge.Target.lastUpdated = edge.lastUpdated\n\n\t\tg.WGr.Add(1)\n\t\tg.Edges = append(g.Edges, edge)\n\t\tg.WGr.Done()\n\t} else {\n\t\tedge.lastUpdated = time.Now().Unix()\n\t\tedge.isOld = false\n\t\tedge.Source.lastUpdated = edge.lastUpdated\n\t\tedge.Source.isOld = false\n\t\tedge.Target.lastUpdated = edge.lastUpdated\n\t\tedge.Target.isOld = false\n\t}\n\n\treturn edge\n}", "title": "" }, { "docid": "9157d6b34ae94fb42aba465f93b64a70", "score": "0.43226987", "text": "func NewPopeye(f Factory) *Popeye {\n\ta := Popeye{}\n\ta.Init(f, client.NewGVR(\"popeye\"))\n\n\treturn &a\n}", "title": "" }, { "docid": "cd03b0fc753f595122919d8722302413", "score": "0.43127924", "text": "func (fprpu *FloorPlanReferencePointUpdate) SetY(i int) *FloorPlanReferencePointUpdate {\n\tfprpu.mutation.ResetY()\n\tfprpu.mutation.SetY(i)\n\treturn fprpu\n}", "title": "" }, { "docid": "92103a49074fa4ae5ec1cf1328d63862", "score": "0.43062824", "text": "func (ec *Constructor) NewEnemy(secid, idx int64) (*BasicEnemy, error) {\n\tif ec.Dimensions == (floatgeom.Point2{}) {\n\t\treturn nil, errors.New(\"Dimensions must be provided\")\n\t}\n\tfor _, s := range requiredAnimations {\n\t\tif _, ok := ec.AnimationMap[s]; !ok {\n\t\t\treturn nil, errors.New(\"Animation name \" + s + \" must be provided\")\n\t\t}\n\t}\n\tbe := &BasicEnemy{}\n\tbe.pushBack = physics.NewVector(0, 0)\n\tnewMp := map[string]render.Modifiable{}\n\tfor animKey, anim := range ec.AnimationMap {\n\t\tnewMp[animKey] = anim.Copy()\n\t}\n\tbe.swtch = render.NewSwitch(\"standLT\", newMp)\n\tif ec.SpaceOffset != (physics.Vector{}) {\n\n\t\tfor animKey := range ec.AnimationMap {\n\t\t\tbe.swtch.SetOffsets(animKey, ec.SpaceOffset)\n\t\t}\n\t}\n\tbe.Interactive = entities.NewInteractive(\n\t\tec.Position.X(),\n\t\tec.Position.Y(),\n\t\tec.Dimensions.X(),\n\t\tec.Dimensions.Y(),\n\t\tbe.swtch,\n\t\tnil,\n\t\tbe.Init(),\n\t\t0,\n\t)\n\t// be.swtch.SetOffsets(\"walkLT\", )\n\tbe.Health = ec.Health\n\tbe.Speed = physics.NewVector(ec.Speed.X(), ec.Speed.Y())\n\tbe.baseSpeed = be.Speed.Copy()\n\tbe.facing = \"LT\"\n\tbe.RSpace.Label = labels.Enemy\n\tbe.CheckedBind(func(be *BasicEnemy, _ interface{}) int {\n\t\tbe.facing = \"RT\"\n\t\tbe.Speed = be.Speed.Scale(-1)\n\t\treturn 0\n\t}, \"RunBack\")\n\tbe.CheckedBind(func(be *BasicEnemy, _ interface{}) int {\n\t\t// Enemies should only do anything if they are on screen\n\t\t// Todo: other things could effect delta temporarily\n\n\t\tpush := 
be.pushBack.Copy()\n\n\t\tif be.facing == \"RT\" {\n\t\t\tpush.Scale(-1)\n\t\t}\n\t\tbe.Delta = be.Speed.Copy().Add(push)\n\t\tbe.pushBack.Scale(0.95)\n\t\tif be.X() <= float64(oak.ScreenWidth+oak.ViewPos.X) &&\n\t\t\tbe.X()+be.W >= float64(oak.ViewPos.X) {\n\t\t\t//be.RSpace.Label = labels.Enemy\n\t\t\tbe.ShiftPos(be.Delta.X(), be.Delta.Y())\n\t\t\t// Default behavior is to flip when hitting the ceiling\n\t\t\tif be.Y() < float64(oak.ScreenHeight)*1/3 ||\n\t\t\t\tbe.Y() > (float64(oak.ScreenHeight)-be.H) {\n\t\t\t\tbe.Speed.SetY(be.Speed.Y() * -1)\n\t\t\t\t// Adjust so we don't exist in the wall for a frame\n\t\t\t\tbe.ShiftPos(0, be.Speed.Y())\n\t\t\t}\n\t\t}\n\t\tif be.Delta.X() != 0 || be.Delta.Y() != 0 {\n\t\t\tbe.swtch.Set(\"walk\" + be.facing)\n\t\t} else {\n\t\t\tbe.swtch.Set(\"stand\" + be.facing)\n\t\t}\n\t\t<-be.RSpace.CallOnHits()\n\t\treturn 0\n\t}, \"EnterFrame\")\n\n\tbe.GetReactiveSpace().Add(labels.EffectsEnemy, func(s, bf *collision.Space) {\n\t\tbe, ok := s.CID.E().(*BasicEnemy)\n\t\tif !ok {\n\t\t\tdlog.Error(\"Non-enemy affected??\")\n\t\t\tfmt.Printf(\"%T\\n\", s.CID.E())\n\t\t\treturn\n\t\t}\n\n\t\tfmt.Println(\"Consider moving this effect to trigger vie the attacked event\", be)\n\n\t\tbe.DeathEffect(secid, idx)\n\t})\n\tbe.CheckedBind(func(be *BasicEnemy, data interface{}) int {\n\n\t\teffectMap, ok := data.(map[string]float64)\n\t\tif !ok {\n\t\t\tdlog.Warn(\"Data sent on attack was not in the right format\")\n\t\t\treturn 0\n\t\t}\n\n\t\tfor k, v := range effectMap {\n\t\t\tswitch k {\n\t\t\tcase \"pushback\":\n\t\t\t\tbe.PushBack(physics.NewVector(v, 0))\n\t\t\tcase \"damage\":\n\t\t\t\tbe.Health -= int(v)\n\t\t\t\tif be.Health < 1 {\n\t\t\t\t\tevent.Trigger(\"EnemyDeath\", []int64{secid, idx})\n\t\t\t\t\tbe.Destroy()\n\t\t\t\t}\n\t\t\tcase \"frost\":\n\t\t\t\tendDebuff := time.Now().Add(time.Second * 3)\n\t\t\t\tbe.CheckedBind(func(be *BasicEnemy, data interface{}) int {\n\t\t\t\t\tif !time.Now().After(endDebuff) {\n\t\t\t\t\t\treturn 0\n\t\t\t\t\t}\n\t\t\t\t\tbe.Speed = be.Speed.Scale(v)\n\t\t\t\t\treturn event.UnbindSingle\n\t\t\t\t}, \"EnterFrame\")\n\t\t\t\tbe.Speed = be.Speed.Scale(1 / v)\n\t\t\t\tdlog.Verb(\"BE speed is now\", be.Speed)\n\t\t\t}\n\t\t}\n\n\t\treturn 0\n\t}, \"Attacked\")\n\t// be.RSpace.Add(labels.PlayerAttack, func(s, _ *collision.Space) {\n\t// \tbe, ok := s.CID.E().(*BasicEnemy)\n\t// \tif !ok {\n\t// \t\tdlog.Error(\"On hit for basic enemy called on non-basic enemy\")\n\t// \t\treturn\n\t// \t}\n\t// \t// TODO: track changes?\n\t// \tevent.Trigger(\"EnemyDeath\", []int64{secid, idx})\n\t// \tbe.Destroy()\n\t// })\n\tfor ev, b := range ec.Bindings {\n\t\tbe.CheckedBind(b, ev)\n\t}\n\treturn be, nil\n}", "title": "" }, { "docid": "69ac6a471da5ae3e819a1750198a9385", "score": "0.43061602", "text": "func NewExp(lambda float64) (x *Exp){\n x = &Exp{Continuous{},lambda}\n // Discrete.discreter self reference X\n // It allows to implement more general variables\n x.Continuous = Continuous{x}\n return\n}", "title": "" }, { "docid": "2dedb854545509cafdcb35ee82cb7063", "score": "0.43029463", "text": "func newEvent() *event {\n\treturn &event{\n\t\tpriority: -1,\n\t\tpid: -1,\n\t\tmonth: -1,\n\t\tday: -1,\n\t\thour: -1,\n\t\tminute: -1,\n\t\tsecond: -1,\n\t\tyear: time.Now().Year(),\n\t\tsequence: -1,\n\t}\n}", "title": "" }, { "docid": "60d0e3ee60ffb992cc3c0efac26e58dd", "score": "0.42913923", "text": "func NewGoE(uri, token string, cache time.Duration) (api.Charger, error) {\n\tc := &GoE{}\n\n\tlog := util.NewLogger(\"go-e\")\n\n\tif token != 
\"\" {\n\t\tc.api = goe.NewCloud(log, token, cache)\n\t} else {\n\t\tc.api = goe.NewLocal(log, uri)\n\t}\n\n\tif c.api.IsV2() {\n\t\tvar phases func(int) error\n\t\tif sponsor.IsAuthorized() {\n\t\t\tphases = c.phases1p3p\n\t\t} else {\n\t\t\tlog.WARN.Println(\"automatic 1p3p phase switching requires sponsor token\")\n\t\t}\n\n\t\treturn decorateGoE(c, c.totalEnergy, phases), nil\n\t}\n\n\treturn c, nil\n}", "title": "" }, { "docid": "bb771bf38f586a1c17d531cf57da79f3", "score": "0.42906636", "text": "func NewEye(engine string, config Config, logger *onelog.Logger) (Eye, error) {\n\tswitch engine {\n\tcase \"show\", \"stream\", \"capture\", \"mplayer\":\n\t\treturn NewShowEye(logger)\n\tcase \"opencv\", \"gocv\", \"face\":\n\t\treturn NewOpenCVEye(logger, config)\n\tdefault:\n\t\treturn nil, errors.New(\"unknown engine: \" + engine)\n\t}\n}", "title": "" }, { "docid": "48f76517edcc63ba705b01d14c7dd2de", "score": "0.42867675", "text": "func (s *Sex) ToEdge(order *SexOrder) *SexEdge {\n\tif order == nil {\n\t\torder = DefaultSexOrder\n\t}\n\treturn &SexEdge{\n\t\tNode: s,\n\t\tCursor: order.Field.toCursor(s),\n\t}\n}", "title": "" } ]
36e1288b3ad982a79565692f2eb5e82f
IsPodMetricsNotSupported returns true if error is pod metrics not supported.
[ { "docid": "654b2c57a805955190fbac37c82b83db", "score": "0.88852173", "text": "func IsPodMetricsNotSupported(err error) bool {\n\te, ok := err.(noPodMetricsSupport)\n\treturn ok && e.NoPodMetricsSupport()\n}", "title": "" } ]
[ { "docid": "609e159697b5e9d67ef977a27d99c7f2", "score": "0.57973313", "text": "func IsNotSupported(err error) bool {\n\terr = getInnerError(err)\n\t// If Platform doesn't recognize or support the request sent, below errors are seen\n\treturn err == ErrVmcomputeInvalidJSON ||\n\t\terr == ErrInvalidData ||\n\t\terr == ErrNotSupported ||\n\t\terr == ErrVmcomputeUnknownMessage\n}", "title": "" }, { "docid": "b8933c78ca457363129d69abe76063cc", "score": "0.5771693", "text": "func (vbvm *vsphereBlockVolumeMapper) SupportsMetrics() bool {\n\treturn true\n}", "title": "" }, { "docid": "a19d4ca5db5855d626a8e8346cc68e7f", "score": "0.5647686", "text": "func (c *client) IsMetricsEnabled(proxy *models.Proxy) (bool, error) {\n\tpod, err := c.getPodForProxy(proxy)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\tval, ok := pod.Annotations[constants.PrometheusScrapeAnnotation]\n\tif !ok {\n\t\treturn false, nil\n\t}\n\n\treturn strconv.ParseBool(val)\n}", "title": "" }, { "docid": "eb82e2fd1790c58446249b7ff3df84d4", "score": "0.54351705", "text": "func (m *metricsContainer) anyMetricsEnabled() bool {\n\treturn m.runtimeMetricsEnabled() || m.proxyMetricsEnabled()\n}", "title": "" }, { "docid": "c65139eb7836dad1315d37a1ada41739", "score": "0.52991223", "text": "func isUnsupportedUnsafeEtcd(spec *operatorv1.StaticPodOperatorSpec) (bool, error) {\n\tunsupportedConfig := map[string]interface{}{}\n\tif spec.UnsupportedConfigOverrides.Raw == nil {\n\t\treturn false, nil\n\t}\n\n\tconfigJson, err := kyaml.ToJSON(spec.UnsupportedConfigOverrides.Raw)\n\tif err != nil {\n\t\tklog.Warning(err)\n\t\t// maybe it's just json\n\t\tconfigJson = spec.UnsupportedConfigOverrides.Raw\n\t}\n\n\tif err := json.NewDecoder(bytes.NewBuffer(configJson)).Decode(&unsupportedConfig); err != nil {\n\t\tklog.V(4).Infof(\"decode of unsupported config failed with error: %v\", err)\n\t\treturn false, err\n\t}\n\n\t// 1. this violates operational best practices for etcd - unstable\n\t// 2. this allows non-HA configurations which we cannot support in\n\t// production - unsafe and non-HA\n\t// 3. this allows a situation where we can get stuck unable to re-achieve\n\t// quorum, resulting in cluster-death - unsafe, non-HA, non-production,\n\t// unstable\n\t// 4. 
the combination of all these things makes the situation\n\t// unsupportable.\n\tvalue, found, err := unstructured.NestedFieldNoCopy(unsupportedConfig, \"useUnsupportedUnsafeNonHANonProductionUnstableEtcd\")\n\tif err != nil {\n\t\treturn false, err\n\t}\n\tif !found {\n\t\treturn false, nil\n\t}\n\tswitch value.(type) {\n\tcase bool:\n\t\treturn value.(bool), nil\n\tcase string:\n\t\treturn strconv.ParseBool(value.(string))\n\tdefault:\n\t\treturn false, nil\n\t}\n}", "title": "" }, { "docid": "f51640a7153c70e104bc998d6a06f8a8", "score": "0.52355665", "text": "func IsErrConditionNotSupported(err error) bool {\n\tif err == nil {\n\t\treturn false\n\t}\n\t_, ok := err.(*ErrConditionNotSupported)\n\treturn ok\n}", "title": "" }, { "docid": "9b2e20dea0cf8cec276efaa0c82a6db4", "score": "0.5198785", "text": "func IsNotSupported(err error) bool {\n\t// check if direct hit that err implements the behaviour.\n\tif isNotSupported(err) {\n\t\treturn true\n\t}\n\t// unwrap until we get the root cause which might also implement the\n\t// behaviour.\n\treturn isNotSupported(Cause(err))\n}", "title": "" }, { "docid": "720dd69c520a1cda2a29ee3f27b0896d", "score": "0.5177435", "text": "func HasPodSpec(gr unversioned.GroupResource) (unversioned.GroupKind, bool) {\n\tgk, ok := resourcesToCheck[gr]\n\treturn gk, ok\n}", "title": "" }, { "docid": "808808df9a733b2f3a5493dfb9373e2c", "score": "0.5135531", "text": "func (o *SloCreate) HasMetricNumerator() bool {\n\tif o != nil && o.MetricNumerator != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "8961f47a76180498f323edd3fad6ab88", "score": "0.51244116", "text": "func IsMonitoringPermissionError(err error) (bool, string) {\n\tif gerr, ok := err.(*googleapi.Error); ok {\n\t\tif gerr.Code == 403 {\n\t\t\treturn true, \"Waiting for project to be ready for metrics scope\"\n\t\t}\n\t}\n\treturn false, \"\"\n}", "title": "" }, { "docid": "daf71d47d33e1d34f20502e96be09c19", "score": "0.50939685", "text": "func (s *SparkApplication) PrometheusMonitoringEnabled() bool {\n\treturn s.Spec.Monitoring != nil && s.Spec.Monitoring.Prometheus != nil\n}", "title": "" }, { "docid": "1972c92f670012c098e910b77c7ed0c2", "score": "0.5043821", "text": "func IsStatusUnsupportedMediaType(err error) bool {\n\treturn IsHttpStatus(err, http.StatusUnsupportedMediaType)\n}", "title": "" }, { "docid": "c63de26b84561f793c41e40b10185131", "score": "0.5041042", "text": "func (o *AzureMonitoredMetric) HasDimensions() bool {\n\tif o != nil && o.Dimensions != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "8b5270a2787217b7888289dd8c23ee97", "score": "0.5037989", "text": "func testPrometheusMetricsOnPods(t *testing.T, data *TestData, component string, metrics []string) {\n\ttoken := getMonitoringAuthToken(t, data)\n\n\tlistOptions := metav1.ListOptions{\n\t\tLabelSelector: \"app=antrea,component=\" + component,\n\t}\n\tpods, err := data.clientset.CoreV1().Pods(antreaNamespace).List(context.TODO(), listOptions)\n\tif err != nil {\n\t\tt.Fatalf(\"Error fetching agent Pods: %v\", err)\n\t}\n\n\tvar hostIP = \"\"\n\tvar hostPort int32\n\tvar address = \"\"\n\tvar parser expfmt.TextParser\n\n\t// Find Pods' API endpoints, check for metrics existence on each of them\n\tfor _, pod := range pods.Items {\n\t\thostIP = pod.Status.HostIP\n\t\tmetricsFound := true\n\n\t\tfor _, container := range pod.Spec.Containers {\n\t\t\tfor _, port := range container.Ports {\n\t\t\t\thostPort = port.HostPort\n\t\t\t\taddress := net.JoinHostPort(hostIP, 
fmt.Sprint(hostPort))\n\t\t\t\tt.Logf(\"Found %s\", address)\n\t\t\t\trespBody := getMetricsFromAPIServer(t, fmt.Sprintf(\"https://%s/metrics\", address), token)\n\n\t\t\t\tparsed, err := parser.TextToMetricFamilies(strings.NewReader(respBody))\n\t\t\t\tif err != nil {\n\t\t\t\t\tt.Fatalf(\"Parsing Prometheus metrics failed with: %v\", err)\n\t\t\t\t}\n\n\t\t\t\t// Create a map of all the metrics which were found on the server\n\t\t\t\ttestMap := make(map[string]bool)\n\t\t\t\tfor _, mf := range parsed {\n\t\t\t\t\ttestMap[mf.GetName()] = true\n\t\t\t\t}\n\n\t\t\t\t// Validate that all the required metrics exist in the server's output\n\t\t\t\tfor _, metric := range metrics {\n\t\t\t\t\tif !testMap[metric] {\n\t\t\t\t\t\tmetricsFound = false\n\t\t\t\t\t\tt.Errorf(\"Metric %s not found on %s\", metric, address)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif !metricsFound {\n\t\t\tt.Fatalf(\"Some metrics do not exist in pods on %s\", address)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "99a85d9a7bd39100881a02b88a0361f4", "score": "0.50320536", "text": "func (r *RecordReport) HasMetrics() bool {\n\treturn r.hasMetrics\n}", "title": "" }, { "docid": "e0c623c85e6240ebd89e4f17280664f1", "score": "0.503119", "text": "func (p *BackendPlugin) supportsDiagnostics() bool {\n\treturn p.diagnostics != nil\n}", "title": "" }, { "docid": "9815cfb98fc6741bea27fc4eafd47f0a", "score": "0.50127274", "text": "func (o *BrowserSyntheticMonitorUpdate) GetKeyPerformanceMetricsOk() (*KeyPerformanceMetrics, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.KeyPerformanceMetrics, true\n}", "title": "" }, { "docid": "d61e1170ca0d23d9672ee514c02ef6b5", "score": "0.5010905", "text": "func (s *ServiceV1alpha1) ListPodMetricsDemo(ctx context.Context, in *ApiMetrics.ListPodMetricsRequest) (*ApiMetrics.ListPodMetricsResponse, error) {\n\tscope.Debug(\"Request received from ListPodMetricsDemo grpc function: \" + AlamedaUtils.InterfaceToString(in))\n\n\tdemoPodMetricList := make([]*ApiMetrics.PodMetric, 0)\n\tendTime := in.GetQueryCondition().GetTimeRange().GetEndTime().GetSeconds()\n\n\tif endTime == 0 {\n\t\treturn &ApiMetrics.ListPodMetricsResponse{\n\t\t\tStatus: &status.Status{\n\t\t\t\tCode: int32(code.Code_INVALID_ARGUMENT),\n\t\t\t},\n\t\t\tPodMetrics: demoPodMetricList,\n\t\t}, errors.Errorf(\"Invalid EndTime\")\n\t}\n\n\tif endTime%3600 != 0 {\n\t\tendTime = endTime - (endTime % 3600) + 3600\n\t}\n\n\t//step := int(in.GetQueryCondition().GetTimeRange().GetStep().GetSeconds())\n\tstep := 3600\n\tif step == 0 {\n\t\tstep = 3600\n\t}\n\n\ttempObjectMeta := K8sMetadata.ObjectMeta{\n\t\tNamespace: in.ObjectMeta[0].Namespace,\n\t\tName: in.ObjectMeta[0].Name,\n\t}\n\n\tdemoContainerMetricList := make([]*ApiMetrics.ContainerMetric, 0)\n\tdemoContainerMetric := ApiMetrics.ContainerMetric{\n\t\tName: in.ObjectMeta[0].Name,\n\t\tMetricData: make([]*ApiCommon.MetricData, 0),\n\t}\n\tdemoContainerMetricList = append(demoContainerMetricList, &demoContainerMetric)\n\n\tdemoMetricDataCPU := ApiCommon.MetricData{\n\t\tMetricType: ApiCommon.MetricType_CPU_MILLICORES_USAGE,\n\t\tData: make([]*ApiCommon.Sample, 0),\n\t}\n\n\tdemoMetricDataMem := ApiCommon.MetricData{\n\t\tMetricType: ApiCommon.MetricType_MEMORY_BYTES_USAGE,\n\t\tData: make([]*ApiCommon.Sample, 0),\n\t}\n\n\tdemoDataMapCPU, _ := DatahubUtils.ReadCSV(\"metric_cpu.csv\")\n\tdemoDataMapMem, _ := DatahubUtils.ReadCSV(\"metric_memory.csv\")\n\n\tdemoKey := in.ObjectMeta[0].Namespace + \"_\" + in.ObjectMeta[0].Name\n\n\tstartTime := endTime - 
int64(step*len(demoDataMapCPU[demoKey]))\n\tfor index, value := range demoDataMapCPU[demoKey] {\n\t\tsecond := startTime + int64(index*step)\n\t\tdemoMetricDataCPU.Data = append(demoMetricDataCPU.Data, &ApiCommon.Sample{\n\t\t\tTime: &timestamp.Timestamp{Seconds: int64(second)},\n\t\t\tNumValue: value,\n\t\t})\n\t}\n\n\tfor index, value := range demoDataMapMem[demoKey] {\n\t\tsecond := startTime + int64(index*step)\n\t\tdemoMetricDataMem.Data = append(demoMetricDataMem.Data, &ApiCommon.Sample{\n\t\t\tTime: &timestamp.Timestamp{Seconds: int64(second)},\n\t\t\tNumValue: value,\n\t\t})\n\t}\n\n\tdemoContainerMetric.MetricData = append(demoContainerMetric.MetricData, &demoMetricDataCPU)\n\tdemoContainerMetric.MetricData = append(demoContainerMetric.MetricData, &demoMetricDataMem)\n\n\tdemoPodMetric := ApiMetrics.PodMetric{\n\t\tObjectMeta: resources.NewObjectMeta(&tempObjectMeta),\n\t\tContainerMetrics: demoContainerMetricList,\n\t}\n\tdemoPodMetricList = append(demoPodMetricList, &demoPodMetric)\n\n\treturn &ApiMetrics.ListPodMetricsResponse{\n\t\tStatus: &status.Status{\n\t\t\tCode: int32(code.Code_OK),\n\t\t},\n\t\tPodMetrics: demoPodMetricList,\n\t}, nil\n}", "title": "" }, { "docid": "23a3c1052faebab88da04da176efc73e", "score": "0.50074667", "text": "func IsQuotaExceeded(err error) bool {\n\tif k8serrors.IsForbidden(err) {\n\t\tif strings.Contains(err.Error(), \"exceeded quota\") {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "e95636d699b1b8549332e83af59d267f", "score": "0.49953726", "text": "func IsPodExceedingNodeResources(pod *corev1.Pod) bool {\n\tfor _, podStatus := range pod.Status.Conditions {\n\t\tif podStatus.Reason == corev1.PodReasonUnschedulable && strings.Contains(podStatus.Message, \"Insufficient\") {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "e95636d699b1b8549332e83af59d267f", "score": "0.49953726", "text": "func IsPodExceedingNodeResources(pod *corev1.Pod) bool {\n\tfor _, podStatus := range pod.Status.Conditions {\n\t\tif podStatus.Reason == corev1.PodReasonUnschedulable && strings.Contains(podStatus.Message, \"Insufficient\") {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "dc8a448029e94a848e7960a6efffd92f", "score": "0.49872604", "text": "func (o *AzureMonitoredMetric) GetDimensionsOk() (*[]string, bool) {\n\tif o == nil || o.Dimensions == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Dimensions, true\n}", "title": "" }, { "docid": "6894fe2984884cce21210e1fe0d7865b", "score": "0.49772912", "text": "func (c *PolicyCollector) HasPolicySpecificMetrics() bool {\n\tif c.policy.DescribeMetrics() == nil {\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" }, { "docid": "b5d0557021b48de10faa7157304a8f4b", "score": "0.49772882", "text": "func TestPrometheusMetrics(t *testing.T) {\n\tapi.RequestTimeHistogram.WithLabelValues(\"x\")\n\tmetrics.RequestTimeHistogramUsec.WithLabelValues(\"x\", \"y\", \"z\")\n\tmetrics.ErrorTotal.WithLabelValues(\"x\")\n\tmetrics.RejectionCount.WithLabelValues(\"x\")\n\t// TODO(https://github.com/m-lab/annotation-service/issues/266)\n\t// Some metrics no longer pass the linter.\n\t//promtest.LintMetrics(t)\n\tpromtest.LintMetrics(nil)\n}", "title": "" }, { "docid": "9b9795a2d296f6f994ff38154e353dce", "score": "0.49622092", "text": "func (o *SLOHistoryMetricsSeriesMetadata) HasMetric() bool {\n\tif o != nil && o.Metric != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "494ad4578ca670442e61919c399a4de8", "score": 
"0.49365702", "text": "func (o *StorageNetAppNode) HasAvgPerformanceMetrics() bool {\n\tif o != nil && o.AvgPerformanceMetrics != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "95ce361715c7518a0472383b14ff46de", "score": "0.49303177", "text": "func (i *Info) IsUnsupported() bool {\n\treturn !i.IsSupported()\n}", "title": "" }, { "docid": "421d1babdb7ca547d1f747feb558045b", "score": "0.49078745", "text": "func HasLiveness(spec core.PodSpec) bool {\n\tif len(spec.Containers) <= 0 {\n\t\treturn false\n\t}\n\n\tfor _, container := range spec.Containers {\n\t\tprobe := container.LivenessProbe\n\t\tif probe == nil {\n\t\t\treturn false\n\t\t}\n\n\t\th := probe.Handler\n\t\tif !hasDefinedHandler(h) {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "f130305a43314897f8de627ac78e9266", "score": "0.4868686", "text": "func IsPodRestarting(pod *v1.Pod) (bool, datadoghqv1alpha1.ExtendedDaemonSetStatusReason) {\n\tvar maxRestartCount int\n\tvar reason datadoghqv1alpha1.ExtendedDaemonSetStatusReason\n\tfor _, s := range pod.Status.ContainerStatuses {\n\t\tif maxRestartCount < int(s.RestartCount) {\n\t\t\tmaxRestartCount = int(s.RestartCount)\n\t\t\tif s.LastTerminationState != (v1.ContainerState{}) && *s.LastTerminationState.Terminated != (v1.ContainerStateTerminated{}) {\n\t\t\t\tif s.LastTerminationState.Terminated.Reason == string(datadoghqv1alpha1.ExtendedDaemonSetStatusReasonCLB) {\n\t\t\t\t\treason = datadoghqv1alpha1.ExtendedDaemonSetStatusReasonCLB\n\t\t\t\t} else if s.LastTerminationState.Terminated.Reason == string(datadoghqv1alpha1.ExtendedDaemonSetStatusReasonOOM) {\n\t\t\t\t\treason = datadoghqv1alpha1.ExtendedDaemonSetStatusReasonOOM\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\treason = datadoghqv1alpha1.ExtendedDaemonSetStatusReasonUnknown\n\t\t\t}\n\t\t}\n\t}\n\tif maxRestartCount > 0 {\n\t\treturn true, reason\n\t}\n\treturn false, \"\"\n}", "title": "" }, { "docid": "184a7ec7349b6cf5221f50f996a2e1c4", "score": "0.48635465", "text": "func (o *Series) GetMetricOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Metric, true\n}", "title": "" }, { "docid": "cbab00b163287962bb0cca4f5cf4c789", "score": "0.48581955", "text": "func checkPrometheusMetrics() {\n\tf := frisby.Create(\"Check the Prometheus metrics API endpoint\").Get(apiURL + \"metrics\")\n\tf.Send()\n\tf.ExpectStatus(200)\n\t// the content type header set by metrics handler is a bit complicated\n\t// but it must start with \"text/plain\" in any case\n\tf.Expect(func(F *frisby.Frisby) (bool, string) {\n\t\theader := F.Resp.Header.Get(contentTypeHeader)\n\t\tif strings.HasPrefix(header, \"text/plain\") {\n\t\t\treturn true, server.OkStatusPayload\n\t\t}\n\t\treturn false, fmt.Sprintf(\"Expected Header %q to be %q, but got %q\", contentTypeHeader, \"text/plain\", header)\n\t})\n\tf.PrintReport()\n}", "title": "" }, { "docid": "3b873bff0fe3f547763c39ca4aa32032", "score": "0.4845061", "text": "func (o *V0037JobResponseProperties) HasMemoryPerNode() bool {\n\tif o != nil && o.MemoryPerNode != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "a1e011ffc03926e42bcf90ce5da3bcac", "score": "0.4818093", "text": "func PayloadSizeErr(err error) bool {\n\tif err == nil {\n\t\treturn false\n\t}\n\tif err == ErrPayloadSize {\n\t\treturn true\n\t}\n\n\tif er, ok := err.(*net.OpError); ok {\n\t\tif er.Err == ErrPayloadSize {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "4ffccfe4218530679945b5bf916879b8", 
"score": "0.4813329", "text": "func IsErrSlackPayload(err error) bool {\n\t_, ok := err.(ErrSlackPayload)\n\treturn ok\n}", "title": "" }, { "docid": "2b7008c6704f7d37146746d3ecb3f818", "score": "0.48115093", "text": "func (s *Series) HasMetric() bool {\n\tif s != nil && s.Metric != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "ef80010815f8064bfc784a3957d03d5c", "score": "0.47976628", "text": "func (tt *MPIJobTrainer) IsSupported(name, ns string) bool {\n\tif !tt.enabled {\n\t\treturn false\n\t}\n\n\tmpiJobs, err := tt.getMpiJobs(ns, metav1.ListOptions{\n\t\tLabelSelector: fmt.Sprintf(\"release=%s\", name),\n\t})\n\n\tif err == nil && len(mpiJobs) > 0 {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "9fa1bad5ba40bb96d123d379bae66823", "score": "0.47963586", "text": "func IsRateLimit(\n\terr error,\n) bool {\n\treturn containsErrorMessage(err, errRateLimit)\n}", "title": "" }, { "docid": "b3c51f0197605fef6b729c405f192478", "score": "0.47647864", "text": "func IsFormatSupported(inputParameters *StreamParameters, outputParameters *StreamParameters, sampleRate float64) Error {\n\tcinputParameters, _ := inputParameters.PassRef()\n\tcoutputParameters, _ := outputParameters.PassRef()\n\tcsampleRate, _ := (C.double)(sampleRate), cgoAllocsUnknown\n\t__ret := C.Pa_IsFormatSupported(cinputParameters, coutputParameters, csampleRate)\n\t__v := (Error)(__ret)\n\treturn __v\n}", "title": "" }, { "docid": "07149d1e8bd0a4bedc4a3c23f1717faa", "score": "0.47644788", "text": "func IsMetric(ev core.Event) bool {\n\treturn ev.Label(0).Key() == keys.Metric\n}", "title": "" }, { "docid": "ed164dff71316c0afa55249a6a7ab4bc", "score": "0.47528622", "text": "func (r *ResourceMetricsProvider) GetPodMetrics(pods ...*metav1.PartialObjectMetadata) ([]metrics.PodMetrics, error) {\n\tvar queryData []metrics.PodMetrics\n\tif len(pods) == 0 {\n\t\treturn queryData, nil\n\t}\n\tvar err error\n\t// In the previous step, we construct the annotations, so it couldn't be nil\n\tif _, ok := pods[0].Annotations[labelSelectorAnnotationInternal]; ok {\n\t\tnamespace := pods[0].Annotations[namespaceSpecifiedAnnotation]\n\t\tselectorStr := pods[0].Annotations[labelSelectorAnnotationInternal]\n\t\tqueryData, err = r.queryPodMetricsBySelector(selectorStr, namespace)\n\t} else {\n\t\tqueryData, err = r.queryPodMetricsByName(pods[0].Name, pods[0].Namespace)\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn queryData, nil\n}", "title": "" }, { "docid": "1bf8b634273aaec0d76491fbf64f122a", "score": "0.47505835", "text": "func (o *V0037JobResponseProperties) HasMemoryPerCpu() bool {\n\tif o != nil && o.MemoryPerCpu != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "5c2acd5d27a9856af69e4ffe42aa9d98", "score": "0.47370434", "text": "func (o *StorageNetAppNode) GetAvgPerformanceMetricsOk() (*StorageNetAppPerformanceMetricsAverage, bool) {\n\tif o == nil || o.AvgPerformanceMetrics == nil {\n\t\treturn nil, false\n\t}\n\treturn o.AvgPerformanceMetrics, true\n}", "title": "" }, { "docid": "9b58d7fb363e8f92f68d0c49ad5dff50", "score": "0.47296435", "text": "func (o *SLOHistoryMetricsSeriesMetadata) GetMetricOk() (*string, bool) {\n\tif o == nil || o.Metric == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Metric, true\n}", "title": "" }, { "docid": "ffcb128923671bf209e90d9fdb9fa632", "score": "0.46965158", "text": "func (s *Series) GetMetricOk() (string, bool) {\n\tif s == nil || s.Metric == nil {\n\t\treturn \"\", false\n\t}\n\treturn *s.Metric, true\n}", 
"title": "" }, { "docid": "763445853ae1dbb18993232da098b759", "score": "0.46926883", "text": "func (n *Namespace) HasMaxDevices() bool {\n\treturn n.MaxDevices > 0\n}", "title": "" }, { "docid": "5aa44d380cdd5b11dca1611ab14ddea1", "score": "0.4692223", "text": "func (o *SloCreate) HasMetricDenominator() bool {\n\tif o != nil && o.MetricDenominator != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "343a257f41b70fcca627da3055fec738", "score": "0.46767822", "text": "func (o *InlineResponse2002Items) GetMetricsOk() (*InlineResponse2002Metrics, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Metrics, true\n}", "title": "" }, { "docid": "f6d333e337c2812f9879d8096fcc38af", "score": "0.46737748", "text": "func (t *TileDefRequest) HasMetric() bool {\n\tif t != nil && t.Metric != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "ce17e43db3a82f20b55ae3f72e57a21f", "score": "0.4669556", "text": "func (o *NvmeCollectionPerformanceMetricsGetOK) IsSuccess() bool {\n\treturn true\n}", "title": "" }, { "docid": "653654a3774a7f2551fa30ddf44e4b43", "score": "0.4650222", "text": "func (n *Namespace) IsPod() bool {\n\treturn n.NSMode == FromPod\n}", "title": "" }, { "docid": "8c422045462c63f18e4d7b865517212a", "score": "0.46494704", "text": "func (o *SloCreate) GetMetricNumeratorOk() (*string, bool) {\n\tif o == nil || o.MetricNumerator == nil {\n\t\treturn nil, false\n\t}\n\treturn o.MetricNumerator, true\n}", "title": "" }, { "docid": "7c57d44bee96c6054036a665014cdd01", "score": "0.46347824", "text": "func (m *MetricsSpecification) getPodMetrics(c echo.Context) error {\n\n\tprometheusOp := c.Param(\"op\")\n\t// podId := c.Param(\"podId\")\n\tcnsiList := strings.Split(c.Request().Header.Get(\"x-cap-cnsi-list\"), \",\")\n\n\t// get the user\n\tuserGUID, err := m.portalProxy.GetSessionStringValue(c, \"user_id\")\n\tif err != nil {\n\t\treturn errors.New(\"Could not find session user_id\")\n\t}\n\n\t// For each CNSI, find the metrics endpoint that we need to talk to\n\tmetrics, err2 := m.getMetricsEndpoints(userGUID, cnsiList)\n\tif err2 != nil {\n\t\tlog.Error(\"Error getting metrics\", err2)\n\n\t\treturn errors.New(\"Can not get metric endpoint metadata\")\n\t}\n\n\t// Construct the metadata for proxying\n\trequests := makePrometheusRequestInfos(c, userGUID, metrics, prometheusOp, \"\", false)\n\tresponses, err := m.portalProxy.DoProxyRequest(requests)\n\treturn m.portalProxy.SendProxiedResponse(c, responses)\n}", "title": "" }, { "docid": "b804f65d62c790ef5aa91bc511f5b225", "score": "0.4629705", "text": "func (s *SparkApplication) ExposeDriverMetrics() bool {\n\treturn s.Spec.Monitoring != nil && s.Spec.Monitoring.ExposeDriverMetrics\n}", "title": "" }, { "docid": "8ec4032f3c6f763dc665c9e0f54954cb", "score": "0.4623565", "text": "func (u *MediaMetadata) IsMediaMetadata() {}", "title": "" }, { "docid": "f5f751ae6a40cf1be48d5e3a9596ef4e", "score": "0.46230164", "text": "func (inst *Instance) resourceHasMetrics(auth autorest.Authorizer, resourceID string) (bool, error) {\n\tmetricsNamespaceClient := insights.NewMetricNamespacesClient(inst.cfg.Azure.SubscriptionID)\n\tmetricsNamespaceClient.Authorizer = auth\n\tif err := metricsNamespaceClient.AddToUserAgent(inst.cfg.Azure.UserAgent); err != nil {\n\t\tinst.logger.Warn().Err(err).Msg(\"adding user agent to client\")\n\t}\n\tts := time.Now().Add(-5 * time.Hour)\n\t// ref: 
https://godoc.org/github.com/Azure/azure-sdk-for-go/services/preview/monitor/mgmt/2018-09-01/insights#MetricNamespacesClient.List\n\tresult, err := metricsNamespaceClient.List(inst.ctx, resourceID, ts.Format(time.RFC3339))\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\treturn len(*result.Value) > 0, nil\n}", "title": "" }, { "docid": "ce8f60f961b2e690af69ab27baa3aee9", "score": "0.46121502", "text": "func HasLimits(spec core.PodSpec) bool {\n\tif len(spec.Containers) <= 0 {\n\t\treturn false\n\t}\n\n\tfor _, container := range spec.Containers {\n\t\tr := container.Resources.Limits\n\t\tif !hasAllLimits(r) {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "31e70078e24aad3af25ffac4145a89a5", "score": "0.46083242", "text": "func HasNotImplemented(err error) bool {\n\treturn errors.HasType(err, (*notImplementedError)(nil))\n}", "title": "" }, { "docid": "e488c294a2d1a597240fc56aa33b10d0", "score": "0.46082345", "text": "func (m *MetricMetadata) HasPerUnit() bool {\n\tif m != nil && m.PerUnit != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "5f05e1cffe8875f97c1d5cae9b35c268", "score": "0.46012008", "text": "func isMeshedPod(pod corev1.Pod) bool {\n\t// osm-controller adds a unique label to each pod that belongs to a mesh\n\t_, proxyLabelSet := pod.Labels[constants.EnvoyUniqueIDLabelName]\n\treturn proxyLabelSet\n}", "title": "" }, { "docid": "33c335d27b77bbdd7c434c64f7fc24b0", "score": "0.45929307", "text": "func (o *SloCreate) HasMetricRate() bool {\n\tif o != nil && o.MetricRate != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "228d6bd47c9664db849ddb60f4fc8e39", "score": "0.4587876", "text": "func metricsSkipper(c echo.Context) bool {\n\tif strings.HasPrefix(c.Path(), \"/metrics\") {\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "6a2d52ed18a5ebdef6a9c36d3fb34296", "score": "0.45799404", "text": "func (o *V0037JobResponseProperties) GetMemoryPerNodeOk() (*int32, bool) {\n\tif o == nil || o.MemoryPerNode == nil {\n\t\treturn nil, false\n\t}\n\treturn o.MemoryPerNode, true\n}", "title": "" }, { "docid": "1532c602e472cf5b9629111293ae8184", "score": "0.45797926", "text": "func (m *Metric) HasMetric() bool {\n\tif m != nil && m.Metric != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "26f4f1e2f023b0f794c6b483d3335638", "score": "0.45653662", "text": "func ValidMetric(sample UDPMetric) bool {\n\tret := true\n\tret = ret && sample.Name != \"\"\n\tret = ret && sample.Value != nil\n\treturn ret\n}", "title": "" }, { "docid": "b6aadf66c0ed0b6f4ac82e8995c58b9f", "score": "0.45402348", "text": "func (m *MetricFieldSpec) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateDataType(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6530a6ca978760b914b2caa47adecbc7", "score": "0.45361358", "text": "func (o *V0037JobResponseProperties) HasMemoryPerTres() bool {\n\tif o != nil && o.MemoryPerTres != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "0aa98f202e02a4ae11315db4ba702cac", "score": "0.45302948", "text": "func HasMinikube() bool {\n\twhichOut, err := Where(\"minikube\")\n\n\tif len(whichOut) == 0 && err != nil {\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" }, { "docid": "fc86515a66c0b69c38b917e2c6b1ee7e", "score": "0.4527041", "text": "func (p *Proxy) 
HasPodMetadata() bool {\n\treturn p.PodMetadata != nil\n}", "title": "" }, { "docid": "ad722c1de986a6261f222936bceed6c8", "score": "0.4526975", "text": "func Is429QuotaError(err error) (bool, string) {\n\tif gerr, ok := err.(*googleapi.Error); ok {\n\t\tif gerr.Code == 429 {\n\t\t\treturn true, \"429s are not retryable for this resource\"\n\t\t}\n\t}\n\treturn false, \"\"\n}", "title": "" }, { "docid": "f61724f064f174d7f8898d83f493056a", "score": "0.4524854", "text": "func metricEnabled() bool {\n\treturn enabled\n}", "title": "" }, { "docid": "4d66d73be2baf377a2a5d1d687d28820", "score": "0.45225647", "text": "func apiMetricEnabled() bool {\n\treturn apiEnabled\n}", "title": "" }, { "docid": "2d784f65a530a59cc9deb517b91ba82a", "score": "0.45212787", "text": "func (p *CoreProvider) GetPodMetrics(pods ...apitypes.NamespacedName) ([]api.TimeInfo, [][]metrics.ContainerMetrics, error) {\n\ttimeInfo := make([]api.TimeInfo, len(pods))\n\tcoreMetrics := make([][]metrics.ContainerMetrics, len(pods))\n\tresourceNames := make([]string, len(pods))\n\tfor i, pod := range pods {\n\t\tresourceNames[i] = fmt.Sprintf(\"%q\", pod.Name)\n\t}\n\n\tcpuMetrics, cpuTimeInfo, err := p.client.getContainerCPU(resourceNames)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tramMetrics, _, err := p.client.getContainerRAM(resourceNames)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tfor i := range pods {\n\t\tpodKey := pods[i].Namespace + \":\" + pods[i].Name\n\n\t\tcpuContainers, ok := cpuMetrics[podKey]\n\t\tif !ok {\n\t\t\tklog.V(4).Infof(\"Metric cpu not found for pod '%s'\", podKey)\n\t\t\tcontinue\n\t\t}\n\t\tramContainers, ok := ramMetrics[podKey]\n\t\tif !ok {\n\t\t\tklog.V(4).Infof(\"Metric ram not found for pod '%s'\", podKey)\n\t\t\tcontinue\n\t\t}\n\n\t\tcoreMetrics[i] = make([]metrics.ContainerMetrics, 0)\n\t\tfor container, cpu := range cpuContainers {\n\t\t\tram, ok := ramContainers[container]\n\t\t\tif !ok { // cpu and ram should be present in the container\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tcoreMetrics[i] = append(coreMetrics[i], metrics.ContainerMetrics{Name: container, Usage: corev1.ResourceList{\n\t\t\t\tcorev1.ResourceCPU: cpu,\n\t\t\t\tcorev1.ResourceMemory: ram,\n\t\t\t}})\n\n\t\t\ttimeInfo[i], ok = cpuTimeInfo[podKey] // TODO(holubowicz): query about the same time segment in cpu and ram (now it can be slightly different)\n\t\t\tif !ok {\n\t\t\t\treturn nil, nil, apierr.NewInternalError(fmt.Errorf(\"TimeInfo should be set for every pod with metrics\"))\n\t\t\t}\n\t\t}\n\t\tif len(coreMetrics[i]) == 0 {\n\t\t\tcoreMetrics[i] = nil\n\t\t}\n\t}\n\n\treturn timeInfo, coreMetrics, nil\n}", "title": "" }, { "docid": "cc8d3870e0c6a2a8736090da7309699d", "score": "0.4513", "text": "func shouldDeletePod(health agent.Health) (bool, error) {\n\tstatus, ok := health.ProcessPlans[getHostname()]\n\tif !ok {\n\t\treturn false, fmt.Errorf(\"hostname %s was not in the process plans\", getHostname())\n\t}\n\treturn isWaitingToBeDeleted(status), nil\n}", "title": "" }, { "docid": "6455037b6cec2611d275de2d11fbcd0b", "score": "0.4512644", "text": "func TestPubSubStackDriverMetrics(t *testing.T) {\n\tt.Skip(\"See issues https://github.com/google/knative-gcp/issues/317 and https://github.com/cloudevents/sdk-go/pull/234\")\n\tcancel := logstream.Start(t)\n\tdefer cancel()\n\tPubSubWithTargetTestImpl(t, true /*assertMetrics */)\n}", "title": "" }, { "docid": "7421a895a8c2d8b22fac95233878033d", "score": "0.45064282", "text": "func (o *StorageNetAppStorageVmAllOf) HasAvgPerformanceMetrics() bool {\n\tif o != 
nil && o.AvgPerformanceMetrics != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "d1122b51ad92e8f7d56a633881d08851", "score": "0.45035264", "text": "func (o *CityMetrics) HasOtherMetrics() bool {\n\tif o != nil && o.OtherMetrics != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "be3dd0bcee5218c7e229d0d3c9da73a0", "score": "0.4503417", "text": "func (e *Endpoint) AreQueryOptionsSupported(queryOptions *QueryOptions) (bool, []error) {\n\tif queryOptions == nil {\n\t\treturn true, nil\n\t}\n\n\tvar errorList []error\n\tif queryOptions.QueryTop != nil {\n\t\tif !e.SupportsQueryOptionType(queryOptions.QueryTop.GetQueryOptionType()) {\n\t\t\terrorList = append(errorList, errors.New(CreateQueryError(QueryTopInvalid, e.Name)))\n\t\t}\n\t}\n\n\tif queryOptions.QuerySkip != nil {\n\t\tif !e.SupportsQueryOptionType(queryOptions.QuerySkip.GetQueryOptionType()) {\n\t\t\terrorList = append(errorList, errors.New(CreateQueryError(QuerySkipNotAvailable, queryOptions.QuerySkip.GetQueryOptionType().String(), e.Name)))\n\t\t}\n\t}\n\n\tif queryOptions.QuerySelect != nil {\n\t\tif !e.SupportsQueryOptionType(queryOptions.QuerySelect.GetQueryOptionType()) {\n\t\t\t//ToDo: Create error message\n\t\t}\n\t}\n\n\tif queryOptions.QueryExpand != nil {\n\t\tif !e.SupportsQueryOptionType(queryOptions.QueryExpand.GetQueryOptionType()) {\n\t\t\t//ToDo: Create error message\n\t\t}\n\t}\n\n\tif queryOptions.QueryOrderBy != nil {\n\t\tif !e.SupportsQueryOptionType(queryOptions.QueryOrderBy.GetQueryOptionType()) {\n\t\t\t//ToDo: Create error message\n\t\t}\n\t}\n\n\tif queryOptions.QueryCount != nil {\n\t\tif !e.SupportsQueryOptionType(queryOptions.QueryCount.GetQueryOptionType()) {\n\t\t\t//ToDo: Create error message\n\t\t}\n\t}\n\n\tif queryOptions.QueryFilter != nil {\n\t\tif !e.SupportsQueryOptionType(queryOptions.QueryFilter.GetQueryOptionType()) {\n\t\t\t//ToDo: Create error message\n\t\t}\n\t}\n\n\tif queryOptions.QueryResultFormat != nil {\n\t\tif !e.SupportsQueryOptionType(queryOptions.QueryResultFormat.GetQueryOptionType()) {\n\t\t\t//ToDo: Create error message\n\t\t}\n\t}\n\n\tif errorList != nil {\n\t\treturn false, errorList\n\t}\n\n\treturn true, nil\n}", "title": "" }, { "docid": "1f328672f379a9d41d8b6b1c4c62c2a9", "score": "0.45029742", "text": "func (appMgr *Manager) checkValidPod(\n\tobj interface{}, operation string,\n) (bool, []*serviceQueueKey) {\n\tpod := obj.(*v1.Pod)\n\t//skip if pod belongs to coreService\n\tif appMgr.checkCoreserviceLabels(pod.Labels) {\n\t\treturn false, nil\n\t}\n\tnamespace := pod.ObjectMeta.Namespace\n\tpodkey := namespace + \"/\" + pod.Name\n\t_, ok := appMgr.getNamespaceInformer(namespace)\n\tif !ok {\n\t\t// Not watching this namespace\n\t\treturn false, nil\n\t}\n\t//delete annotations from nplstore\n\tif operation == OprTypeDelete {\n\t\tappMgr.nplStoreMutex.Lock()\n\t\tdelete(appMgr.nplStore, podkey)\n\t\tappMgr.nplStoreMutex.Unlock()\n\t} else {\n\t\tann := pod.GetAnnotations()\n\t\tvar annotations []NPLAnnotation\n\t\tif val, ok := ann[NPLPodAnnotation]; ok {\n\t\t\tif err := json.Unmarshal([]byte(val), &annotations); err != nil {\n\t\t\t\tlog.Errorf(\"key: %s, got error while unmarshaling NPL annotations: %v\", err)\n\t\t\t}\n\t\t\tappMgr.nplStoreMutex.Lock()\n\t\t\tappMgr.nplStore[podkey] = annotations\n\t\t\tappMgr.nplStoreMutex.Unlock()\n\t\t} else {\n\t\t\tlog.Debugf(\"key: %s, NPL annotation not found for Pod\", pod.Name)\n\t\t\tappMgr.nplStoreMutex.Lock()\n\t\t\tdelete(appMgr.nplStore, 
podkey)\n\t\t\tappMgr.nplStoreMutex.Unlock()\n\t\t}\n\t}\n\tsvcs := appMgr.GetServicesForPod(pod)\n\tvar keyList []*serviceQueueKey\n\tfor _, svc := range svcs {\n\t\tkey := &serviceQueueKey{\n\t\t\tServiceName: svc.ObjectMeta.Name,\n\t\t\tNamespace: namespace,\n\t\t\tResourceKind: Pod,\n\t\t\tResourceName: pod.Name,\n\t\t}\n\t\tkeyList = append(keyList, key)\n\t}\n\treturn true, keyList\n}", "title": "" }, { "docid": "4a67ad1a6a9d95d5ca1c426dea95fd22", "score": "0.4497903", "text": "func (o *CityMetrics) HasMetrics() bool {\n\tif o != nil && o.Metrics != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "54da0bf25f31c49dfcdd583fa8db9a61", "score": "0.44977194", "text": "func (opts *Message) IsSupported(ext string) (has bool) {\n\t_, has = opts.Extensions[ext]\n\treturn\n}", "title": "" }, { "docid": "728ee951dbd30576f8e45c3288a827c2", "score": "0.44953045", "text": "func (g *GPS) Metrics() bool {\n\treturn g.Dd >= 0\n}", "title": "" }, { "docid": "287a31915843125b1a786f27609bf23d", "score": "0.44822004", "text": "func is403QuotaExceededPerMinuteError(err error) (bool, string) {\n\tgerr, ok := err.(*googleapi.Error)\n\tif !ok {\n\t\treturn false, \"\"\n\t}\n\tvar QuotaRegex = regexp.MustCompile(`Quota exceeded for quota metric '(?P<Metric>.*)' and limit '(?P<Limit>.* per minute)' of service`)\n\tif gerr.Code == 403 && QuotaRegex.MatchString(gerr.Body) {\n\t\tmatches := QuotaRegex.FindStringSubmatch(gerr.Body)\n\t\tmetric := matches[QuotaRegex.SubexpIndex(\"Metric\")]\n\t\tlimit := matches[QuotaRegex.SubexpIndex(\"Limit\")]\n\t\tlog.Printf(\"[DEBUG] Dismissed an error as retryable based on error code 403 and error message 'Quota exceeded for quota metric `%s`: %s\", metric, err)\n\t\treturn true, fmt.Sprintf(\"Waiting for quota limit %s to refresh\", limit)\n\t}\n\treturn false, \"\"\n}", "title": "" }, { "docid": "82da5ceb919f68152ae73c9caedd4c8e", "score": "0.44651985", "text": "func (p Probe) IsSupported(op uint8) bool {\n\tfor i := uint8(0); i < p.OpsLen; i++ {\n\t\tif p.Ops[i].Op != op {\n\t\t\tcontinue\n\t\t}\n\t\treturn p.Ops[i].Flags&IO_URING_OP_SUPPORTED > 0\n\t}\n\treturn false\n}", "title": "" }, { "docid": "945e97d27a093a452bdfe875d89d1d2c", "score": "0.4460974", "text": "func IsCustomErr(err error) bool {\n\tswitch err.(type) {\n\tdefault:\n\t\treturn false\n\tcase *CustomError:\n\t\treturn true\n\t}\n}", "title": "" }, { "docid": "ce9a5973e01ef0ed7e0170e80be15de2", "score": "0.44562665", "text": "func isBadEndpointErr(err error) bool {\n\treturn false\n}", "title": "" }, { "docid": "ccb483e0b19eb38ea45bd895f61d9c70", "score": "0.44414318", "text": "func (c *sageMakerClientWrapper) isDescribeEndpointConfig404Error(err error) bool {\n\tif requestFailure, isRequestFailure := err.(awserr.RequestFailure); isRequestFailure {\n\t\treturn requestFailure.Code() == DescribeEndpointConfig404Code && strings.HasPrefix(requestFailure.Message(), DescribeEndpointConfig404MessagePrefix)\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "ccb483e0b19eb38ea45bd895f61d9c70", "score": "0.44414318", "text": "func (c *sageMakerClientWrapper) isDescribeEndpointConfig404Error(err error) bool {\n\tif requestFailure, isRequestFailure := err.(awserr.RequestFailure); isRequestFailure {\n\t\treturn requestFailure.Code() == DescribeEndpointConfig404Code && strings.HasPrefix(requestFailure.Message(), DescribeEndpointConfig404MessagePrefix)\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "ae7b1276a6b1c338e6e703ec4147fcfd", "score": "0.44391596", "text": "func (api API) 
CustomMetricExists(m string) (bool, error) {\n\tm = \"custom:\" + m\n\tmetrics, err := api.GetCustomMetrics()\n\tif err != nil {\n\t\treturn false, err\n\t}\n\tfor _, v := range metrics {\n\t\t//log.Println(\"Checking\", v, \"against\", m)\n\t\tif v == m {\n\t\t\treturn true, nil\n\t\t}\n\t}\n\treturn false, nil\n}", "title": "" }, { "docid": "4b2186a70941eeb523046f700c5ee276", "score": "0.44338208", "text": "func (c *sageMakerClientWrapper) isDescribeHyperParameterTuningJob404Error(err error) bool {\n\tif requestFailure, isRequestFailure := err.(awserr.RequestFailure); isRequestFailure {\n\t\treturn requestFailure.Code() == DescribeHyperParameterTuningJob404Code && strings.HasPrefix(requestFailure.Message(), DescribeHyperParameterTuningJob404MessagePrefix)\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "6f492e6329c706a30c7c1a12f40519af", "score": "0.44294727", "text": "func (r Rule) HasValidatePodSecurity() bool {\n\treturn r.Validation.PodSecurity != nil && !datautils.DeepEqual(r.Validation.PodSecurity, &kyvernov1.PodSecurity{})\n}", "title": "" }, { "docid": "3e996cdbcaaf24feedf74bea00b99999", "score": "0.44266796", "text": "func IsDeadlineExceeded(err error) bool {\n\treturn internal.HasPlatformErrorCode(err, internal.DeadlineExceeded)\n}", "title": "" }, { "docid": "6f34cb96646938d3610815761e94c555", "score": "0.44214725", "text": "func (o *ComputePhysicalSummaryAllOf) HasOperability() bool {\n\tif o != nil && o.Operability != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "3b81ea678d32494eaf5e2088586ecc40", "score": "0.4413037", "text": "func ValidatePodSpec(spec *core.PodSpec, podMeta *metav1.ObjectMeta, fldPath *field.Path, opts PodValidationOptions) field.ErrorList {\n\tallErrs := field.ErrorList{}\n\n\tvols, vErrs := ValidateVolumes(spec.Volumes, podMeta, fldPath.Child(\"volumes\"), opts)\n\tallErrs = append(allErrs, vErrs...)\n\tpodClaimNames := gatherPodResourceClaimNames(spec.ResourceClaims)\n\tallErrs = append(allErrs, validatePodResourceClaims(podMeta, spec.ResourceClaims, fldPath.Child(\"resourceClaims\"))...)\n\tallErrs = append(allErrs, validateContainers(spec.Containers, vols, podClaimNames, fldPath.Child(\"containers\"), opts)...)\n\tallErrs = append(allErrs, validateInitContainers(spec.InitContainers, spec.Containers, vols, podClaimNames, fldPath.Child(\"initContainers\"), opts)...)\n\tallErrs = append(allErrs, validateEphemeralContainers(spec.EphemeralContainers, spec.Containers, spec.InitContainers, vols, podClaimNames, fldPath.Child(\"ephemeralContainers\"), opts)...)\n\tallErrs = append(allErrs, validatePodHostNetworkDeps(spec, fldPath, opts)...)\n\tallErrs = append(allErrs, validateRestartPolicy(&spec.RestartPolicy, fldPath.Child(\"restartPolicy\"))...)\n\tallErrs = append(allErrs, validateDNSPolicy(&spec.DNSPolicy, fldPath.Child(\"dnsPolicy\"))...)\n\tallErrs = append(allErrs, unversionedvalidation.ValidateLabels(spec.NodeSelector, fldPath.Child(\"nodeSelector\"))...)\n\tallErrs = append(allErrs, validatePodSpecSecurityContext(spec.SecurityContext, spec, fldPath, fldPath.Child(\"securityContext\"), opts)...)\n\tallErrs = append(allErrs, validateImagePullSecrets(spec.ImagePullSecrets, fldPath.Child(\"imagePullSecrets\"))...)\n\tallErrs = append(allErrs, validateAffinity(spec.Affinity, opts, fldPath.Child(\"affinity\"))...)\n\tallErrs = append(allErrs, validatePodDNSConfig(spec.DNSConfig, &spec.DNSPolicy, fldPath.Child(\"dnsConfig\"), opts)...)\n\tallErrs = append(allErrs, validateReadinessGates(spec.ReadinessGates, 
fldPath.Child(\"readinessGates\"))...)\n\tallErrs = append(allErrs, validateSchedulingGates(spec.SchedulingGates, fldPath.Child(\"schedulingGates\"))...)\n\tallErrs = append(allErrs, validateTopologySpreadConstraints(spec.TopologySpreadConstraints, fldPath.Child(\"topologySpreadConstraints\"), opts)...)\n\tallErrs = append(allErrs, validateWindowsHostProcessPod(spec, fldPath)...)\n\tallErrs = append(allErrs, validateHostUsers(spec, fldPath)...)\n\tif len(spec.ServiceAccountName) > 0 {\n\t\tfor _, msg := range ValidateServiceAccountName(spec.ServiceAccountName, false) {\n\t\t\tallErrs = append(allErrs, field.Invalid(fldPath.Child(\"serviceAccountName\"), spec.ServiceAccountName, msg))\n\t\t}\n\t}\n\n\tif len(spec.NodeName) > 0 {\n\t\tfor _, msg := range ValidateNodeName(spec.NodeName, false) {\n\t\t\tallErrs = append(allErrs, field.Invalid(fldPath.Child(\"nodeName\"), spec.NodeName, msg))\n\t\t}\n\t}\n\n\tif spec.ActiveDeadlineSeconds != nil {\n\t\tvalue := *spec.ActiveDeadlineSeconds\n\t\tif value < 1 || value > math.MaxInt32 {\n\t\t\tallErrs = append(allErrs, field.Invalid(fldPath.Child(\"activeDeadlineSeconds\"), value, validation.InclusiveRangeError(1, math.MaxInt32)))\n\t\t}\n\t}\n\n\tif len(spec.Hostname) > 0 {\n\t\tallErrs = append(allErrs, ValidateDNS1123Label(spec.Hostname, fldPath.Child(\"hostname\"))...)\n\t}\n\n\tif len(spec.Subdomain) > 0 {\n\t\tallErrs = append(allErrs, ValidateDNS1123Label(spec.Subdomain, fldPath.Child(\"subdomain\"))...)\n\t}\n\n\tif len(spec.Tolerations) > 0 {\n\t\tallErrs = append(allErrs, ValidateTolerations(spec.Tolerations, fldPath.Child(\"tolerations\"))...)\n\t}\n\n\tif len(spec.HostAliases) > 0 {\n\t\tallErrs = append(allErrs, ValidateHostAliases(spec.HostAliases, fldPath.Child(\"hostAliases\"))...)\n\t}\n\n\tif len(spec.PriorityClassName) > 0 {\n\t\tfor _, msg := range ValidatePriorityClassName(spec.PriorityClassName, false) {\n\t\t\tallErrs = append(allErrs, field.Invalid(fldPath.Child(\"priorityClassName\"), spec.PriorityClassName, msg))\n\t\t}\n\t}\n\n\tif spec.RuntimeClassName != nil {\n\t\tallErrs = append(allErrs, ValidateRuntimeClassName(*spec.RuntimeClassName, fldPath.Child(\"runtimeClassName\"))...)\n\t}\n\n\tif spec.PreemptionPolicy != nil {\n\t\tallErrs = append(allErrs, ValidatePreemptionPolicy(spec.PreemptionPolicy, fldPath.Child(\"preemptionPolicy\"))...)\n\t}\n\n\tif spec.Overhead != nil {\n\t\tallErrs = append(allErrs, validateOverhead(spec.Overhead, fldPath.Child(\"overhead\"), opts)...)\n\t}\n\n\tif spec.OS != nil {\n\t\tosErrs := validateOS(spec, fldPath.Child(\"os\"), opts)\n\t\tswitch {\n\t\tcase len(osErrs) > 0:\n\t\t\tallErrs = append(allErrs, osErrs...)\n\t\tcase spec.OS.Name == core.Linux:\n\t\t\tallErrs = append(allErrs, validateLinux(spec, fldPath)...)\n\t\tcase spec.OS.Name == core.Windows:\n\t\t\tallErrs = append(allErrs, validateWindows(spec, fldPath)...)\n\t\t}\n\t}\n\treturn allErrs\n}", "title": "" }, { "docid": "b16aab928eb1dfc866d079aae51c262e", "score": "0.44091833", "text": "func isPodOrSimilar(gk unversioned.GroupKind) bool {\n\treturn gk == gkPod ||\n\t\tgk == gkJob ||\n\t\tgk == gkDeployment ||\n\t\tgk == gkDaemonSet ||\n\t\tgk == gkStatefulSet\n}", "title": "" }, { "docid": "2664a4457af0f0df327f7ea50f9b7d0a", "score": "0.44087183", "text": "func IsThrottled(err error) bool {\n\tswitch {\n\tcase err == nil:\n\t\treturn false\n\tcase strings.Contains(err.Error(), \"Throttling: Rate exceeded\"):\n\t\treturn true\n\tdefault:\n\t\treturn false\n\t}\n}", "title": "" }, { "docid": "c4536e14e88cd8cae9701b5dd9cf7e7a", 
"score": "0.44031143", "text": "func (p partitions) IsSupported(partition string) bool {\n\tfor _, pt := range p {\n\t\tif pt.name == partition {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "57e30678e31c5e5dfc5880fd569ae3f2", "score": "0.4393953", "text": "func (r *TrainingJobReconciler) isSageMaker429Response(awsError awserr.RequestFailure) bool {\n\treturn (awsError.Code() == \"ThrottlingException\") && (awsError.Message() == \"Rate exceeded\")\n}", "title": "" }, { "docid": "3af433f2e6a22344eb5a50aa2be027c1", "score": "0.43923932", "text": "func (o *V0037JobResponseProperties) HasSocketsPerBoard() bool {\n\tif o != nil && o.SocketsPerBoard != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "a87b75bba1ec3446f0ced3de7fd04cc5", "score": "0.43889663", "text": "func (c *sageMakerClientWrapper) isDescribeEndpoint404Error(err error) bool {\n\tif requestFailure, isRequestFailure := err.(awserr.RequestFailure); isRequestFailure {\n\t\treturn requestFailure.Code() == DescribeEndpoint404Code && strings.HasPrefix(requestFailure.Message(), DescribeEndpoint404MessagePrefix)\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "a168d4e2ab32f104c049d2ad9b87651c", "score": "0.43883535", "text": "func IsErrSlackPost(err error) bool {\n\t_, ok := err.(ErrSlackPost)\n\treturn ok\n}", "title": "" } ]
822358bd5192007a5d1db3c2ab87dce2
ValidityPeriodOf returns VP from deadend time and singleattempt flag
[ { "docid": "714bc284eddb3c64490aba9c890a53d2", "score": "0.6847047", "text": "func ValidityPeriodOf(t time.Duration, s bool) ValidityPeriod {\n\tif t%time.Second != 0 {\n\t\tt = t.Truncate(time.Second)\n\t}\n\n\tif s {\n\t\tvp := VPEnhanced{}\n\t\tvp[0] = 0x40\n\t\tif t == 0 {\n\t\t} else if t%week == 0 && t >= week*5 && t <= week*63 {\n\t\t\tvp[0] |= 0x01\n\t\t\tvp[1] = byte(t/week) + 192\n\t\t} else if t%day == 0 && t >= day*2 && t <= day*30 {\n\t\t\tvp[0] |= 0x01\n\t\t\tvp[1] = byte(t/day) + 166\n\t\t} else if t%(time.Minute*30) == 0 && t <= day && t >= time.Hour*12+time.Minute*30 {\n\t\t\tvp[0] |= 0x01\n\t\t\tvp[1] = byte((t-time.Hour*12)/(time.Minute*30)) + 143\n\t\t} else if t%(time.Minute*5) == 0 && t <= time.Hour*12 && t >= time.Minute*5 {\n\t\t\tvp[0] |= 0x01\n\t\t\tvp[1] = byte(t/(time.Minute*5)) - 1\n\t\t} else if t <= time.Second*255 {\n\t\t\tvp[0] |= 0x02\n\t\t\tvp[1] = byte(t / time.Second)\n\t\t} else if t <= time.Hour*99+time.Minute*59+time.Second*59 {\n\t\t\tvp[0] |= 0x03\n\t\t\tvp[1] = int2SemiOctet(int(t / time.Hour))\n\t\t\tvp[2] = int2SemiOctet(int((t % time.Hour) / time.Minute))\n\t\t\tvp[3] = int2SemiOctet(int((t % time.Minute) / time.Second))\n\t\t}\n\t\treturn vp\n\t}\n\n\tif t == 0 {\n\t\treturn VPEnhanced{}\n\t} else if t%week == 0 && t >= week*5 && t <= week*63 {\n\t\treturn VPRelative(byte(t/week) + 192)\n\t} else if t%day == 0 && t >= day*2 && t <= day*30 {\n\t\treturn VPRelative(byte(t/day) + 166)\n\t} else if t%(time.Minute*30) == 0 && t <= day && t >= time.Hour*12+time.Minute*30 {\n\t\treturn VPRelative(byte((t-time.Hour*12)/(time.Minute*30)) + 143)\n\t} else if t%(time.Minute*5) == 0 && t <= time.Hour*12 && t >= time.Minute*5 {\n\t\treturn VPRelative(byte(t/(time.Minute*5)) - 1)\n\t} else if t <= time.Second*255 {\n\t\tvp := VPEnhanced{}\n\t\tvp[0] = 0x02\n\t\tvp[1] = byte(t / time.Second)\n\t\treturn vp\n\t} else if t <= time.Hour*99+time.Minute*59+time.Second*59 {\n\t\tvp := VPEnhanced{}\n\t\tvp[0] = 0x03\n\t\tvp[1] = int2SemiOctet(int(t / time.Hour))\n\t\tvp[2] = int2SemiOctet(int((t % time.Hour) / time.Minute))\n\t\tvp[3] = int2SemiOctet(int((t % time.Minute) / time.Second))\n\t\treturn vp\n\t}\n\n\tvp := marshalSCTimeStamp(time.Now().Add(t))\n\treturn VPAbsolute{vp[0], vp[1], vp[2], vp[3], vp[4], vp[5], vp[6]}\n}", "title": "" } ]
[ { "docid": "f5062912f1a2be7ffb3fbd254bddbbd5", "score": "0.6429169", "text": "func getCertValidityPeriod(secret *corev1.Secret) (*time.Time, *time.Time, error) {\n\tif secret.Data == nil {\n\t\treturn nil, nil, fmt.Errorf(\"no client certificate found in secret %q\", secret.Namespace+\"/\"+secret.Name)\n\t}\n\n\tcertData, ok := secret.Data[TLSCertFile]\n\tif !ok {\n\t\treturn nil, nil, fmt.Errorf(\"no client certificate found in secret %q\", secret.Namespace+\"/\"+secret.Name)\n\t}\n\n\tcerts, err := certutil.ParseCertsPEM(certData)\n\tif err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"unable to parse TLS certificates: %w\", err)\n\t}\n\n\tif len(certs) == 0 {\n\t\treturn nil, nil, errors.New(\"No cert found in certificate\")\n\t}\n\n\t// find out the validity period for all certs in the certificate chain\n\tvar notBefore, notAfter *time.Time\n\tfor index, cert := range certs {\n\t\tif index == 0 {\n\t\t\tnotBefore = &cert.NotBefore\n\t\t\tnotAfter = &cert.NotAfter\n\t\t\tcontinue\n\t\t}\n\n\t\tif notBefore.Before(cert.NotBefore) {\n\t\t\tnotBefore = &cert.NotBefore\n\t\t}\n\n\t\tif notAfter.After(cert.NotAfter) {\n\t\t\tnotAfter = &cert.NotAfter\n\t\t}\n\t}\n\n\treturn notBefore, notAfter, nil\n}", "title": "" }, { "docid": "bc5258118e769f1abd985a48f69db59f", "score": "0.5988484", "text": "func StorageMining_SurprisePoStProvingPeriod() abi.ChainEpoch {\n\tPARAM_FINISH()\n\tconst PROVING_PERIOD = abi.ChainEpoch(2) // placeholder, 2 days\n\treturn PROVING_PERIOD\n}", "title": "" }, { "docid": "aafdd245e432a95cd75b5be222c4578f", "score": "0.5952512", "text": "func (p GTLDPeriod) Valid(when time.Time) error {\n\t// NOTE: We can throw away the errors from time.Parse in this function because\n\t// the zlint-gtld-update command only writes entries to the generated gTLD map\n\t// after the dates have been verified as parseable\n\tnotBefore, _ := time.Parse(GTLDPeriodDateFormat, p.DelegationDate)\n\tif when.Before(notBefore) {\n\t\treturn fmt.Errorf(`gTLD \".%s\" is not valid until %s`,\n\t\t\tp.GTLD, p.DelegationDate)\n\t}\n\t// The removal date may be empty. 
We only need to check `when` against the\n\t// removal when it isn't empty\n\tif p.RemovalDate != \"\" {\n\t\tnotAfter, _ := time.Parse(GTLDPeriodDateFormat, p.RemovalDate)\n\t\tif when.After(notAfter) {\n\t\t\treturn fmt.Errorf(`gTLD \".%s\" is not valid after %s`,\n\t\t\t\tp.GTLD, p.RemovalDate)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "dffa0101715cc0113203040866003c57", "score": "0.58036757", "text": "func (o *SmsTextualMessage) GetValidityPeriod() int64 {\n\tif o == nil || o.ValidityPeriod == nil {\n\t\tvar ret int64\n\t\treturn ret\n\t}\n\treturn *o.ValidityPeriod\n}", "title": "" }, { "docid": "3552fc3c5f5264774704414fac313485", "score": "0.5769882", "text": "func (o *SmsTextualMessage) GetValidityPeriodOk() (*int64, bool) {\n\tif o == nil || o.ValidityPeriod == nil {\n\t\treturn nil, false\n\t}\n\treturn o.ValidityPeriod, true\n}", "title": "" }, { "docid": "d3d725dd43137c6750be1751a4394079", "score": "0.5762146", "text": "func (_JobsManager *JobsManagerSession) VerificationPeriod() (*big.Int, error) {\n\treturn _JobsManager.Contract.VerificationPeriod(&_JobsManager.CallOpts)\n}", "title": "" }, { "docid": "db883817ccb4fab519a406378952dcdd", "score": "0.5711135", "text": "func (m *ManagedDeviceCertificateState) GetCertificateValidityPeriod()(*int32) {\n val, err := m.GetBackingStore().Get(\"certificateValidityPeriod\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*int32)\n }\n return nil\n}", "title": "" }, { "docid": "d58ec0085c93ac2a22e70128dbe7bc66", "score": "0.5699188", "text": "func (_JobsManager *JobsManagerCallerSession) VerificationPeriod() (*big.Int, error) {\n\treturn _JobsManager.Contract.VerificationPeriod(&_JobsManager.CallOpts)\n}", "title": "" }, { "docid": "10d42d523e3e0f2bd76b34345444c796", "score": "0.56599504", "text": "func (rp RewardPeriod) Validate() error {\n\tif rp.Start.Unix() <= 0 {\n\t\treturn errors.New(\"reward period start time cannot be 0\")\n\t}\n\tif rp.End.Unix() <= 0 {\n\t\treturn errors.New(\"reward period end time cannot be 0\")\n\t}\n\tif rp.Start.After(rp.End) {\n\t\t// This is needed to ensure that the begin blocker accumulation does not panic.\n\t\treturn fmt.Errorf(\"end period time %s cannot be before start time %s\", rp.End, rp.Start)\n\t}\n\tif rp.RewardsPerSecond.Denom != USDXMintingRewardDenom {\n\t\treturn fmt.Errorf(\"reward denom must be %s, got: %s\", USDXMintingRewardDenom, rp.RewardsPerSecond.Denom)\n\t}\n\tif !rp.RewardsPerSecond.IsValid() {\n\t\treturn fmt.Errorf(\"invalid reward amount: %s\", rp.RewardsPerSecond)\n\t}\n\n\tif rp.RewardsPerSecond.Amount.IsZero() {\n\t\treturn fmt.Errorf(\"reward amount cannot be zero: %v\", rp.RewardsPerSecond)\n\t}\n\n\tif strings.TrimSpace(rp.CollateralType) == \"\" {\n\t\treturn fmt.Errorf(\"reward period collateral type cannot be blank: %v\", rp)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e5444d083ccf835aa51c60d420b0e4b4", "score": "0.55995184", "text": "func (_JobsManager *JobsManagerCaller) VerificationPeriod(opts *bind.CallOpts) (*big.Int, error) {\n\tvar (\n\t\tret0 = new(*big.Int)\n\t)\n\tout := ret0\n\terr := _JobsManager.contract.Call(opts, out, \"verificationPeriod\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "052623e51897e85642c88e95c26200ec", "score": "0.54989994", "text": "func isValidBreakPeriod(breakId uuid.UUID) (bool, error) {\n _, err := persistence.getBreakPeriod(breakId)\n if err != nil {\n switch err {\n case pgx.ErrNoRows:\n return false, nil\n default:\n return false, err\n }\n }\n return true, nil\n}", "title": "" 
}, { "docid": "aace12baf9aa69118ef1ede76389d91d", "score": "0.54798913", "text": "func (o *GetLostRewardsAggParams) validatePeriod(formats strfmt.Registry) error {\n\n\tif err := validate.Enum(\"period\", \"query\", o.Period, []interface{}{\"day\", \"week\", \"month\"}); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "d1a75e58421a357056ff8657b1cff658", "score": "0.54165715", "text": "func (au *LocalAuthenticator) Period() time.Duration {\n\tif au.Stopped() {\n\t\treturn -1\n\t}\n\n\tau.mux.RLock()\n\tdefer au.mux.RUnlock()\n\n\treturn au.period\n}", "title": "" }, { "docid": "94c271f2a778265a5442e899ebf4e950", "score": "0.5362851", "text": "func ErrInvalidValidityPeriod() sdk.Error {\n\treturn types.NewError(types.CodeInvalidValidityPeriod, fmt.Sprintf(\"invalid grant validity period\"))\n}", "title": "" }, { "docid": "f821987f4d3c75df998fac03ea4dffae", "score": "0.5343644", "text": "func (mrp MultiRewardPeriod) Validate() error {\n\tif mrp.Start.IsZero() {\n\t\treturn errors.New(\"reward period start time cannot be 0\")\n\t}\n\tif mrp.End.IsZero() {\n\t\treturn errors.New(\"reward period end time cannot be 0\")\n\t}\n\tif mrp.Start.After(mrp.End) {\n\t\t// This is needed to ensure that the begin blocker accumulation does not panic.\n\t\treturn fmt.Errorf(\"end period time %s cannot be before start time %s\", mrp.End, mrp.Start)\n\t}\n\n\t// This also ensures there are no 0 amount coins.\n\tif !mrp.RewardsPerSecond.IsValid() {\n\t\treturn fmt.Errorf(\"invalid reward amount: %s\", mrp.RewardsPerSecond)\n\t}\n\tif strings.TrimSpace(mrp.CollateralType) == \"\" {\n\t\treturn fmt.Errorf(\"reward period collateral type cannot be blank: %v\", mrp)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "740b51b327f5ded581e6f4510c3f10af", "score": "0.53335863", "text": "func (o *SmsTextualMessage) SetValidityPeriod(v int64) {\n\to.ValidityPeriod = &v\n}", "title": "" }, { "docid": "dd4f984af8dc8dfd6959d9ecd98a040b", "score": "0.53309774", "text": "func isValidWorkPeriod(periodId uuid.UUID) (bool, error) {\n _, err := persistence.getWorkPeriod(periodId)\n if err != nil {\n switch err {\n case pgx.ErrNoRows:\n return false, nil\n default:\n return false, err\n }\n }\n return true, nil\n}", "title": "" }, { "docid": "afffc582cf8119664d8a5681a5e9d1a0", "score": "0.5319408", "text": "func TestIsValid(t *testing.T) {\n\tassert := audit.NewTestingAssertion(t, true)\n\tassert.Logf(\"testing time validation\")\n\tnow := time.Now()\n\tleeway := time.Minute\n\tkey := []byte(\"secret\")\n\t// Create token with no times set, encode, decode, validate ok.\n\tclaims := jwt.NewClaims()\n\tjwtEncode, err := jwt.Encode(claims, key, jwt.HS512)\n\tassert.Nil(err)\n\tjwtDecode, err := jwt.Decode(jwtEncode.String())\n\tassert.Nil(err)\n\tok := jwtDecode.IsValid(leeway)\n\tassert.True(ok)\n\t// Now a token with a long timespan, still valid.\n\tclaims = jwt.NewClaims()\n\tclaims.SetNotBefore(now.Add(-time.Hour))\n\tclaims.SetExpiration(now.Add(time.Hour))\n\tjwtEncode, err = jwt.Encode(claims, key, jwt.HS512)\n\tassert.Nil(err)\n\tjwtDecode, err = jwt.Decode(jwtEncode.String())\n\tassert.Nil(err)\n\tok = jwtDecode.IsValid(leeway)\n\tassert.True(ok)\n\t// Now a token with a long timespan in the past, not valid.\n\tclaims = jwt.NewClaims()\n\tclaims.SetNotBefore(now.Add(-2 * time.Hour))\n\tclaims.SetExpiration(now.Add(-time.Hour))\n\tjwtEncode, err = jwt.Encode(claims, key, jwt.HS512)\n\tassert.Nil(err)\n\tjwtDecode, err = jwt.Decode(jwtEncode.String())\n\tassert.Nil(err)\n\tok = 
jwtDecode.IsValid(leeway)\n\tassert.False(ok)\n\t// And at last a token with a long timespan in the future, not valid.\n\tclaims = jwt.NewClaims()\n\tclaims.SetNotBefore(now.Add(time.Hour))\n\tclaims.SetExpiration(now.Add(2 * time.Hour))\n\tjwtEncode, err = jwt.Encode(claims, key, jwt.HS512)\n\tassert.Nil(err)\n\tjwtDecode, err = jwt.Decode(jwtEncode.String())\n\tassert.Nil(err)\n\tok = jwtDecode.IsValid(leeway)\n\tassert.False(ok)\n}", "title": "" }, { "docid": "6452107174c14b1c4a74d2f6005084ec", "score": "0.53190535", "text": "func (o *SmsTextualMessage) HasValidityPeriod() bool {\n\tif o != nil && o.ValidityPeriod != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "75126130d64a7ed516b621099475575f", "score": "0.522457", "text": "func (o WindowsVirtualMachineScaleSetAutomaticInstanceRepairPtrOutput) GracePeriod() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *WindowsVirtualMachineScaleSetAutomaticInstanceRepair) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.GracePeriod\n\t}).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "fd62ffa7e3ef22c2257f48efda98b4a6", "score": "0.52009785", "text": "func (o WindowsVirtualMachineScaleSetAutomaticInstanceRepairOutput) GracePeriod() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v WindowsVirtualMachineScaleSetAutomaticInstanceRepair) *string { return v.GracePeriod }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "48551a515249c1fe4bee059872b80ba8", "score": "0.52006596", "text": "func (o SigningProfileOutput) SignatureValidityPeriod() SigningProfileSignatureValidityPeriodPtrOutput {\n\treturn o.ApplyT(func(v *SigningProfile) SigningProfileSignatureValidityPeriodPtrOutput {\n\t\treturn v.SignatureValidityPeriod\n\t}).(SigningProfileSignatureValidityPeriodPtrOutput)\n}", "title": "" }, { "docid": "ba62f3072ef7e1ac9b1000b4d4fa55d0", "score": "0.5157778", "text": "func checkPollValidity(start time.Time, end time.Time) bool {\n\tnow := time.Now()\n\t//Vote must be casted before 5 min of expiry\n\tif end.Sub(now).Minutes() > 5 {\n\t\t//Vote must be casted after 5 min of start\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "a95bcc18eea2115abec907a0b7a85619", "score": "0.5150468", "text": "func StorageMining_PoStNoChallengePeriod() abi.ChainEpoch {\n\tPARAM_FINISH()\n\tconst SURPRISE_NO_CHALLENGE_PERIOD = abi.ChainEpoch(0) // placeholder, 2 hours\n\treturn SURPRISE_NO_CHALLENGE_PERIOD\n}", "title": "" }, { "docid": "a97547f69c97670b3af37582914a2064", "score": "0.51406866", "text": "func (o LinuxVirtualMachineScaleSetAutomaticInstanceRepairPtrOutput) GracePeriod() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *LinuxVirtualMachineScaleSetAutomaticInstanceRepair) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.GracePeriod\n\t}).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "0daa672f0ddd99326aaf9030776ed330", "score": "0.51396924", "text": "func (o LinuxVirtualMachineScaleSetAutomaticInstanceRepairOutput) GracePeriod() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v LinuxVirtualMachineScaleSetAutomaticInstanceRepair) *string { return v.GracePeriod }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "c4bb67ee29b597c192c9ff09f56a63c4", "score": "0.51233006", "text": "func ValidateCertPeriod(cert *x509.Certificate, offset time.Duration) error {\n\tperiod := fmt.Sprintf(\"NotBefore: %v, NotAfter: %v\", cert.NotBefore, cert.NotAfter)\n\tnow := time.Now().Add(offset)\n\tif now.Before(cert.NotBefore) {\n\t\treturn 
errors.Errorf(\"the certificate is not valid yet: %s\", period)\n\t}\n\tif now.After(cert.NotAfter) {\n\t\treturn errors.Errorf(\"the certificate has expired: %s\", period)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "fa957531eb6562d5ccf7d3644a287e61", "score": "0.50985277", "text": "func (p Period) Validate() error {\n\tif p.Start.IsZero() {\n\t\treturn fmt.Errorf(\"start is zero\")\n\t}\n\n\tif p.End.IsZero() {\n\t\treturn fmt.Errorf(\"end is zero\")\n\t}\n\n\tif p.End.Equal(p.Start) {\n\t\treturn fmt.Errorf(\"end must be after start; is the same (%v)\", p.End)\n\t}\n\n\tif p.End.Before(p.Start) {\n\t\treturn fmt.Errorf(\"end (%v) is %v before start (%v)\", p.End, p.Start.Sub(p.End), p.Start)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "431429af94b50b59afeacb35c72ddff4", "score": "0.5079959", "text": "func (t *Ticket) Valid(d time.Duration) (bool, error) {\n\t// Check for future tickets or invalid tickets\n\ttime := time.Now().UTC()\n\tif t.DecryptedEncPart.StartTime.Sub(time) > d || types.IsFlagSet(&t.DecryptedEncPart.Flags, flags.Invalid) {\n\t\treturn false, NewKRBError(t.SName, t.Realm, errorcode.KRB_AP_ERR_TKT_NYV, \"service ticket provided is not yet valid\")\n\t}\n\n\t// Check for expired ticket\n\tif time.Sub(t.DecryptedEncPart.EndTime) > d {\n\t\treturn false, NewKRBError(t.SName, t.Realm, errorcode.KRB_AP_ERR_TKT_EXPIRED, \"service ticket provided has expired\")\n\t}\n\n\treturn true, nil\n}", "title": "" }, { "docid": "54a4d8857174f24fe72c992f9943c46c", "score": "0.5078464", "text": "func (m *Domain) GetPasswordValidityPeriodInDays()(*int32) {\n val, err := m.GetBackingStore().Get(\"passwordValidityPeriodInDays\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*int32)\n }\n return nil\n}", "title": "" }, { "docid": "9d191b7b8326fed0f592ffd669afc54c", "score": "0.50780505", "text": "func GenVotingPeriod(r *rand.Rand) time.Duration {\n\treturn time.Duration(simulation.RandIntBetween(r, expeditedMaxVotingPeriod, 2*expeditedMaxVotingPeriod)) * time.Second\n}", "title": "" }, { "docid": "91eb464e331f8c4d44debd785e85dd2b", "score": "0.5071368", "text": "func (o OrchestratedVirtualMachineScaleSetAutomaticInstanceRepairOutput) GracePeriod() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v OrchestratedVirtualMachineScaleSetAutomaticInstanceRepair) *string { return v.GracePeriod }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "9e53ecb7fdabf11070476abb095849f6", "score": "0.50679374", "text": "func (o OrchestratedVirtualMachineScaleSetAutomaticInstanceRepairPtrOutput) GracePeriod() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *OrchestratedVirtualMachineScaleSetAutomaticInstanceRepair) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.GracePeriod\n\t}).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "f77de7cee10e286563ac744482806985", "score": "0.5041354", "text": "func (tc TRCalendar) NotablePeriod(date time.Time) (bool, string) {\n\t// TODO:\n\t// summer/winter/spring/autumn time\n\t// etc\n\treturn false, \"\"\n}", "title": "" }, { "docid": "0991764e37102541d267beef3ee59032", "score": "0.50404805", "text": "func (k Keeper) GetVotingPeriod(ctx sdk.Context, content gov.Content) (votingPeriod time.Duration) {\n\tif _, ok := content.(types.DelistProposal); ok {\n\t\tvotingPeriod = k.GetParams(ctx).DelistVotingPeriod\n\t}\n\treturn\n}", "title": "" }, { "docid": "6567d461a279356686ca915a00e55eac", "score": "0.50183624", "text": "func (backend *JWTAuthenticationBackend) getTokenRemainingValidity(timestamp 
interface{}) int {\n\tif validity, ok := timestamp.(float64); ok {\n\t\ttm := time.Unix(int64(validity), 0)\n\t\tremainer := tm.Sub(time.Now())\n\t\tif remainer > 0 {\n\t\t\treturn int(remainer.Seconds() + expireOffset)\n\t\t}\n\t}\n\treturn expireOffset\n}", "title": "" }, { "docid": "93d86438019ae4e67a6c2571ed56d542", "score": "0.5017348", "text": "func (p PercentageDecisionPolicy) GetVotingPeriod() time.Duration {\n\treturn p.Windows.VotingPeriod\n}", "title": "" }, { "docid": "12233e26e96c8272f8f08820506d2f70", "score": "0.49806023", "text": "func (m *ManagedDeviceCertificateState) SetCertificateValidityPeriod(value *int32)() {\n err := m.GetBackingStore().Set(\"certificateValidityPeriod\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "286b9fc1c788d110c20f312948449818", "score": "0.49270377", "text": "func (p RefreshPayload) Valid() error {\n\tif time.Now().After(time.Unix(p.EXP, 0)) {\n\t\treturn ErrExpiredToken\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "49aa3f98aedbe1ead57a6cda41ad815b", "score": "0.4917328", "text": "func TestExpirePeriod(t *testing.T) {\n\toneSecond := time.Second\n\ttwoSeconds := 2 * oneSecond\n\n\t// create a new collection\n\tc := &Collection{}\n\n\t// check if we change it we get back the value we provided\n\tc.SetExpirePeriod(oneSecond)\n\tif c.ExpirePeriod() != oneSecond {\n\t\tt.Errorf(\"TestExpirePeriod: did not get back oneSecond\")\n\t}\n\n\t// change the period and check again\n\tc.SetExpirePeriod(twoSeconds)\n\tif c.ExpirePeriod() != twoSeconds {\n\t\tt.Errorf(\"TestExpirePeriod: did not get back twoSeconds\")\n\t}\n}", "title": "" }, { "docid": "2e406d02f34e1e170d7986d7fa9a61c0", "score": "0.49056095", "text": "func (_JobsManager *JobsManagerSession) VerificationSlashingPeriod() (*big.Int, error) {\n\treturn _JobsManager.Contract.VerificationSlashingPeriod(&_JobsManager.CallOpts)\n}", "title": "" }, { "docid": "80594759ac43050fd83656f2d99a678d", "score": "0.48972043", "text": "func verifyReplayTime(replayTime time.Duration, f *flow.Flow, numReplays int64, t *testing.T) {\n\tminReplayTime, maxReplayTime := flowReplayTolerances(f)\n\tminReplayTime = minReplayTime * numReplays\n\tmaxReplayTime = maxReplayTime * numReplays\n\n\tif replayTime.Nanoseconds() > maxReplayTime {\n\t\tt.Errorf(\"replay time %v exceeds max replay time %v (ns)\\n\", replayTime, maxReplayTime)\n\t}\n\tif replayTime.Nanoseconds() < minReplayTime {\n\t\tt.Errorf(\"replay time %v is less than min replay time %v (ns)\\n\", replayTime, minReplayTime)\n\t}\n}", "title": "" }, { "docid": "f68a024bd022182adf2d44c861c96c1b", "score": "0.48915803", "text": "func (s ServerV50) RevalidationPending() bool {\n\treturn s.RevalApplyTime != nil && s.RevalUpdateTime != nil && s.RevalApplyTime.Before(*s.RevalUpdateTime)\n}", "title": "" }, { "docid": "ebe74fdd0d7859b81b10a2b9b7e90f48", "score": "0.48764417", "text": "func (gcp GenesisClaimPeriodID) Validate() error {\n\tif gcp.ID == 0 {\n\t\treturn errors.New(\"genesis claim period id cannot be 0\")\n\t}\n\treturn sdk.ValidateDenom(gcp.Denom)\n}", "title": "" }, { "docid": "43cd15fd986659dcf884f20df0272bfa", "score": "0.48750284", "text": "func (ej *ETJob) Duration() time.Duration {\n\ttrainingjob := ej.trainingjob\n\n\tif trainingjob.CreationTimestamp.IsZero() {\n\t\treturn 0\n\t}\n\n\tif ej.isFailed() {\n\t\tcond := getPodLatestCondition(ej.chiefPod)\n\t\tif !cond.LastTransitionTime.IsZero() {\n\t\t\treturn cond.LastTransitionTime.Time.Sub(trainingjob.CreationTimestamp.Time)\n\t\t} else {\n\t\t\tlog.Debugf(\"the 
latest condition's time is zero of pod %s\", ej.chiefPod.Name)\n\t\t}\n\t}\n\n\treturn metav1.Now().Sub(trainingjob.CreationTimestamp.Time)\n}", "title": "" }, { "docid": "5141d8e5d4d7e93066297f816f235f66", "score": "0.48739895", "text": "func (u *UserFull) GetTTLPeriod() (value int, ok bool) {\n\tif !u.Flags.Has(14) {\n\t\treturn value, false\n\t}\n\treturn u.TTLPeriod, true\n}", "title": "" }, { "docid": "8e4fe47a6bdf713dcab02feba987a894", "score": "0.48628646", "text": "func (v *ValidCert) RequireLifetime(expectNotBefore time.Time, expectNotAfter time.Time, delta time.Duration) {\n\tv.t.Helper()\n\trequire.WithinDuration(v.t, expectNotBefore, v.parsed.NotBefore, delta)\n\trequire.WithinDuration(v.t, expectNotAfter, v.parsed.NotAfter, delta)\n}", "title": "" }, { "docid": "9e8ad8afe2ee3eb0dfc0ede7ab50b844", "score": "0.48617408", "text": "func (_JobsManager *JobsManagerCaller) VerificationSlashingPeriod(opts *bind.CallOpts) (*big.Int, error) {\n\tvar (\n\t\tret0 = new(*big.Int)\n\t)\n\tout := ret0\n\terr := _JobsManager.contract.Call(opts, out, \"verificationSlashingPeriod\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "37738943b244831d29cc8542226e3395", "score": "0.4828087", "text": "func (p ThresholdDecisionPolicy) GetVotingPeriod() time.Duration {\n\treturn p.Windows.VotingPeriod\n}", "title": "" }, { "docid": "3a6e6b46f10c661adf22b12f0c22b245", "score": "0.4827678", "text": "func IsTimePeriodValid(startDate, endDate string) bool {\n\tstartDateTime, err1 := time.Parse(timeLayout, startDate)\n\tendDateTime, err2 := time.Parse(timeLayout, endDate)\n\tif err1 != nil || err2 != nil {\n\t\treturn false\n\t}\n\tif endDateTime.After(startDateTime) {\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "75b0b33065812e9ce8d8a57aa1c2c4dd", "score": "0.48176324", "text": "func (wt WorkingTime) IsApproaching() bool {\n if wt.IsZero() {\n return false\n }\n\n now := Now()\n lm := now.Add(-61 * time.Second)\n t := wt.Time(now)\n return t.After(lm) && !t.After(now)\n}", "title": "" }, { "docid": "a13bccb545a1879b227884013d4aefb2", "score": "0.48142964", "text": "func (m *WorkflowInstanceMutation) Deadline() (r time.Time, exists bool) {\n\tv := m.deadline\n\tif v == nil {\n\t\treturn\n\t}\n\treturn *v, true\n}", "title": "" }, { "docid": "80ab90da5616b213b24e260d3e1ee35d", "score": "0.48107988", "text": "func (m *PTXServiceDTOTourismSpecificationV2DatePeriod) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "title": "" }, { "docid": "8524af4f006434c7ddb1fa6464f7127c", "score": "0.48092148", "text": "func (s *Config) GetCertValidityBound() time.Duration { return s.cert.validityBound }", "title": "" }, { "docid": "f7c150666988b0b34ff40f308ce22cde", "score": "0.48041782", "text": "func testNextPeriod(t *testing.T, tg *siatest.TestGroup) {\n\t// Grab the renter\n\tr := tg.Renters()[0]\n\n\t// Request RenterGET\n\trg, err := r.RenterGet()\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif reflect.DeepEqual(rg.Settings.Allowance, modules.Allowance{}) {\n\t\tt.Fatal(\"test only is valid if the allowance is set\")\n\t}\n\n\t// Check Next Period\n\tcurrentPeriod, err := r.RenterCurrentPeriod()\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tsettings, err := r.RenterSettings()\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tperiod := settings.Allowance.Period\n\tnextPeriod := rg.NextPeriod\n\tif nextPeriod == 0 {\n\t\tt.Fatal(\"NextPeriod should not be zero for a renter with an allowance and contracts\")\n\t}\n\tif nextPeriod != currentPeriod+period {\n\t\tt.Fatalf(\"expected next 
period to be %v but got %v\", currentPeriod+period, nextPeriod)\n\t}\n}", "title": "" }, { "docid": "6dcd8290c3124b02ad4582b83e9aa0b2", "score": "0.48022696", "text": "func (_JobsManager *JobsManagerCallerSession) VerificationSlashingPeriod() (*big.Int, error) {\n\treturn _JobsManager.Contract.VerificationSlashingPeriod(&_JobsManager.CallOpts)\n}", "title": "" }, { "docid": "2310e42122cbe19eb74fa764affbc07f", "score": "0.47991076", "text": "func TestTravelPeriod(t *testing.T) {\n\tpeak := \"peak\"\n\tnight := \"night\"\n\tnonpeak := \"nonpeak\"\n\n\ttimeLayout := \"2006-01-02T15:04\"\n\tsun := \"2020-11-08T\"\n\tmon := \"2020-11-09T\"\n\tfri := \"2020-11-13T\"\n\tsat := \"2020-11-14T\"\n\tfor _, testCase := range []struct {\n\t\tt string\n\t\th string\n\t}{\n\t\t{t: mon + \"05:59\", h: night},\n\t\t{t: fri + \"05:59\", h: night},\n\t\t{t: sat + \"05:59\", h: night},\n\t\t{t: sun + \"05:59\", h: night},\n\n\t\t{t: mon + \"06:01\", h: peak},\n\t\t{t: fri + \"06:01\", h: peak},\n\t\t{t: sat + \"06:01\", h: nonpeak},\n\t\t{t: sun + \"06:01\", h: nonpeak},\n\n\t\t{t: mon + \"08:59\", h: peak},\n\t\t{t: fri + \"08:59\", h: peak},\n\t\t{t: sat + \"08:59\", h: nonpeak},\n\t\t{t: sun + \"08:59\", h: nonpeak},\n\n\t\t{t: mon + \"09:01\", h: nonpeak},\n\t\t{t: fri + \"09:01\", h: nonpeak},\n\t\t{t: sat + \"09:01\", h: nonpeak},\n\t\t{t: sun + \"09:01\", h: nonpeak},\n\n\t\t{t: mon + \"17:59\", h: nonpeak},\n\t\t{t: fri + \"17:59\", h: nonpeak},\n\t\t{t: sat + \"17:59\", h: nonpeak},\n\t\t{t: sun + \"17:59\", h: nonpeak},\n\n\t\t{t: mon + \"18:01\", h: peak},\n\t\t{t: fri + \"18:01\", h: peak},\n\t\t{t: sat + \"18:01\", h: nonpeak},\n\t\t{t: sun + \"18:01\", h: nonpeak},\n\n\t\t{t: mon + \"20:59\", h: peak},\n\t\t{t: fri + \"20:59\", h: peak},\n\t\t{t: sat + \"20:59\", h: nonpeak},\n\t\t{t: sun + \"20:59\", h: nonpeak},\n\n\t\t{t: mon + \"21:01\", h: nonpeak},\n\t\t{t: fri + \"21:01\", h: nonpeak},\n\t\t{t: sat + \"21:01\", h: nonpeak},\n\t\t{t: sun + \"21:01\", h: nonpeak},\n\n\t\t{t: mon + \"21:59\", h: nonpeak},\n\t\t{t: fri + \"21:59\", h: nonpeak},\n\t\t{t: sat + \"21:59\", h: nonpeak},\n\t\t{t: sun + \"21:59\", h: nonpeak},\n\n\t\t{t: mon + \"22:01\", h: night},\n\t\t{t: fri + \"22:01\", h: night},\n\t\t{t: sat + \"22:01\", h: night},\n\t\t{t: sun + \"22:01\", h: night},\n\t} {\n\t\tp, err := time.Parse(timeLayout, testCase.t)\n\t\tif err != nil {\n\t\t\tt.Error(err)\n\t\t}\n\t\tvar actual string\n\t\tswitch {\n\t\tcase isPeakHours(p):\n\t\t\tactual = peak\n\t\tcase isNightHours(p):\n\t\t\tactual = night\n\t\tdefault:\n\t\t\tactual = nonpeak\n\t\t}\n\t\tif actual != testCase.h {\n\t\t\tt.Errorf(\"%s %s expected: %s, actual: %s\", p.Weekday(), testCase.t, testCase.h, actual)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "4614ed657899a66993e6a9caec4298c6", "score": "0.47973", "text": "func determineGuilty(appID string, instance int, j *judgement) bool {\n\tt := time.Now().Unix()\n\tj.RLock()\n\tarray, ok := j.record[appID]\n\tj.RUnlock()\n\tif !ok {\n\t\tj.Lock()\n\t\tj.record[appID] = []int64{t}\n\t\tj.Unlock()\n\t\treturn false\n\t}\n\tif len(array) >= instance*TIMES {\n\t\tpermutation(array, t)\n\t\tj.Lock()\n\t\tj.record[appID] = array\n\t\tj.Unlock()\n\n\t\tbegin := len(array) - instance*TIMES\n\t\tif t-array[begin] <= PERIOD && t-array[begin] >= 0 {\n\t\t\treturn true\n\t\t}\n\t} else {\n\t\tarray = append(array, t)\n\t\tj.Lock()\n\t\tj.record[appID] = array\n\t\tj.Unlock()\n\t}\n\treturn false\n}", "title": "" }, { "docid": "c1901c3368baffd68182d5d0cb4ac748", "score": "0.47897473", "text": 
"func (k Keeper) GetVotingPeriod(ctx sdk.Context, content gov.Content) (votingPeriod time.Duration) {\n\tswitch content.(type) {\n\tcase types.AppUpgradeProposal:\n\t\tvotingPeriod = k.GetParams(ctx).AppUpgradeVotingPeriod\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "d031f6df89bb47baa3b253b415b30edb", "score": "0.47796533", "text": "func (o ElastigroupScalingDownPolicyOutput) Period() pulumi.IntPtrOutput {\n\treturn o.ApplyT(func(v ElastigroupScalingDownPolicy) *int { return v.Period }).(pulumi.IntPtrOutput)\n}", "title": "" }, { "docid": "65f426b53e35bdebcaecd7ef7f6d2e66", "score": "0.47743365", "text": "func (p *PercentageDecisionPolicy) Validate(g GroupInfo, config Config) error {\n\tif p.Windows.MinExecutionPeriod > p.Windows.VotingPeriod+config.MaxExecutionPeriod {\n\t\treturn errorsmod.Wrap(errors.ErrInvalid, \"min_execution_period should be smaller than voting_period + max_execution_period\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "a895d55d5ba7aee591be66a45c02ee5a", "score": "0.47634137", "text": "func (c ProcessesChecker) Periodicity() *time.Duration {\n\treturn c.cfg.Periodicity()\n}", "title": "" }, { "docid": "2a372c7793ffc81ab85a1fbf2b236755", "score": "0.47597167", "text": "func TestAdvanceTimeTxTimestampTooLate(t *testing.T) {\n\tvm, _ := defaultVM()\n\tvm.Ctx.Lock.Lock()\n\n\t// Case: Timestamp is after next validator start time\n\t// Add a pending validator\n\tpendingValidatorStartTime := defaultGenesisTime.Add(1 * time.Second)\n\tpendingValidatorEndTime := pendingValidatorStartTime.Add(MinimumStakingDuration)\n\tnodeIDKey, _ := vm.factory.NewPrivateKey()\n\tnodeID := nodeIDKey.PublicKey().Address()\n\taddPendingValidatorTx, err := vm.newAddValidatorTx(\n\t\tvm.minStake,\n\t\tuint64(pendingValidatorStartTime.Unix()),\n\t\tuint64(pendingValidatorEndTime.Unix()),\n\t\tnodeID,\n\t\tnodeID,\n\t\tNumberOfShares,\n\t\t[]*crypto.PrivateKeySECP256K1R{keys[0]},\n\t)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif err := vm.enqueueStaker(vm.DB, constants.PrimaryNetworkID, addPendingValidatorTx); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\ttx, err := vm.newAdvanceTimeTx(pendingValidatorStartTime.Add(1 * time.Second))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t} else if _, _, _, _, err = tx.UnsignedTx.(UnsignedProposalTx).SemanticVerify(vm, vm.DB, tx); err == nil {\n\t\tt.Fatal(\"should've failed verification because proposed timestamp is after pending validator start time\")\n\t}\n\tvm.Shutdown()\n\tvm.Ctx.Lock.Unlock()\n\n\t// Case: Timestamp is after next validator end time\n\tvm, _ = defaultVM()\n\tvm.Ctx.Lock.Lock()\n\tdefer func() {\n\t\tvm.Shutdown()\n\t\tvm.Ctx.Lock.Unlock()\n\t}()\n\n\t// fast forward clock to 10 seconds before genesis validators stop validating\n\tvm.clock.Set(defaultValidateEndTime.Add(-10 * time.Second))\n\n\t// Proposes advancing timestamp to 1 second after genesis validators stop validating\n\tif tx, err := vm.newAdvanceTimeTx(defaultValidateEndTime.Add(1 * time.Second)); err != nil {\n\t\tt.Fatal(err)\n\t} else if _, _, _, _, err = tx.UnsignedTx.(UnsignedProposalTx).SemanticVerify(vm, vm.DB, tx); err == nil {\n\t\tt.Fatal(\"should've failed verification because proposed timestamp is after pending validator start time\")\n\t}\n}", "title": "" }, { "docid": "e119ac2268e676346f3500c61aa597b3", "score": "0.47544038", "text": "func TestBasicFeeValidAllow(t *testing.T) {\n\tconfigOnce.Do(apptypes.ConfigureSDK)\n\t_, _, app, _ := mustGetEmApp(mustGetAccAddress(\"emoney1kt0vh0ttget0xx77g6d3ttnvq2lnxx6vp3uyl0\"))\n\n\tctx := 
app.BaseApp.NewContext(false, tmproto.Header{})\n\tbadTime := ctx.BlockTime().AddDate(0, 0, -1)\n\tallowace := &feegrant.BasicAllowance{\n\t\tExpiration: &badTime,\n\t}\n\trequire.Error(t, allowace.ValidateBasic())\n\n\tctx = app.BaseApp.NewContext(\n\t\tfalse, tmproto.Header{\n\t\t\tTime: time.Now(),\n\t\t},\n\t)\n\teth := sdk.NewCoins(sdk.NewInt64Coin(\"eth\", 10))\n\tatom := sdk.NewCoins(sdk.NewInt64Coin(\"atom\", 555))\n\tsmallAtom := sdk.NewCoins(sdk.NewInt64Coin(\"atom\", 43))\n\tbigAtom := sdk.NewCoins(sdk.NewInt64Coin(\"atom\", 1000))\n\tleftAtom := sdk.NewCoins(sdk.NewInt64Coin(\"atom\", 512))\n\tnow := ctx.BlockTime()\n\toneHour := now.Add(1 * time.Hour)\n\n\tcases := map[string]struct {\n\t\tallowance *feegrant.BasicAllowance\n\t\t// all other checks are ignored if valid=false\n\t\tfee sdk.Coins\n\t\tblockTime time.Time\n\t\tvalid bool\n\t\taccept bool\n\t\tremove bool\n\t\tremains sdk.Coins\n\t}{\n\t\t\"empty\": {\n\t\t\tallowance: &feegrant.BasicAllowance{},\n\t\t\taccept: true,\n\t\t},\n\t\t\"small fee without expire\": {\n\t\t\tallowance: &feegrant.BasicAllowance{\n\t\t\t\tSpendLimit: atom,\n\t\t\t},\n\t\t\tfee: smallAtom,\n\t\t\taccept: true,\n\t\t\tremove: false,\n\t\t\tremains: leftAtom,\n\t\t},\n\t\t\"all fee without expire\": {\n\t\t\tallowance: &feegrant.BasicAllowance{\n\t\t\t\tSpendLimit: smallAtom,\n\t\t\t},\n\t\t\tfee: smallAtom,\n\t\t\taccept: true,\n\t\t\tremove: true,\n\t\t},\n\t\t\"wrong fee\": {\n\t\t\tallowance: &feegrant.BasicAllowance{\n\t\t\t\tSpendLimit: smallAtom,\n\t\t\t},\n\t\t\tfee: eth,\n\t\t\taccept: false,\n\t\t},\n\t\t\"non-expired\": {\n\t\t\tallowance: &feegrant.BasicAllowance{\n\t\t\t\tSpendLimit: atom,\n\t\t\t\tExpiration: &oneHour,\n\t\t\t},\n\t\t\tvalid: true,\n\t\t\tfee: smallAtom,\n\t\t\tblockTime: now,\n\t\t\taccept: true,\n\t\t\tremove: false,\n\t\t\tremains: leftAtom,\n\t\t},\n\t\t\"expired\": {\n\t\t\tallowance: &feegrant.BasicAllowance{\n\t\t\t\tSpendLimit: atom,\n\t\t\t\tExpiration: &now,\n\t\t\t},\n\t\t\tvalid: true,\n\t\t\tfee: smallAtom,\n\t\t\tblockTime: oneHour,\n\t\t\taccept: false,\n\t\t\tremove: true,\n\t\t},\n\t\t\"fee more than allowed\": {\n\t\t\tallowance: &feegrant.BasicAllowance{\n\t\t\t\tSpendLimit: atom,\n\t\t\t\tExpiration: &oneHour,\n\t\t\t},\n\t\t\tvalid: true,\n\t\t\tfee: bigAtom,\n\t\t\tblockTime: now,\n\t\t\taccept: false,\n\t\t},\n\t\t\"with out spend limit\": {\n\t\t\tallowance: &feegrant.BasicAllowance{\n\t\t\t\tExpiration: &oneHour,\n\t\t\t},\n\t\t\tvalid: true,\n\t\t\tfee: bigAtom,\n\t\t\tblockTime: now,\n\t\t\taccept: true,\n\t\t},\n\t\t\"expired no spend limit\": {\n\t\t\tallowance: &feegrant.BasicAllowance{\n\t\t\t\tExpiration: &now,\n\t\t\t},\n\t\t\tvalid: true,\n\t\t\tfee: bigAtom,\n\t\t\tblockTime: oneHour,\n\t\t\taccept: false,\n\t\t},\n\t}\n\n\tfor name, stc := range cases {\n\t\ttc := stc // to make scopelint happy\n\t\tt.Run(\n\t\t\tname, func(t *testing.T) {\n\t\t\t\terr := tc.allowance.ValidateBasic()\n\t\t\t\trequire.NoError(t, err)\n\n\t\t\t\tctx := app.BaseApp.NewContext(\n\t\t\t\t\tfalse, tmproto.Header{},\n\t\t\t\t).WithBlockTime(tc.blockTime)\n\n\t\t\t\t// now try to deduct\n\t\t\t\tremoved, err := tc.allowance.Accept(ctx, tc.fee, []sdk.Msg{})\n\t\t\t\tif !tc.accept {\n\t\t\t\t\trequire.Error(t, err)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\trequire.NoError(t, err)\n\n\t\t\t\trequire.Equal(t, tc.remove, removed)\n\t\t\t\tif !removed {\n\t\t\t\t\trequire.Equal(t, tc.allowance.SpendLimit, tc.remains)\n\t\t\t\t}\n\t\t\t},\n\t\t)\n\t}\n}", "title": "" }, { "docid": 
"191b0282fae06d67998886efab95f025", "score": "0.47540602", "text": "func (c *ChatFull) GetTTLPeriod() (value int, ok bool) {\n\tif c == nil {\n\t\treturn\n\t}\n\tif !c.Flags.Has(14) {\n\t\treturn value, false\n\t}\n\treturn c.TTLPeriod, true\n}", "title": "" }, { "docid": "e8913e1ae95c83cf7210b2bc449f4091", "score": "0.4748832", "text": "func GetValidationWaitTime() time.Duration {\n\ttimeout := viper.GetDuration(confValidationWaitTime)\n\tif timeout == 0 {\n\t\treturn defaultValidationWaitTime\n\t}\n\treturn timeout\n}", "title": "" }, { "docid": "f33ad119e5baeda33ec6274d23c54cab", "score": "0.4742393", "text": "func (t GID) Valid() bool {\n\treturn t > 103582791429521408\n}", "title": "" }, { "docid": "c91ba031504c76141d281621d2591437", "score": "0.4731354", "text": "func (vp *cVectorParty) GetValidity(offset int) bool {\n\tif vp.columnMode == common.AllValuesDefault {\n\t\treturn vp.defaultValue.Valid\n\t}\n\treturn vp.nulls == nil || vp.nulls.GetBool(offset)\n}", "title": "" }, { "docid": "7b273b2fdcff101d0ec3ca26769db20f", "score": "0.4728035", "text": "func (c Claims) Valid() error {\n\tif c.IssuedAt > 0 && time.Now().Unix() > c.IssuedAt+maxExp {\n\t\treturn errors.New(\"token expired\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "39cc4ed455d2a5fcf2be0e5d4090931a", "score": "0.47232842", "text": "func (p Params) Validate() error {\n\tif err := validateNoticePeriodDuration(p.DefaultRedemptionNoticePeriodDuration); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "cbeb68d584294d53c33c12d8556d6de6", "score": "0.47228137", "text": "func (m *ManagedDeviceCertificateState) GetCertificateValidityPeriodUnits()(*CertificateValidityPeriodScale) {\n val, err := m.GetBackingStore().Get(\"certificateValidityPeriodUnits\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*CertificateValidityPeriodScale)\n }\n return nil\n}", "title": "" }, { "docid": "b11a9b2fb8239bf31fe02a0dc6201c07", "score": "0.47220066", "text": "func (o *Billing) GetPeriodOk() (*string, bool) {\n\tif o == nil || o.Period == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Period, true\n}", "title": "" }, { "docid": "8a84e86c369fc837ea6181c595a9ff72", "score": "0.4716271", "text": "func (x *Duration) CheckValid() error {\n\tswitch x.check() {\n\tcase invalidNil:\n\t\treturn protoimpl.X.NewError(\"invalid nil Duration\")\n\tcase invalidUnderflow:\n\t\treturn protoimpl.X.NewError(\"duration (%v) exceeds -10000 years\", x)\n\tcase invalidOverflow:\n\t\treturn protoimpl.X.NewError(\"duration (%v) exceeds +10000 years\", x)\n\tcase invalidNanosRange:\n\t\treturn protoimpl.X.NewError(\"duration (%v) has out-of-range nanos\", x)\n\tcase invalidNanosSign:\n\t\treturn protoimpl.X.NewError(\"duration (%v) has seconds and nanos with different signs\", x)\n\tdefault:\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "4cd0a68b134da2f2e128d91f83839d2d", "score": "0.47059214", "text": "func (o *strategyTimeline) Validate(t time.Time) error {\n\to.mu.RLock()\n\tdefer o.mu.RUnlock()\n\tk := t.Format(\"15:04:05\")\n\tif _, ok := o.times[k]; ok {\n\t\treturn nil\n\t}\n\treturn ErrAccessTimeline\n}", "title": "" }, { "docid": "b909972f755e4679669d6b479ae91156", "score": "0.47056216", "text": "func (d *Dialog) GetTTLPeriod() (value int, ok bool) {\n\tif d == nil {\n\t\treturn\n\t}\n\tif !d.Flags.Has(5) {\n\t\treturn value, false\n\t}\n\treturn d.TTLPeriod, true\n}", "title": "" }, { "docid": "43fa65c23e65a707610f801ae06e7e37", "score": "0.46974927", "text": "func (rps RewardPeriods) 
Validate() error {\n\tseenPeriods := make(map[string]bool)\n\tfor _, rp := range rps {\n\t\tif seenPeriods[rp.CollateralType] {\n\t\t\treturn fmt.Errorf(\"duplicated reward period with collateral type %s\", rp.CollateralType)\n\t\t}\n\n\t\tif err := rp.Validate(); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tseenPeriods[rp.CollateralType] = true\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "45d1ddb1e51eba77bce8369be2f9805d", "score": "0.4690925", "text": "func (s *SaturationLimit) check(num int, period int64, curtime int64) bool {\n\tfor s.lock.TestAndSet() {\n\t}\n\ti := realmod(s.index-(num-1), len(s.times))\n\tb := (curtime - s.times[i]) <= period\n\ts.lock.Clear()\n\treturn b\n}", "title": "" }, { "docid": "30d68db56738d814dfd73a06fd811c08", "score": "0.46886098", "text": "func (rc *RestrictionClaim) Valid() bool {\n\treturn *rc < maxRestrictionClaim && *rc >= 0\n}", "title": "" }, { "docid": "6a80930530c6cbb706266e0f65de9efe", "score": "0.46757904", "text": "func TestAdvanceTimeTxUpdateValidators(t *testing.T) {\n\tvm, _ := defaultVM()\n\tvm.Ctx.Lock.Lock()\n\tdefer func() {\n\t\tvm.Shutdown()\n\t\tvm.Ctx.Lock.Unlock()\n\t}()\n\n\t// Case: Timestamp is after next validator start time\n\t// Add a pending validator\n\tpendingValidatorStartTime := defaultGenesisTime.Add(1 * time.Second)\n\tpendingValidatorEndTime := pendingValidatorStartTime.Add(MinimumStakingDuration)\n\tnodeIDKey, _ := vm.factory.NewPrivateKey()\n\tnodeID := nodeIDKey.PublicKey().Address()\n\taddPendingValidatorTx, err := vm.newAddValidatorTx(\n\t\tvm.minStake,\n\t\tuint64(pendingValidatorStartTime.Unix()),\n\t\tuint64(pendingValidatorEndTime.Unix()),\n\t\tnodeID,\n\t\tnodeID,\n\t\tNumberOfShares,\n\t\t[]*crypto.PrivateKeySECP256K1R{keys[0]},\n\t)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif err := vm.enqueueStaker(vm.DB, constants.PrimaryNetworkID, addPendingValidatorTx); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\ttx, err := vm.newAdvanceTimeTx(pendingValidatorStartTime)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tonCommit, onAbort, _, _, err := tx.UnsignedTx.(UnsignedProposalTx).SemanticVerify(vm, vm.DB, tx)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif validatorTx, isValidator, err := vm.isValidator(onCommit, constants.PrimaryNetworkID, nodeID); err != nil {\n\t\tt.Fatal(err)\n\t} else if !isValidator {\n\t\tt.Fatalf(\"Should have added the validator to the validator set\")\n\t} else if !validatorTx.ID().Equals(addPendingValidatorTx.ID()) {\n\t\tt.Fatalf(\"Added the wrong tx to the validator set\")\n\t} else if _, willBeValidator, err := vm.willBeValidator(onCommit, constants.PrimaryNetworkID, nodeID); err != nil {\n\t\tt.Fatal(err)\n\t} else if willBeValidator {\n\t\tt.Fatalf(\"Should have removed the validator from the pending validator set\")\n\t}\n\n\tif _, isValidator, err := vm.isValidator(onAbort, constants.PrimaryNetworkID, nodeID); err != nil {\n\t\tt.Fatal(err)\n\t} else if isValidator {\n\t\tt.Fatalf(\"Shouldn't have added the validator to the validator set\")\n\t} else if validatorTx, willBeValidator, err := vm.willBeValidator(onAbort, constants.PrimaryNetworkID, nodeID); err != nil {\n\t\tt.Fatal(err)\n\t} else if !willBeValidator {\n\t\tt.Fatalf(\"Shouldn't have removed the validator from the pending validator set\")\n\t} else if !validatorTx.ID().Equals(addPendingValidatorTx.ID()) {\n\t\tt.Fatalf(\"Added the wrong tx to the pending validator set\")\n\t}\n}", "title": "" }, { "docid": "6e7aaf5b9a4672d4e13f2e297ef2fe3c", "score": "0.46647328", "text": "func (_m *MockConfig) 
TLFValidDuration() time.Duration {\n\tret := _m.ctrl.Call(_m, \"TLFValidDuration\")\n\tret0, _ := ret[0].(time.Duration)\n\treturn ret0\n}", "title": "" }, { "docid": "3ac711283033023a794482c46b7c7d4a", "score": "0.46619907", "text": "func (e TimeRangeValidationError) Reason() string { return e.reason }", "title": "" }, { "docid": "6d25fbe1a6d7ad2e979ecc173ee5d8fa", "score": "0.46552962", "text": "func (gs gameState) IsWarmupPeriod() bool {\n\treturn gs.isWarmupPeriod\n}", "title": "" }, { "docid": "53f0e01b04d08a94f1c923c09602aef2", "score": "0.4653411", "text": "func (jm *Controller) pastActiveDeadline(job *batch.Job) bool {\n\tif job.Spec.ActiveDeadlineSeconds == nil || job.Status.StartTime == nil || jobSuspended(job) {\n\t\treturn false\n\t}\n\tduration := jm.clock.Since(job.Status.StartTime.Time)\n\tallowedDuration := time.Duration(*job.Spec.ActiveDeadlineSeconds) * time.Second\n\treturn duration >= allowedDuration\n}", "title": "" }, { "docid": "5de6cb3d4eedcf5169f46811a0a0532c", "score": "0.46469378", "text": "func (q TimeQuantum) Valid() bool {\n\tswitch q {\n\tcase \"Y\", \"YM\", \"YMD\", \"YMDH\",\n\t\t\"M\", \"MD\", \"MDH\",\n\t\t\"D\", \"DH\",\n\t\t\"H\",\n\t\t\"\":\n\t\treturn true\n\tdefault:\n\t\treturn false\n\t}\n}", "title": "" }, { "docid": "a9e249da52abee9b4adc202194b22350", "score": "0.4628929", "text": "func tokenDueForRenewal(token *oauth2.Token) bool {\n\tif token == nil {\n\t\treturn true\n\t}\n\n\t// Some tokens may never expire\n\tif token.Expiry.IsZero() {\n\t\treturn false\n\t}\n\n\texpiry := token.Expiry.Round(0)\n\tdelta := tokenExpiryDelta\n\tnow := time.Now()\n\texpiresWithinTenMinutes := expiry.Add(-delta).Before(now)\n\n\t// Try to parse the token claims to retrieve the issuedAt time\n\tif claims, err := claims.ParseClaims(token); err == nil {\n\t\tif claims.IssuedAt > 0 {\n\t\t\tissued := time.Unix(claims.IssuedAt, 0)\n\t\t\tvalidity := expiry.Sub(issued)\n\n\t\t\t// If the validity period is less than double the expiry delta, then instead\n\t\t\t// determine whether >50% of the validity period has elapsed\n\t\t\tif validity < delta*2 {\n\t\t\t\thalfValidityHasElapsed := issued.Add(validity / 2).Before(now)\n\t\t\t\treturn halfValidityHasElapsed\n\t\t\t}\n\t\t}\n\t}\n\n\treturn expiresWithinTenMinutes\n}", "title": "" }, { "docid": "98c189da985564f6f94db9dd0dcfa71d", "score": "0.46281585", "text": "func (p *Payload) Valid() error {\n\tif int64(p.ExpiresAt) < time.Now().Unix() {\n\t\treturn fmt.Errorf(\"token expired\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "f5967da303d4c30a8f4fee8a223176ea", "score": "0.4625684", "text": "func (js *JobStatus) JobDeadline() time.Time {\n return private_gestatus.GetDeadline(&js.js)\n}", "title": "" }, { "docid": "fb08d28989b5c0b5500b5a4699511758", "score": "0.46176547", "text": "func (m *DeviceComplianceActionItem) GetGracePeriodHours()(*int32) {\n return m.gracePeriodHours\n}", "title": "" }, { "docid": "1fb9701ef0dc627220075d83e96619ac", "score": "0.46169844", "text": "func validExpiration(expiration int32) bool {\n\tfor _, ttl := range []int32{3600, 86400, 604800} {\n\t\tif ttl == expiration {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "a50b4db09ea52d7419bf60ddc59933bc", "score": "0.4598566", "text": "func MachineDeploymentPauseTimeIsValid(val string) bool {\n\td, err := iso8601.ParseDuration(val)\n\tif err != nil {\n\t\treturn false\n\t}\n\n\t// AWS limits the duration to 1 hour\n\tif d.Hours() > 1.0 {\n\t\treturn false\n\t}\n\n\treturn true\n}", "title": "" }, { 
"docid": "4555ed6fcc7cf2c1efc9e5b94d95644e", "score": "0.4591045", "text": "func (cs StoreType) TimeoutVPCValidity() time.Duration {\n\treturn cs.timeoutVPCValidity\n}", "title": "" }, { "docid": "363a5d96afce2ce09b1392def338e56d", "score": "0.4588739", "text": "func (kl *Kubelet) pastActiveDeadline(pod *api.Pod) bool {\n\tif pod.Spec.ActiveDeadlineSeconds != nil {\n\t\tpodStatus, ok := kl.statusManager.GetPodStatus(pod.UID)\n\t\tif !ok {\n\t\t\tpodStatus = pod.Status\n\t\t}\n\t\tif !podStatus.StartTime.IsZero() {\n\t\t\tstartTime := podStatus.StartTime.Time\n\t\t\tduration := kl.clock.Since(startTime)\n\t\t\tallowedDuration := time.Duration(*pod.Spec.ActiveDeadlineSeconds) * time.Second\n\t\t\tif duration >= allowedDuration {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "2733488073136bf8b40ffdd64510c570", "score": "0.45857123", "text": "func validAssumeRoleDuration(v interface{}, k string) (ws []string, errors []error) {\n\tduration, err := time.ParseDuration(v.(string))\n\n\tif err != nil {\n\t\terrors = append(errors, fmt.Errorf(\"%q cannot be parsed as a duration: %w\", k, err))\n\t\treturn\n\t}\n\n\tif duration.Minutes() < 15 || duration.Hours() > 12 {\n\t\terrors = append(errors, fmt.Errorf(\"duration %q must be between 15 minutes (15m) and 12 hours (12h), inclusive\", k))\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "f9b0519e82d6118d2c99abbabcc0c5c3", "score": "0.45852888", "text": "func StorageMining_SurprisePoStChallengeDuration() abi.ChainEpoch {\n\tPARAM_FINISH()\n\tconst CHALLENGE_DURATION = abi.ChainEpoch(4) // placeholder, 2 hours\n\treturn CHALLENGE_DURATION\n}", "title": "" }, { "docid": "a923d69001aacb15f99b3f277eabc2f1", "score": "0.45838967", "text": "func (x *Duration) IsValid() bool {\n\treturn x.check() == 0\n}", "title": "" }, { "docid": "e5ffa891b97bc76f090a6b3e5ecb84b4", "score": "0.45742074", "text": "func (d *TimePeriod_Duration) Validate(c *validation.Context) {\n\tif _, err := d.ToSeconds(); err != nil {\n\t\tc.Errorf(\"%s\", err)\n\t}\n}", "title": "" }, { "docid": "b958705aa6185f5f36b3907c8af3cc65", "score": "0.45701048", "text": "func (p Params) Validate() error {\n\t// session count constraints\n\tif p.SessionNodeCount > 25 || p.SessionNodeCount < 1 {\n\t\treturn errors.New(\"invalid session node count\")\n\t}\n\t// claim submission window constraints\n\tif p.ClaimSubmissionWindow < 2 {\n\t\treturn errors.New(\"waiting period must be at least 2 sessions\")\n\t}\n\t// verify each supported blockchain\n\tfor _, chain := range p.SupportedBlockchains {\n\t\tif err := NetworkIdentifierVerification(chain); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\t// ensure replay attack burn multiplier is above 0\n\tif p.ReplayAttackBurnMultiplier < 0 {\n\t\treturn errors.New(\"invalid replay attack burn multiplier\")\n\t}\n\t// ensure claim expiration\n\tif p.ClaimExpiration < 0 {\n\t\treturn errors.New(\"invalid claim expiration\")\n\t}\n\tif p.ClaimExpiration < p.ClaimSubmissionWindow {\n\t\treturn errors.New(\"unverified Proof expiration is far too short, must be greater than Proof waiting period\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "9300a808e8b4e7c26dff7f0c9dedf84c", "score": "0.45659572", "text": "func (schedule *Schedule) Validate() error {\n\tswitch schedule.Type {\n\tcase TimingSchedule:\n\t\t// 默认间隔循环间隔是1\n\t\tif schedule.Number <= 0 {\n\t\t\tschedule.Number = 1\n\t\t}\n\t\tif err := isValidJobTime(schedule.Time); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif schedule.Frequency != setting.FrequencyDay 
&&\n\t\t\tschedule.Frequency != setting.FrequencyMondy &&\n\t\t\tschedule.Frequency != setting.FrequencyTuesday &&\n\t\t\tschedule.Frequency != setting.FrequencyWednesday &&\n\t\t\tschedule.Frequency != setting.FrequencyThursday &&\n\t\t\tschedule.Frequency != setting.FrequencyFriday &&\n\t\t\tschedule.Frequency != setting.FrequencySaturday &&\n\t\t\tschedule.Frequency != setting.FrequencySunday {\n\t\t\treturn fmt.Errorf(\"%s 定时任务频率错误\", e.InvalidFormatErrMsg)\n\t\t}\n\t\treturn nil\n\n\tcase GapSchedule:\n\t\tif schedule.Frequency != setting.FrequencyHours &&\n\t\t\tschedule.Frequency != setting.FrequencyMinutes {\n\t\t\treturn fmt.Errorf(\"%s 间隔任务频率错误\", e.InvalidFormatErrMsg)\n\t\t}\n\t\tif schedule.Number <= 0 {\n\t\t\treturn fmt.Errorf(\"%s 间隔循环时间间隔不能小于等于0\", e.InvalidFormatErrMsg)\n\t\t}\n\t\t//if schedule.Frequency == FrequencyMinutes && schedule.Number < 1 {\n\t\t//\tlog.Info(\"minimum schedule minutes must >= 30\")\n\t\t//\treturn fmt.Errorf(\"%s 定时任务最短为30分钟\", e.InvalidFormatErrMsg)\n\t\t//\n\t\t//}\n\t\treturn nil\n\n\tdefault:\n\t\treturn fmt.Errorf(\"%s 间隔任务模式未设置\", e.InvalidFormatErrMsg)\n\t}\n}", "title": "" }, { "docid": "f84ca10755d957548e24a7af145a0a84", "score": "0.45656705", "text": "func (m *JobPolicyInterval) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateBegin(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateEnd(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateImpact(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "130faa4b213050359d9d321093a0569f", "score": "0.45632535", "text": "func (s *session) validateEndTime() {\n\telapsed := s.getElapsedTimeInSeconds()\n\n\t// If the elapsed time is greater than the duration\n\t// of the session, the end time must be normalised\n\t// to a time that will fulfill the exact duration\n\t// of the session\n\tif elapsed > s.Duration*60 {\n\t\t// secondsBeforeLastPart represents the number of seconds\n\t\t// elapsed without including the concluding part of the\n\t\t// session timeline\n\t\tvar secondsBeforeLastPart int\n\n\t\tfor i := 0; i < len(s.Timeline)-1; i++ {\n\t\t\tv := s.Timeline[i]\n\t\t\tsecondsBeforeLastPart += int(v.EndTime.Sub(v.StartTime).Seconds())\n\t\t}\n\n\t\tlastIndex := len(s.Timeline) - 1\n\t\tlastPart := s.Timeline[lastIndex]\n\n\t\tsecondsLeft := (60 * s.Duration) - secondsBeforeLastPart\n\t\tend := lastPart.StartTime.Add(\n\t\t\ttime.Duration(secondsLeft * int(time.Second)),\n\t\t)\n\t\ts.Timeline[lastIndex].EndTime = end\n\t\ts.EndTime = end\n\t\ts.Completed = true\n\t}\n}", "title": "" } ]
da8b7b7c5ba172209ecf8327c5d100ea
LoggingMiddleware takes a logger as a dependency and returns an AuthService Middleware.
[ { "docid": "06c155a520d681bc0f25e1926aac88fc", "score": "0.82569265", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next AuthService) AuthService {\n\t\treturn &loggingMiddleware{logger, next}\n\t}\n\n}", "title": "" } ]
[ { "docid": "00e85983d8e086a3443e3f3a8d84f15d", "score": "0.81303465", "text": "func LoggingMiddleware(svc authz.Service, logger log.Logger) authz.Service {\n\treturn &loggingMiddleware{logger, svc}\n}", "title": "" }, { "docid": "00e85983d8e086a3443e3f3a8d84f15d", "score": "0.81303465", "text": "func LoggingMiddleware(svc authz.Service, logger log.Logger) authz.Service {\n\treturn &loggingMiddleware{logger, svc}\n}", "title": "" }, { "docid": "8540a7e3844ba8ad6e16b001a6b984ed", "score": "0.80269855", "text": "func LoggingMiddleware(svc authn.Service, logger log.Logger) authn.Service {\n\treturn &loggingMiddleware{logger, svc}\n}", "title": "" }, { "docid": "6e00a387d425c357820fefa4c2b6c863", "score": "0.7925298", "text": "func LoggingMiddleware(svc things.Service, logger log.Logger) things.Service {\n\treturn &loggingMiddleware{logger, svc}\n}", "title": "" }, { "docid": "df7c8aa5539b12824286d80003014826", "score": "0.77722335", "text": "func LoggingMiddleware(svc pms.Service, logger log.Logger) pms.Service {\n\treturn &loggingMiddleware{logger, svc}\n}", "title": "" }, { "docid": "0ebb924f8a435209a638cdfa685a3c5e", "score": "0.77007043", "text": "func LoggingMiddleware(logger log.Logger) service.ServiceMiddleware {\n\treturn func(next service.Service) service.Service {\n\t\treturn loggingMiddleware{next, logger}\n\t}\n}", "title": "" }, { "docid": "0ebb924f8a435209a638cdfa685a3c5e", "score": "0.77007043", "text": "func LoggingMiddleware(logger log.Logger) service.ServiceMiddleware {\n\treturn func(next service.Service) service.Service {\n\t\treturn loggingMiddleware{next, logger}\n\t}\n}", "title": "" }, { "docid": "bb5cc7b761da80e60ddce0fae45a2645", "score": "0.7663125", "text": "func LoggingMiddleware(svc ws.Service, logger log.Logger) ws.Service {\n\treturn &loggingMiddleware{logger, svc}\n}", "title": "" }, { "docid": "1e14f030e0574f845edc670cc81307fc", "score": "0.7599548", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next Service) Service {\n\t\treturn loggingMiddleware{\n\t\t\tlogger,\n\t\t\tnext,\n\t\t}\n\t}\n}", "title": "" }, { "docid": "9b8dd761f0c29f2e43d9380f832183c3", "score": "0.7576541", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next Service) Service {\n\t\treturn loggingMiddleware{logger, next}\n\t}\n}", "title": "" }, { "docid": "9b8dd761f0c29f2e43d9380f832183c3", "score": "0.7576541", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next Service) Service {\n\t\treturn loggingMiddleware{logger, next}\n\t}\n}", "title": "" }, { "docid": "3f8d9e3e5b4986e713eddec787db85aa", "score": "0.7512794", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next Service) Service {\n\t\treturn loggingMiddleware{\n\t\t\tnext: next,\n\t\t\tlogger: logger,\n\t\t}\n\t}\n}", "title": "" }, { "docid": "0ec4c1697aea52c9f3975536c3f6427a", "score": "0.7494059", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next Service) Service {\n\t\treturn &loggingMiddleware{\n\t\t\tlogger: logger,\n\t\t\tnext: next,\n\t\t}\n\t}\n}", "title": "" }, { "docid": "0ec4c1697aea52c9f3975536c3f6427a", "score": "0.7494059", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next Service) Service {\n\t\treturn &loggingMiddleware{\n\t\t\tlogger: logger,\n\t\t\tnext: next,\n\t\t}\n\t}\n}", "title": "" }, { "docid": "4e26afaea62e4722d9245f3c2dc77c44", "score": "0.74927497", "text": "func LoggingMiddleware(logger log.Logger) 
Middleware {\n\treturn func(next Service) Service {\n\t\treturn &loggingMiddleware{\n\t\t\tnext: next,\n\t\t\tlogger: logger,\n\t\t}\n\t}\n}", "title": "" }, { "docid": "4e26afaea62e4722d9245f3c2dc77c44", "score": "0.74927497", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next Service) Service {\n\t\treturn &loggingMiddleware{\n\t\t\tnext: next,\n\t\t\tlogger: logger,\n\t\t}\n\t}\n}", "title": "" }, { "docid": "c7d1cdba1e350aee066c89c393f54413", "score": "0.748107", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next UsersService) UsersService {\n\t\treturn &loggingMiddleware{logger, next}\n\t}\n\n}", "title": "" }, { "docid": "f1463c0db3e6f3888b19b925c042e53a", "score": "0.7461474", "text": "func LoggingMiddleware(svc k8s_client.Service, logger log.Logger) k8s_client.Service {\n\treturn &loggingMiddleware{logger, svc}\n}", "title": "" }, { "docid": "6c8fc676d83354a16676440b9ca60521", "score": "0.7333162", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next service.StringService) service.StringService {\n\t\treturn &loggingMiddleware{\n\t\t\tlogger: logger,\n\t\t\tnext: next,\n\t\t}\n\t}\n}", "title": "" }, { "docid": "70111c49f4c53b9a7153418ea2d447ef", "score": "0.722125", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next VentasService) VentasService {\n\t\treturn &loggingMiddleware{logger, next}\n\t}\n\n}", "title": "" }, { "docid": "1b0db818f0d802bc9e9433d437611af5", "score": "0.71617097", "text": "func LoggingMiddleware(next WTSService, logger log.Logger) WTSService {\n\treturn loggingMiddleware{\n\t\tlogger: logger,\n\t\tnext: next,\n\t}\n}", "title": "" }, { "docid": "266ef491971994b8c8c0c5267d84f2e3", "score": "0.71368563", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next BalanceService) BalanceService {\n\t\treturn &loggingMiddleware{logger, next}\n\t}\n\n}", "title": "" }, { "docid": "7833f12ed91f3bef6c44c999ec50ec0f", "score": "0.7039567", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next FaultService) FaultService {\n\t\treturn &loggingMiddleware{logger, next}\n\t}\n\n}", "title": "" }, { "docid": "376ad2e904a3c3a02491226306d74d6c", "score": "0.70105916", "text": "func NewAuthMiddleware(svc interfaces.Service, r interfaces.Repository) interfaces.Service {\n\treturn &authMiddleware{\n\t\tnext: svc,\n\t\trepository: r,\n\t}\n}", "title": "" }, { "docid": "714f8b564acf5194dad831ef86ce5000", "score": "0.69499797", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next NotificationService) NotificationService {\n\t\treturn &loggingMiddleware{logger, next}\n\t}\n\n}", "title": "" }, { "docid": "a6b141ac9bfa0b055753cd38d2a3e300", "score": "0.68789047", "text": "func LoggingMiddleware(next stdhttp.Handler) stdhttp.Handler {\n\treturn LoggingMiddlewareWithOptions(Options{})(next)\n}", "title": "" }, { "docid": "2936bd0cd6a4cbcfbcfa5673b70a2449", "score": "0.67077345", "text": "func LoggingMiddleware(logger Logger) endpoint.Middleware {\n\treturn func(e endpoint.Endpoint) endpoint.Endpoint {\n\t\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t\tlogger.TraceContext(ctx, \"processing request\")\n\n\t\t\tdefer func(begin time.Time) {\n\t\t\t\tlogger.TraceContext(ctx, \"processing request finished\", map[string]interface{}{\n\t\t\t\t\t\"took\": time.Since(begin),\n\t\t\t\t})\n\t\t\t}(time.Now())\n\n\t\t\treturn e(ctx, 
request)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "856e09837cae2acafd4ffcadeac674ff", "score": "0.66955364", "text": "func LoggingMiddleware(logger log.Logger) Middleware {\n\treturn func(next Calculator) Calculator {\n\t\treturn loggingMiddleware{logger, next}\n\t}\n}", "title": "" }, { "docid": "33b263ec5d3d0be05ea166b2f3684cd0", "score": "0.6659244", "text": "func NewLoggingMiddleware(logger log.Logger) Middleware {\n\treturn &loggingMiddleware{middleware: &middleware{}, logger: logger}\n}", "title": "" }, { "docid": "7ee91d21a772a04cee0f4845ab8721d4", "score": "0.65999776", "text": "func (s *Service) LoggingMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t// Do stuff here\n\t\tl := log.WithFields(log.Fields{\n\t\t\t\"environment\": s.environment,\n\t\t\t\"request-path\": r.RequestURI,\n\t\t\t\"request-method\": r.Method,\n\t\t})\n\t\tl.Infoln()\n\t\t// Call the next handler, which can be another middleware in the chain, or the final handler.\n\t\tm := httpsnoop.CaptureMetrics(next, w, r)\n\t\tl.WithFields(log.Fields{\n\t\t\t\"request-duration\": m.Duration,\n\t\t\t\"response-code\": m.Code,\n\t\t}).Infoln(\"handler response\")\n\t})\n}", "title": "" }, { "docid": "de53536a18fcee596298f26e522c0dba", "score": "0.6511557", "text": "func NewLoggerMiddleware(l *logrus.Logger) Middleware {\n\treturn &loggerMiddleware{logger: l}\n}", "title": "" }, { "docid": "2f8e1427fcca6882780d14b31cc03d51", "score": "0.6481731", "text": "func NewMiddleware(l *zap.Logger) func(next http.Handler) http.Handler {\n\treturn loggerMiddleware{l}.Handler\n}", "title": "" }, { "docid": "25e90456f634333777929c518ca11f98", "score": "0.64112616", "text": "func LogServiceMiddleware(logger log.Logger, store string) ServiceMiddleware {\n\treturn func(next Service) Service {\n\t\tlogger = log.NewContext(logger).With(\n\t\t\t\"service\", \"event\",\n\t\t\t\"store\", store,\n\t\t)\n\n\t\treturn &logService{logger: logger, next: next}\n\t}\n}", "title": "" }, { "docid": "6e05ac11c875381769771490cfafce22", "score": "0.63867146", "text": "func (s *server) loggingMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tctx := r.Context()\n\t\tlogger := logging.GetLogger(ctx)\n\t\trouteName := mux.CurrentRoute(r).GetName()\n\t\tmethod := r.Method\n\t\trequestLogger := logger.With(\n\t\t\tzap.String(\"route\", routeName),\n\t\t\tzap.String(\"method\", method),\n\t\t)\n\n\t\tctx = logging.ContextWithLogger(ctx, requestLogger)\n\t\tctx = context.WithValue(ctx, \"route\", routeName)\n\t\tctx = context.WithValue(ctx, \"method\", method)\n\n\t\tlogging.GetLogger(ctx).Info(\"request started\")\n\t\tnext.ServeHTTP(w, r.WithContext(ctx))\n\t\tlogging.GetLogger(ctx).Info(\"request completed\")\n\t})\n}", "title": "" }, { "docid": "b971f140ff2c2cc1ec91ca6d528f7ff4", "score": "0.63818973", "text": "func LoggerMiddleware(handle func(w http.ResponseWriter, r *http.Request)) func(w http.ResponseWriter, r *http.Request) {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\thandle(w, r)\n\t}\n}", "title": "" }, { "docid": "535d74a8a50d113f7cb2a77b95cb69ff", "score": "0.62465036", "text": "func SetLoggerMiddleware(l *log.Entry) func(stdhttp.Handler) stdhttp.Handler {\n\treturn func(next stdhttp.Handler) stdhttp.Handler {\n\t\treturn stdhttp.HandlerFunc(func(w stdhttp.ResponseWriter, r *stdhttp.Request) {\n\t\t\tctx := r.Context()\n\t\t\tctx = log.Set(ctx, l)\n\t\t\tr = 
r.WithContext(ctx)\n\t\t\tnext.ServeHTTP(w, r)\n\t\t})\n\t}\n}", "title": "" }, { "docid": "5cf194eee89eedec9d7a6fa9c7d6c706", "score": "0.6238449", "text": "func newLoggingMiddleware(logger log.Logger) linkManagerMiddleware {\n\treturn func(next om.LinkManager) om.LinkManager {\n\t\treturn loggingMiddleware{next, logger}\n\t}\n}", "title": "" }, { "docid": "df51e1247834f0c86278efb1aa72cd2c", "score": "0.62232524", "text": "func AuthMiddleware() *Middleware {\n\tm := make([]*Middleware, 0, 1+len(extraAuthMiddlewares))\n\tm = append(m, RequireAuthMiddleware)\n\tm = append(m, extraAuthMiddlewares...)\n\treturn composeMiddleware(m...)\n}", "title": "" }, { "docid": "55bc4ed8ae818a55eb6ffc4948063bb2", "score": "0.6217549", "text": "func NewAuthMiddleware(l lib.LogI, ctx *lib.Context) *AuthMiddleware {\n\treturn &AuthMiddleware{\n\t\tLog: l,\n\t\tContext: ctx,\n\t}\n}", "title": "" }, { "docid": "b57cf43d309cd67138b2a535ce2aef80", "score": "0.61208946", "text": "func (s *Setup) LoggerMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tstart := time.Now()\n\n\t\ts.logger.Infow(\n\t\t\t\"request_logging\",\n\t\t\t\"method\", r.Method,\n\t\t\t\"url\", r.URL.String(),\n\t\t\t\"agent\", r.UserAgent(),\n\t\t\t\"referer\", r.Referer(),\n\t\t\t\"proto\", r.Proto,\n\t\t\t\"remote_address\", r.RemoteAddr,\n\t\t\t\"latency\", time.Since(start),\n\t\t)\n\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "title": "" }, { "docid": "c74d31f43559a3e0a2da14dc82a33095", "score": "0.6077583", "text": "func LoggingMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\ttoken := []modal.Token{}\n\n\t\tif _, ok := r.Header[\"Authorization\"]; !ok {\n\t\t\taccessTokenRequired.SendAPI(w, nil)\n\t\t\treturn\n\t\t}\n\n\t\ts := strings.Split(r.Header[\"Authorization\"][0], \" \")\n\t\tdb := GetDBInstance()\n\t\tdefer db.Close()\n\t\terr := db.Select(&token, \"SELECT * FROM tokens WHERE access_token='\"+s[1]+\"'\")\n\t\tif err != nil {\n\t\t\tfmt.Println(\"db error\", err)\n\t\t}\n\n\t\tif len(token) <= 0 {\n\t\t\taccessTokenInvalid.SendAPI(w, nil)\n\t\t\treturn\n\t\t}\n\n\t\tATime := ParseTimestamp(token[0].AccessTokenTime)\n\t\taccessDuration, _ := time.ParseDuration(strconv.Itoa(token[0].AccessTokenExpiry) + \"s\")\n\n\t\tif ATime.Add(accessDuration).Unix() < time.Now().Unix() {\n\t\t\taccessTokenExpired.SendAPI(w, nil)\n\t\t\treturn\n\t\t}\n\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "title": "" }, { "docid": "f66e5cf1999231ef38e2c93ae8badaab", "score": "0.59948397", "text": "func NewLogger(prefix string, logf func(ctx context.Context, format string, args ...interface{})) datastore.Middleware {\n\treturn &logger{Prefix: prefix, Logf: logf, counter: 1}\n}", "title": "" }, { "docid": "b1877320fa94956419492d4e4431bb32", "score": "0.59808886", "text": "func LoggingMiddleware(next http.Handler) http.HandlerFunc {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tstart := time.Now()\n\t\twrapped := wrapResponseWriter(w)\n\t\tnext.ServeHTTP(wrapped, r)\n\t\tLog.Info().Int(\"status\", wrapped.status).Str(\"method\", r.Method).Str(\"path\", r.URL.EscapedPath()).Str(\"duration\", time.Since(start).String()).Msg(\"Request processed\")\n\t})\n}", "title": "" }, { "docid": "9bf4f0b5c95aad909d5e9f573878e62f", "score": "0.59721905", "text": "func AuthMiddleware(repo mongodb.UserRepo) func(http.Handler) http.Handler {\n\treturn func(next http.Handler) http.Handler {\n\t\treturn 
http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\ttoken, err := parseToken(r)\n\t\t\tif err != nil {\n\t\t\t\tnext.ServeHTTP(w, r)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tclaims, ok := token.Claims.(jwt.MapClaims)\n\n\t\t\tif !ok || !token.Valid {\n\t\t\t\tnext.ServeHTTP(w, r)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tuser, err := repo.GetUser(claims[\"jti\"].(string))\n\t\t\tif err != nil {\n\t\t\t\tnext.ServeHTTP(w, r)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tctx := context.WithValue(r.Context(), CurrentUserKey, user)\n\n\t\t\tnext.ServeHTTP(w, r.WithContext(ctx))\n\t\t})\n\t}\n}", "title": "" }, { "docid": "aed9fc1a8cff0d2be29ee1c1b49142c7", "score": "0.5971733", "text": "func LogLogger( l *log.Logger ) mux.MiddlewareFunc {\n return FormatLogger( l.Printf )\n}", "title": "" }, { "docid": "7cb7b085dfc2236917133a5f4c43e552", "score": "0.5970652", "text": "func loggerMiddleware() martini.Handler {\n\treturn func(res http.ResponseWriter, req *http.Request, c martini.Context) {\n\t\tstart := time.Now()\n\t\taddr := req.Header.Get(\"X-Real-IP\")\n\t\tif addr == \"\" {\n\t\t\taddr = req.Header.Get(\"X-Forwarded-For\")\n\t\t\tif addr == \"\" {\n\t\t\t\taddr = req.RemoteAddr\n\t\t\t}\n\t\t}\n\t\trw := res.(martini.ResponseWriter)\n\t\tc.Next()\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"method\": req.Method,\n\t\t\t\"path\": req.URL.Path,\n\t\t\t\"addr\": addr,\n\t\t\t\"status\": rw.Status(),\n\t\t\t\"status_text\": http.StatusText(rw.Status()),\n\t\t\t\"duration\": time.Since(start),\n\t\t}).Info(\"Completed\")\n\t}\n}", "title": "" }, { "docid": "0d327c841c62cd26fb7a97fca64bcfbf", "score": "0.5969753", "text": "func Logger(appName string) echo.MiddlewareFunc {\n\treturn LoggerWithConfig(DefaultLoggerConfig(appName))\n}", "title": "" }, { "docid": "0db2451ddc5bbdff12b5328f6b3a21b1", "score": "0.5954002", "text": "func NewMiddleware(router *mux.Router, options ...FuncOption) *Middleware {\n\tm := &Middleware{\n\t\toutput: os.Stdout,\n\t\toptions: Options{\n\t\t\tLogging: true,\n\t\t},\n\t\trouter: router,\n\t}\n\n\tfor _, opt := range options {\n\t\topt(m)\n\t}\n\n\tm.RegisterHook(m.loggingProcessor)\n\treturn m\n}", "title": "" }, { "docid": "0cdd174ac5083fd6ddd6a56a61bcfe82", "score": "0.5953553", "text": "func LoggerMiddleware(logger *logrus.Logger) gin.HandlerFunc {\n\tlogger.SetLevel(log.ErrorLevel)\n\tlogger.SetFormatter(&log.TextFormatter{})\n\n\treturn func(c *gin.Context) {\n\t\t// Start timer\n\t\tstart := time.Now().UTC()\n\n\t\t// log.Printf(\"%s %s %s %s\", c.Request.Method, c.Request.Host, c.Request.RequestURI, c.Request.Proto)\n\t\t// Process Request\n\t\tc.Next()\n\n\t\tif status := c.Writer.Status(); status != 200 {\n\t\t\tentry := logger.WithFields(log.Fields{\n\t\t\t\t\"client_ip\": https.GetClientIP(c),\n\t\t\t\t\"duration\": start,\n\t\t\t\t\"method\": c.Request.Method,\n\t\t\t\t\"path\": c.Request.RequestURI,\n\t\t\t\t\"status\": c.Writer.Status(),\n\t\t\t\t\"referrer\": c.Request.Referer(),\n\t\t\t\t\"request_id\": c.Writer.Header().Get(\"X-Request-Id\"),\n\t\t\t\t\"user_id\": https.GetUserID(c),\n\t\t\t})\n\n\t\t\tentry.Error(c.Errors.String())\n\t\t}\n\t}\n}", "title": "" }, { "docid": "f3e294341e8aebbd616c0015441de36e", "score": "0.59486234", "text": "func loggerMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tstart := time.Now()\n\n\t\tnext.ServeHTTP(w, r)\n\n\t\tlog.Printf(\"[%s]\\t%s\\t%s\", r.Method, r.URL.String(), time.Since(start))\n\t})\n}", "title": "" }, { "docid": 
"6312aaa6855da670e36866a4ed3c8726", "score": "0.59117347", "text": "func makeLogger(basePath string) middlewareFunc {\n\treturn func(next http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tlog := structlog.FromContext(r.Context(), nil)\n\t\t\tlog.SetDefaultKeyvals(\n\t\t\t\tdef.LogRemote, r.RemoteAddr,\n\t\t\t\tdef.LogHTTPStatus, \"\",\n\t\t\t\tdef.LogHTTPMethod, r.Method,\n\t\t\t\tdef.LogFunc, path.Join(\"/\", strings.TrimPrefix(r.URL.Path, basePath)),\n\t\t\t)\n\t\t\tr = r.WithContext(structlog.NewContext(r.Context(), log))\n\n\t\t\tnext.ServeHTTP(w, r)\n\t\t})\n\t}\n}", "title": "" }, { "docid": "79f9edcf72abd7f26da68a905f08a6e8", "score": "0.5898414", "text": "func (l *Logger) Middleware(next http.Handler) http.Handler {\n\treturn httpHandler{\n\t\tlogger: l,\n\t\tnext: next,\n\t}\n}", "title": "" }, { "docid": "86b6bf3a77930eae3809abec148c1299", "score": "0.5840835", "text": "func Logging() Middleware {\n\n\t// Create a new Middleware\n\treturn func(f http.HandlerFunc) http.HandlerFunc {\n\n\t\t// Define the http.HandlerFunc\n\t\treturn func(w http.ResponseWriter, r *http.Request) {\n\n\t\t\t// Do middleware things\n\t\t\tstart := time.Now()\n\t\t\tdefer func() { log.Println(r.URL.Path, time.Since(start)) }()\n\n\t\t\t// Call the next middleware/handler in chain\n\t\t\tf(w, r)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "96835393a28e91b6bb225721c1cccdf8", "score": "0.58328223", "text": "func Logger(log logger.Logger) func(next http.Handler) http.Handler {\n\treturn chilogger{\n\t\tlog,\n\t}.middleware\n}", "title": "" }, { "docid": "a55926454ec60300362a8dfe71910471", "score": "0.5792838", "text": "func Middleware() func(next echo.HandlerFunc) echo.HandlerFunc {\n\tl := logger.New()\n\n\treturn func(next echo.HandlerFunc) echo.HandlerFunc {\n\t\treturn func(c echo.Context) error {\n\t\t\t// Record the time start time of the middleware invocation\n\t\t\tt1 := time.Now()\n\n\t\t\t// Generate a new UUID that will be used to recognize this particular\n\t\t\t// request\n\t\t\tid, err := uuid.NewV4()\n\t\t\tif err != nil {\n\t\t\t\treturn errors.WithStack(err)\n\t\t\t}\n\n\t\t\t// Create a child logger with the unique UUID created and attach it to\n\t\t\t// the echo.Context. By attaching it to the context it can be fetched by\n\t\t\t// later middleware or handler functions to emit events with a logger\n\t\t\t// that contains this ID. This is useful as it allows us to emit all\n\t\t\t// events with the same request UUID.\n\t\t\tlog := l.ID(id.String())\n\t\t\tc.Set(key, log)\n\n\t\t\t// Execute the next middleware/handler function in the stack.\n\t\t\tif err := next(c); err != nil {\n\t\t\t\tc.Error(err)\n\t\t\t}\n\n\t\t\t// We have now succeeded executing all later middlewares in the stack and\n\t\t\t// have come back to the logger middleware. Record the time at which we\n\t\t\t// came back to this middleware. 
We can use the difference between t2 and\n\t\t\t// t1 to calculate the request duration.\n\t\t\tt2 := time.Now()\n\n\t\t\t// Get the request IP address.\n\t\t\tvar ipAddress string\n\t\t\tif xff := c.Request().Header.Get(\"x-forwarded-for\"); xff != \"\" {\n\t\t\t\tsplit := strings.Split(xff, \",\")\n\t\t\t\tipAddress = strings.TrimSpace(split[len(split)-1])\n\t\t\t} else {\n\t\t\t\tipAddress = c.Request().RemoteAddr\n\t\t\t}\n\n\t\t\t// Emit a log event with as much metadata as we can.\n\t\t\tlog.Root(logger.Data{\n\t\t\t\t\"status_code\": c.Response().Status,\n\t\t\t\t\"method\": c.Request().Method,\n\t\t\t\t\"path\": c.Request().URL.Path,\n\t\t\t\t\"route\": c.Path(),\n\t\t\t\t\"response_time\": t2.Sub(t1).Seconds() * 1000,\n\t\t\t\t\"referer\": c.Request().Referer(),\n\t\t\t\t\"user_agent\": c.Request().UserAgent(),\n\t\t\t\t\"ip_address\": ipAddress,\n\t\t\t\t\"trace_id\": c.Request().Header.Get(\"x-amzn-trace-id\"),\n\t\t\t}).Info(\"request handled\")\n\n\t\t\t// Succeeded executing the middleware invocation. A nil response\n\t\t\t// represents no errors happened.\n\t\t\treturn nil\n\t\t}\n\t}\n}", "title": "" }, { "docid": "ffe2ec800ec1a98f0f359639a062c06d", "score": "0.57814723", "text": "func attachLogger(next http.HandlerFunc, logger log.Logger) http.HandlerFunc {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tctx := log.Context(r.Context(), logger)\n\t\tr = r.WithContext(ctx)\n\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "title": "" }, { "docid": "45760a1c230220310e26c8c7f2d446f9", "score": "0.5777517", "text": "func (service *middlewareProviderService) CreateLoggingMiddleware(endpointName string) endpoint.Middleware {\n\treturn func(next endpoint.Endpoint) endpoint.Endpoint {\n\t\treturn func(ctx context.Context, request interface{}) (response interface{}, err error) {\n\t\t\tdefer func(begin time.Time) {\n\t\t\t\tend := time.Now()\n\t\t\t\tduration := time.Since(begin)\n\t\t\t\tfields := []zap.Field{\n\t\t\t\t\tzap.String(\"endpoint_name\", endpointName),\n\t\t\t\t\tzap.String(\"start_time\", begin.Format(service.dateTimeFormat)),\n\t\t\t\t\tzap.String(\"end_time\", end.Format(service.dateTimeFormat)),\n\t\t\t\t\tzap.String(\"duration\", duration.String()),\n\t\t\t\t}\n\n\t\t\t\tif service.logMessageDetails {\n\t\t\t\t\tfields = append(fields, zap.Any(\"request\", request), zap.Any(\"response\", response))\n\t\t\t\t}\n\n\t\t\t\tif err == nil {\n\t\t\t\t\tservice.logger.Info(\n\t\t\t\t\t\t\"Endpoint call succeeded\",\n\t\t\t\t\t\tfields...)\n\t\t\t\t} else {\n\t\t\t\t\tfields = append(fields, zap.Error(err))\n\t\t\t\t\tservice.logger.Error(\n\t\t\t\t\t\t\"Endpoint call failed\",\n\t\t\t\t\t\tfields...)\n\t\t\t\t}\n\t\t\t}(time.Now())\n\n\t\t\treturn next(ctx, request)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "f795080569d8519ab0325cb31b82a82a", "score": "0.57666665", "text": "func NewAuthMiddlewareHandler(handler http.Handler, auth influxdb.Authorizer) http.Handler {\n\treturn &authMiddlewareHandler{\n\t\thandler: handler,\n\t\tauth: auth,\n\t}\n}", "title": "" }, { "docid": "ed389d8beb34795595cffbdb13cb5ffd", "score": "0.57357633", "text": "func InjectRequestLogger(l Logger) Middleware {\n\treturn func(inner HandlerFunc) HandlerFunc {\n\t\treturn func(rw http.ResponseWriter, req *http.Request) error {\n\t\t\tif l != nil {\n\t\t\t\treq = Inject(req, ctxKeyLogger, l)\n\t\t\t}\n\n\t\t\treturn inner(rw, req)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "5b983269e5bc22c6ec7a290d316fcb66", "score": "0.5727905", "text": "func (mw loggingMiddleware) Logger() 
log.Logger {\n\treturn mw.logger\n}", "title": "" }, { "docid": "d7c010fe97dcc18392e69ab593bb19d9", "score": "0.5722908", "text": "func AuthMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tuser, _ := UserFromHTTPRequestgo(r)\n\t\tctx := r.Context()\n\n\t\t// put it in context\n\t\tctx = context.WithValue(ctx, ut.UserCtxKey, &user)\n\t\tr = r.WithContext(ctx)\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "title": "" }, { "docid": "823b6fdadba08476e7c1abb55da89aa7", "score": "0.57195073", "text": "func LoggingMiddleware(log *logrus.Logger) func(http.Handler) http.Handler {\n\treturn func(handler http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tr = r.WithContext(context.New(r.Context()))\n\n\t\t\t// https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#HttpRequest\n\t\t\trequest := &HTTPRequest{\n\t\t\t\tRequestMethod: r.Method,\n\t\t\t\tRequestURL: r.RequestURI,\n\t\t\t\tRemoteIP: r.RemoteAddr,\n\t\t\t\tReferer: r.Referer(),\n\t\t\t\tUserAgent: r.UserAgent(),\n\t\t\t\tRequestSize: strconv.FormatInt(r.ContentLength, 10),\n\t\t\t}\n\n\t\t\tm := httpsnoop.CaptureMetrics(handler, w, r)\n\n\t\t\trequest.Status = strconv.Itoa(m.Code)\n\t\t\trequest.Latency = fmt.Sprintf(\"%.9fs\", m.Duration.Seconds())\n\t\t\trequest.ResponseSize = strconv.FormatInt(m.Written, 10)\n\n\t\t\tfields := logrus.Fields{\"httpRequest\": request}\n\n\t\t\t// No idea if this works\n\t\t\ttraceHeader := r.Header.Get(\"X-Cloud-Trace-Context\")\n\t\t\tif traceHeader != \"\" {\n\t\t\t\tfields[\"trace\"] = traceHeader\n\t\t\t}\n\n\t\t\tlog.WithFields(fields).Info(\"Completed request\")\n\t\t})\n\t}\n}", "title": "" }, { "docid": "0d5536e6d42aa23b8d47de35a51e2e3b", "score": "0.56950545", "text": "func LoggingMiddleware() Middleware {\n\treturn func(h http.Handler) http.Handler {\n\n\t\tfn := func(w http.ResponseWriter, r *http.Request) {\n\t\t\tfmt.Println(\"...Before LoggingMiddleware\")\n\t\t\tstart := time.Now().UnixNano()\n\t\t\th.ServeHTTP(w, r)\n\t\t\tend := time.Now().UnixNano()\n\t\t\tfmt.Printf(\"start-time(ns): %d end-time(ns): %d duration(ns):%d \\\"%s %s\\\"\\n\", start, end,(end-start),r.Method, r.RequestURI)\n\t\t\tfmt.Println(\"...After LoggingMiddleware\")\n\t\t}\n\n\t\treturn http.HandlerFunc(fn)\n\t}\n}", "title": "" }, { "docid": "33a7f9fd67358f01d8f068fa16540cb3", "score": "0.56879157", "text": "func AuthenticationMiddleware(repo *storage.Repositories) echo.MiddlewareFunc {\n\treturn func(next echo.HandlerFunc) echo.HandlerFunc {\n\t\treturn func(c echo.Context) error {\n\t\t\tuser, err := auth.CheckAuth(repo, c)\n\t\t\tif err != nil {\n\t\t\t\tctx := graph.AddAnonymousUserToContext(c.Request().Context())\n\t\t\t\tc.SetRequest(c.Request().WithContext(ctx))\n\t\t\t\treturn next(c)\n\t\t\t}\n\t\t\tctx := graph.AddUserToContext(c.Request().Context(), user)\n\t\t\tc.SetRequest(c.Request().WithContext(ctx))\n\t\t\treturn next(c)\n\n\t\t}\n\t}\n}", "title": "" }, { "docid": "62499c11527ff4eccab4f199ac89c3a5", "score": "0.5682924", "text": "func GorillaLogger(w io.Writer) middleware.MiddleWare {\r\n\treturn func(next http.Handler) http.Handler {\r\n\t\treturn handlers.LoggingHandler(w, next)\r\n\t}\r\n}", "title": "" }, { "docid": "f0617c18388c9a1c39a6aff76aff4bc9", "score": "0.5676028", "text": "func Logger() echo.MiddlewareFunc {\n\treturn func(next echo.HandlerFunc) echo.HandlerFunc {\n\t\treturn func(c echo.Context) error {\n\t\t\tstart := time.Now()\n\t\t\tvar err 
error\n\t\t\tif err = next(c); err != nil {\n\t\t\t\tc.Error(err)\n\t\t\t}\n\t\t\tstop := time.Now()\n\n\t\t\treq := c.Request()\n\t\t\tres := c.Response()\n\t\t\treqSize := req.Header.Get(echo.HeaderContentLength)\n\t\t\tif reqSize == \"\" {\n\t\t\t\treqSize = \"0\"\n\t\t\t}\n\n\t\t\tfields := log.Fields{\n\t\t\t\t\"path\": req.RequestURI,\n\t\t\t\t\"method\": req.Method,\n\t\t\t\t\"status\": res.Status,\n\t\t\t\t\"request_size\": reqSize,\n\t\t\t\t\"response_size\": res.Size,\n\t\t\t\t\"duration\": stop.Sub(start).String(),\n\t\t\t\t\"error\": err,\n\t\t\t}\n\n\t\t\tif err == nil {\n\t\t\t\tfields[\"error\"] = \"\"\n\t\t\t}\n\t\t\tlogger := c.Get(\"logger\").(*log.Entry)\n\t\t\tlogger.WithFields(fields).Info(\"request\")\n\n\t\t\treturn err\n\t\t}\n\t}\n}", "title": "" }, { "docid": "c6bbb03ec727fc97ba89434d86850cc6", "score": "0.56691915", "text": "func Logger(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tlog.Infof(\"logging: request middleware\")\n\t\tnext.ServeHTTP(w, r)\n\t\tlog.Infof(\"logging: response middleware\")\n\t})\n}", "title": "" }, { "docid": "35cb9dc83462fbe0ed528a1f769891b3", "score": "0.5646347", "text": "func NewWithLogger(l *logrus.Logger) echo.MiddlewareFunc {\n\treturn func(next echo.HandlerFunc) echo.HandlerFunc {\n\t\treturn func(c *echo.Context) error {\n\t\t\tstart := time.Now()\n\t\t\tisError := false\n\n\t\t\tif err := next(c); err != nil {\n\t\t\t\tc.Error(err)\n\t\t\t\tisError = true\n\t\t\t}\n\n\t\t\tlatency := time.Since(start)\n\n\t\t\tentry := l.WithFields(logrus.Fields{\n\t\t\t\t\"request\": c.Request().RequestURI,\n\t\t\t\t\"method\": c.Request().Method,\n\t\t\t\t\"remote\": c.Request().RemoteAddr,\n\t\t\t\t\"status\": c.Response().Status(),\n\t\t\t\t\"text_status\": http.StatusText(c.Response().Status()),\n\t\t\t\t\"took\": latency,\n\t\t\t})\n\n\t\t\tif reqID := c.Request().Header.Get(\"X-Request-Id\"); reqID != \"\" {\n\t\t\t\tentry = entry.WithField(\"request_id\", reqID)\n\t\t\t}\n\t\t\t// Check middleware error\n\t\t\tif isError {\n\t\t\t\tentry.Error(\"error by handling request\")\n\t\t\t} else {\n\t\t\t\tentry.Info(\"request has been successfully processed\")\n\t\t\t}\n\n\t\t\treturn nil\n\t\t}\n\t}\n}", "title": "" }, { "docid": "cffed758a6836bb871eccccaa0febe14", "score": "0.5635498", "text": "func getLoggerMiddleware(c *cli.Context) (*loggerMw.Logger, error) {\n\tvar logger *loggerMw.Logger\n\tvar w io.Writer\n\tif c.IsSet(\"log-file\") {\n\t\tfw, err := os.Create(c.String(\"log-file\"))\n\t\tif err != nil {\n\t\t\treturn logger,\n\t\t\t\tfmt.Errorf(\"could not open log file %s %s\", c.String(\"log-file\"), err)\n\t\t}\n\t\tw = io.MultiWriter(fw, os.Stderr)\n\t} else {\n\t\tw = os.Stderr\n\t}\n\tif c.String(\"log-format\") == \"json\" {\n\t\tlogger = loggerMw.NewJSONFileLogger(w)\n\t} else {\n\t\tlogger = loggerMw.NewFileLogger(w)\n\t}\n\treturn logger, nil\n}", "title": "" }, { "docid": "42c2ee52234300611b857bc5a01512a4", "score": "0.5632144", "text": "func LoggingMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tlog.Infof(\"request [url = %v] [method = %v], [remote = %v], [protocol = %v]\",\n\t\t\tr.RequestURI,\n\t\t\tr.Method,\n\t\t\tr.RemoteAddr,\n\t\t\tr.Proto)\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "title": "" }, { "docid": "ee30ac069875ac1b50eab47758e42f14", "score": "0.56304985", "text": "func withLogger(logger Logger) func(next http.Handler) http.Handler {\n\treturn func(next http.Handler) 
http.Handler {\n\t\tfn := func(w http.ResponseWriter, r *http.Request) {\n\n\t\t\t// Capture our own copy of the logger so change in this closure\n\t\t\t// won't affect the object passed-in.\n\n\t\t\tlogger := logger\n\n\t\t\tif reqID := middleware.GetReqID(r.Context()); reqID != \"\" {\n\t\t\t\tlogger = logger.With(\"HTTP Request ID\", reqID)\n\t\t\t}\n\n\t\t\t// Defer a function to log and entry once the main handler\n\t\t\t// has returned.\n\n\t\t\tww := middleware.NewWrapResponseWriter(w, r.ProtoMajor)\n\t\t\tt1 := time.Now()\n\n\t\t\tdefer func() {\n\t\t\t\tscheme := \"http\"\n\t\t\t\tif r.TLS != nil {\n\t\t\t\t\tscheme = \"https\"\n\t\t\t\t}\n\n\t\t\t\tlogger.Infow(\"HTTP request\",\n\t\t\t\t\t\"Method\", r.Method,\n\t\t\t\t\t\"URI\", fmt.Sprintf(\"%s://%s%s\", scheme, r.Host, r.RequestURI),\n\t\t\t\t\t\"Protocol\", r.Proto,\n\t\t\t\t\t\"Remote Address\", r.RemoteAddr,\n\t\t\t\t\t\"Status\", ww.Status(),\n\t\t\t\t\t\"Bytes Written\", ww.BytesWritten(),\n\t\t\t\t\t\"Time Taken\", time.Since(t1),\n\t\t\t\t)\n\t\t\t}()\n\n\t\t\tctx := context.WithValue(r.Context(), ctxKeyLogger, logger)\n\t\t\tnext.ServeHTTP(ww, r.WithContext(ctx))\n\t\t}\n\t\treturn http.HandlerFunc(fn)\n\t}\n}", "title": "" }, { "docid": "af9131094acb1830c55e1f425b726c5f", "score": "0.56277794", "text": "func (logger Logger) EchoLogger() echo.MiddlewareFunc {\n\treturn logger.loggerWithConfig(LoggerConfig{\n\t\tSkipper: middleware.DefaultSkipper,\n\t})\n}", "title": "" }, { "docid": "506f038af2563714ffb56151b5ef6e2b", "score": "0.56202203", "text": "func (m *LoggerMiddleware) ServeHTTPMiddleware(rw http.ResponseWriter, req *http.Request, next func(rw http.ResponseWriter, req *http.Request)) {\n\n\t// inject the log into the context along with some info\n\tentry := m.baseEntry.WithField(\"id\", uuid.NewV4())\n\n\treq = req.WithContext(context.WithValue(req.Context(), logCtxKey, entry))\n\n\tnext(rw, req)\n}", "title": "" }, { "docid": "4540b8917fc9b05f8a21ac148ee38be9", "score": "0.5613119", "text": "func NewLogger() func(next http.Handler) http.Handler {\n\treturn middleware.RequestLogger(&Logger{})\n}", "title": "" }, { "docid": "ec941e628f9d047014a58744ec00f444", "score": "0.5610343", "text": "func SetMiddleWareLogger(next http.HandlerFunc) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\t// fmt.Println(\"\")\n\t\tlog.Printf(\"%s %s%s %s\", r.Method, r.Host, r.RequestURI, r.Proto)\n\t\tnext(w, r)\n\t}\n}", "title": "" }, { "docid": "c0e4f5019cce918eebbd1c2cf2b4377f", "score": "0.5606873", "text": "func authMiddleware(authService auth.Service, userService user.Service) gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tauthHeader := c.GetHeader(\"Authorization\")\n\n\t\tif !strings.Contains(authHeader, \"Bearer\") {\n\t\t\tresponse := helper.APIResponse(\"Unauthorized\", http.StatusUnauthorized, \"error\", nil)\n\t\t\tc.AbortWithStatusJSON(http.StatusUnauthorized, response)\n\t\t\treturn\n\t\t}\n\n\t\t// Bearer tokentokentoken\n\t\ttokenString := \"\"\n\t\tarrayToken := strings.Split(authHeader, \" \")\n\t\tif len(arrayToken) == 2 {\n\t\t\ttokenString = arrayToken[1]\n\t\t}\n\n\t\ttoken, err := authService.ValidateToken(tokenString)\n\t\tif err != nil {\n\t\t\tresponse := helper.APIResponse(\"Unauthorized\", http.StatusUnauthorized, \"error\", nil)\n\t\t\tc.AbortWithStatusJSON(http.StatusUnauthorized, response)\n\t\t\treturn\n\t\t}\n\n\t\tclaim, ok := token.Claims.(jwt.MapClaims)\n\t\tif !ok || !token.Valid {\n\t\t\tresponse := helper.APIResponse(\"Unauthorized\", 
http.StatusUnauthorized, \"error\", nil)\n\t\t\tc.AbortWithStatusJSON(http.StatusUnauthorized, response)\n\t\t\treturn\n\t\t}\n\n\t\tuserID := int(claim[\"user_id\"].(float64))\n\n\t\tuser, err := userService.GetUserById(userID)\n\t\tif err != nil {\n\t\t\tresponse := helper.APIResponse(\"Unauthorized\", http.StatusUnauthorized, \"error\", nil)\n\t\t\tc.AbortWithStatusJSON(http.StatusUnauthorized, response)\n\t\t\treturn\n\t\t}\n\n\t\tc.Set(\"currentUser\", user)\n\t}\n}", "title": "" }, { "docid": "405a3d83e93a4a8b030b63041430ffa7", "score": "0.56029016", "text": "func NewWithLogger(l *logrus.Logger) echo.MiddlewareFunc {\n\treturn func(next echo.HandlerFunc) echo.HandlerFunc {\n\t\treturn func(c echo.Context) error {\n\t\t\tstart := time.Now()\n\t\t\tisError := false\n\n\t\t\tbody := c.Request().Body\n\t\t\tdataIn, _ := ioutil.ReadAll(body)\n\t\t\tif len(dataIn) > 0 {\n\t\t\t\tc.Request().Body = ioutil.NopCloser(bytes.NewReader(dataIn))\n\t\t\t}\n\n\t\t\tif err := next(c); err != nil {\n\t\t\t\tc.Error(err)\n\t\t\t\tisError = true\n\t\t\t}\n\n\t\t\tlatency := time.Since(start)\n\n\t\t\thost, _ := os.Hostname()\n\n\t\t\t// TODO 完善access日志的field\n\t\t\tentry := l.WithFields(logrus.Fields{\n\t\t\t\t\"type\": \"access\",\n\t\t\t\t\"server\": host,\n\t\t\t\t\"method\": c.Request().Method,\n\t\t\t\t\"ip\": c.Request().RemoteAddr,\n\t\t\t\t\"status\": c.Response().Status,\n\t\t\t\t\"latency\": latency.Nanoseconds() / int64(time.Millisecond),\n\t\t\t\t\"body\": string(dataIn),\n\t\t\t\t\"route\": c.Path(),\n\t\t\t})\n\n\t\t\tif c.Response().Status != http.StatusNotFound {\n\t\t\t\tentry = entry.WithField(\"url\", c.Request().URL)\n\t\t\t} else {\n\t\t\t\tentry = entry.WithField(\"illegalURL\", c.Request().URL)\n\t\t\t}\n\n\t\t\tif reqID := c.Request().Header.Get(\"X-Request-Id\"); reqID != \"\" {\n\t\t\t\tentry = entry.WithField(\"request_id\", reqID)\n\t\t\t}\n\n\t\t\t// Check middleware error\n\t\t\tif isError {\n\t\t\t\tentry.Error(\"error by handling request\")\n\t\t\t} else {\n\t\t\t\tentry.Info(\"completed handling request\")\n\t\t\t}\n\n\t\t\treturn nil\n\t\t}\n\t}\n}", "title": "" }, { "docid": "1f446b00097ed12dcb1a59de83aaa2eb", "score": "0.5598595", "text": "func LoggerMiddleware(skippers ...SkipperFunc) gin.HandlerFunc {\n\treturn func (c *gin.Context) {\n\t\tif skipHandler(c, skippers...) 
{\n\t\t\tc.Next()\n\t\t\treturn\n\t\t}\n\n\t\tmethod := c.Request.Method\n\n\t\tfields := make(logger.Fields)\n\n\t\tfields[\"ip\"] = c.ClientIP()\n\t\tfields[\"method\"] = method\n\t\tfields[\"url\"] = c.Request.URL.String()\n\t\tfields[\"proto\"] = c.Request.Proto\n\n\t\tif method == http.MethodPost || method == http.MethodPut {\n\t\t\tmediaType, _, _ := mime.ParseMediaType(c.GetHeader(\"Content-Type\"))\n\t\t\tif mediaType == \"application/json\" {\n\t\t\t\tbody, err := ioutil.ReadAll(c.Request.Body)\n\t\t\t\tc.Request.Body.Close()\n\t\t\t\tif err == nil {\n\t\t\t\t\tbuf := bytes.NewBuffer(body)\n\t\t\t\t\tc.Request.Body = ioutil.NopCloser(buf)\n\t\t\t\t\tfields[\"request_body\"] = string(body)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tstart := time.Now()\n\t\tc.Next()\n\t\ttimeConsuming := time.Since(start).Nanoseconds() / 1e6\n\t\tfields[\"time_consuming(ms)\"] = timeConsuming\n\n\t\tfields[\"res_status\"] = c.Writer.Status()\n\t\tif id := ginhelper.GetUserID(c); id != \"\" {\n\t\t\tfields[\"user_id\"] = id\n\t\t}\n\t\tif r := ginhelper.GetResponseBody(c); r != \"\" {\n\t\t\tfields[\"response_body\"] = r\n\t\t}\n\n\t\tlogger.InfoWithFields(\"API Log\", fields)\n\t}\n}", "title": "" }, { "docid": "920a7290ea41156b43c426652f8ec512", "score": "0.5597918", "text": "func NewMiddleware(gcpProjectID string, logger *zap.Logger) func(next http.Handler) http.Handler {\n\treturn func(next http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tt := newTraceFromTraceContext(gcpProjectID, r.Header.Get(\"X-Cloud-Trace-Context\"))\n\t\t\tzapFields := append(\n\t\t\t\tzapdriver.TraceContext(t.TraceID, t.SpanID, true, t.ProjectID),\n\t\t\t\tzap.String(\"ip\", r.Header.Get(\"X-Forwarded-For\")),\n\t\t\t)\n\t\t\tctx := ctxzap.ToContext(r.Context(), logger.With(zapFields...))\n\t\t\tnext.ServeHTTP(w, r.WithContext(ctx))\n\t\t})\n\t}\n}", "title": "" }, { "docid": "10821997e032b6a0ab110f7672456ca4", "score": "0.558306", "text": "func (env *Env) AuthMiddleware(next http.Handler) http.Handler {\n\treturn jwtauth.Verifier(jwtAuth)(extractUser(env.userRepository)(next))\n}", "title": "" }, { "docid": "9f3892492e16a71f5759110c06270125", "score": "0.5576301", "text": "func LoggingEndpointMiddleware(logger log.Logger) endpoint.Middleware {\n\treturn func(next endpoint.Endpoint) endpoint.Endpoint {\n\t\treturn func(ctx context.Context, request interface{}) (response interface{}, err error) {\n\n\t\t\tdefer func(begin time.Time) {\n\t\t\t\tlogger.Log(\"transport_error\", err, \"took\", time.Since(begin))\n\t\t\t}(time.Now())\n\t\t\treturn next(ctx, request)\n\n\t\t}\n\t}\n}", "title": "" }, { "docid": "ad20760c7f154b21a9ac6f6864b55616", "score": "0.5570976", "text": "func SetMiddlewareLogger(next http.HandlerFunc) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tlog.Printf(\"\\n%s %s%s %s\", r.Method, r.Host, r.RequestURI, r.Proto)\n\t\tnext(w, r)\n\t}\n}", "title": "" }, { "docid": "a573e237ac55325ca08f2e7a025e3766", "score": "0.556968", "text": "func LoggerMiddleware(c *web.C, h http.Handler) http.Handler {\n\tfn := func(w http.ResponseWriter, r *http.Request) {\n\t\tctx := gctx.FromC(*c)\n\t\tmw := mutil.WrapWriter(w)\n\n\t\tlogger := log.WithField(\"req\", middleware.GetReqID(*c))\n\n\t\tctx = log.Set(ctx, logger)\n\t\tgctx.Set(c, ctx)\n\n\t\tlogStartOfRequest(ctx, r)\n\n\t\tthen := time.Now()\n\t\th.ServeHTTP(mw, r)\n\t\tduration := time.Now().Sub(then)\n\t\t// Checking `Accept` header from user request because if the streaming 
connection\n\t\t// is reset before sending the first event no Content-Type header is sent in a response.\n\t\tacceptHeader := r.Header.Get(\"Accept\")\n\t\tstreaming := strings.Contains(acceptHeader, render.MimeEventStream)\n\t\tlogEndOfRequest(ctx, r, duration, mw, streaming)\n\t}\n\n\treturn http.HandlerFunc(fn)\n}", "title": "" }, { "docid": "72c9fa8a0433f5d376a2680be67c8bdb", "score": "0.5561141", "text": "func LogMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(responseWriter http.ResponseWriter, request *http.Request) {\n\t\tTemplateInput := getNewTemplateInput(responseWriter, request)\n\t\tlogging.WriteLog(logging.LogLevelVerbose, \"loggingmiddleware/LogMiddleware\", TemplateInput.UserInformation.GetCompositeID(), logging.ResultInfo, []string{request.RequestURI})\n\t\t//Save template input to context\n\t\trequest = request.WithContext(context.WithValue(request.Context(), TemplateInputKeyID, TemplateInput))\n\t\tnext.ServeHTTP(responseWriter, request) // call ServeHTTP on the original handler\n\t})\n}", "title": "" }, { "docid": "f334ebda0a5a2037c2328727c91a2d11", "score": "0.55601096", "text": "func authMiddleware(next echo.HandlerFunc) echo.HandlerFunc {\n\treturn func(c echo.Context) error {\n\t\tcookie, err := c.Cookie(model.CookieName)\n\t\tif err == nil {\n\t\t\tlogin := authcookie.Login(cookie.Value, []byte(model.SECRET))\n\t\t\tif login == \"\" {\n\t\t\t\treturn c.Redirect(http.StatusTemporaryRedirect, \"/login\")\n\t\t\t}\n\t\t\tc.Request().Header.Set(model.HEADER_AUTH_USER_ID, login)\n\t\t\tusername := strings.Split(login, \"@\")[0]\n\t\t\tdbName := username\n\t\t\terr := db.ConnectDB(dbName)\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatalf(\"cannot initialize db: %v\", err)\n\t\t\t\treturn err\n\t\t\t}\n\t\t\treturn next(c)\n\t\t}\n\t\tlog.Println(err)\n\t\treturn c.Redirect(http.StatusTemporaryRedirect, \"/login\")\n\t}\n}", "title": "" }, { "docid": "d1e8167a947c671273a0d79f142e8eb3", "score": "0.5559113", "text": "func NewAccessLogMiddleware(logger *log.Logger) context.Handler {\n\treturn func(ctx iris.Context) {\n\t\tctx.Next()\n\t\tif logger != nil {\n\t\t\tl := MakeAccessLog(ctx, time.Now())\n\t\t\tlogger.Info(l)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "609545da8053da271d37cb6fb4eb0fcf", "score": "0.555811", "text": "func LoggerMiddleware() echo.MiddlewareFunc {\n\treturn func(next echo.HandlerFunc) echo.HandlerFunc {\n\t\treturn func(c echo.Context) (err error) {\n\n\t\t\tif logRequests {\n\n\t\t\t\treq := c.Request()\n\t\t\t\tres := c.Response()\n\t\t\t\tstart := time.Now()\n\t\t\t\tif err := next(c); err != nil {\n\t\t\t\t\tc.Error(err)\n\t\t\t\t}\n\t\t\t\tstop := time.Now()\n\n\t\t\t\tp := req.URL.Path\n\t\t\t\tif p == \"\" {\n\t\t\t\t\tp = \"/\"\n\t\t\t\t}\n\n\t\t\t\tbytesIn := req.Header.Get(echo.HeaderContentLength)\n\t\t\t\tif bytesIn == \"\" {\n\t\t\t\t\tbytesIn = \"0\"\n\t\t\t\t}\n\n\t\t\t\tlogContext := logrus.WithFields(map[string]interface{}{\n\t\t\t\t\t\"time_rfc3339\": time.Now().Format(time.RFC3339),\n\t\t\t\t\t\"remote_ip\": c.RealIP(),\n\t\t\t\t\t\"host\": req.Host,\n\t\t\t\t\t\"uri\": req.RequestURI,\n\t\t\t\t\t\"method\": req.Method,\n\t\t\t\t\t\"path\": p,\n\t\t\t\t\t\"referer\": req.Referer(),\n\t\t\t\t\t\"user_agent\": req.UserAgent(),\n\t\t\t\t\t\"status\": res.Status,\n\t\t\t\t\t\"latency\": strconv.FormatInt(stop.Sub(start).Nanoseconds()/1000, 10),\n\t\t\t\t\t\"latency_human\": stop.Sub(start).String(),\n\t\t\t\t\t\"bytes_in\": bytesIn,\n\t\t\t\t\t\"bytes_out\": strconv.FormatInt(res.Size, 
10),\n\t\t\t\t})\n\n\t\t\t\tmsg := fmt.Sprintf(\"%s %s [ %d ]\", req.Method, p, res.Status)\n\t\t\t\tif res.Status > 499 {\n\t\t\t\t\tlogContext.Error(msg)\n\t\t\t\t} else if res.Status > 399 {\n\t\t\t\t\tlogContext.Warn(msg)\n\t\t\t\t} else {\n\t\t\t\t\tlogContext.Info(msg)\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn nil\n\t\t}\n\t}\n}", "title": "" }, { "docid": "fbe2d293102e0060d2c9309d4c6c79f1", "score": "0.55430996", "text": "func AuthMiddleware() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tauthHeader := c.GetHeader(\"authorization\")\n\t\tif authHeader == \"\" || len(authHeader) < len(\"Token\")+1 {\n\t\t\trestErr := resterror.NewUnAuthorizedError()\n\t\t\tc.JSON(restErr.StatusCode, restErr)\n\t\t\tc.Abort()\n\t\t\treturn\n\t\t}\n\n\t\ttoken := authHeader[len(\"Token \"):]\n\n\t\tauthService := services.JWTAuthService()\n\t\tresult, err := authService.ValidateToken(token)\n\t\tif err != nil || !result.Valid {\n\t\t\trestErr := resterror.NewUnAuthorizedError()\n\t\t\tc.JSON(restErr.StatusCode, restErr)\n\t\t\tc.Abort()\n\t\t\treturn\n\t\t}\n\n\t\tclaims := result.Claims.(jwt.MapClaims)\n\t\tc.Set(\"user_id\", claims[\"user_id\"])\n\t\tc.Set(\"is_admin\", claims[\"is_admin\"])\n\n\t\tc.Next()\n\t}\n}", "title": "" }, { "docid": "ac13a78abded23484e66831aa8c04999", "score": "0.5534792", "text": "func Middleware(l Logger) func(http.ResponseWriter, *http.Request, http.HandlerFunc) {\n\treturn func(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) {\n\t\tdefer func() {\n\t\t\terr := recover()\n\t\t\tif err == nil {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tswitch assert := err.(type) {\n\t\t\tcase AssertError:\n\t\t\t\tif assert.statusCode == http.StatusInternalServerError && l != nil {\n\t\t\t\t\tl.Printf(\"PANIC: %s\\n%s\", assert.Error(), assert.stack())\n\t\t\t\t\thttp.Error(rw, http.StatusText(assert.statusCode), assert.statusCode)\n\t\t\t\t} else {\n\t\t\t\t\thttp.Error(rw, assert.Error(), assert.statusCode)\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t}()\n\n\t\tnext(rw, r)\n\t}\n}", "title": "" }, { "docid": "061eab71d1a665b0b99bdf18ab056e4c", "score": "0.5513051", "text": "func LoggingMiddlewareWithOptions(options Options) func(stdhttp.Handler) stdhttp.Handler {\n\treturn func(next stdhttp.Handler) stdhttp.Handler {\n\t\treturn stdhttp.HandlerFunc(func(w stdhttp.ResponseWriter, r *stdhttp.Request) {\n\t\t\tmw := mutil.WrapWriter(w)\n\t\t\tctx := log.PushContext(r.Context(), func(l *log.Entry) *log.Entry {\n\t\t\t\treturn l.WithFields(log.F{\n\t\t\t\t\t\"req\": middleware.GetReqID(r.Context()),\n\t\t\t\t})\n\t\t\t})\n\t\t\tr = r.WithContext(ctx)\n\n\t\t\tlogStartOfRequest(r, options.ExtraHeaders)\n\n\t\t\tthen := time.Now()\n\t\t\tnext.ServeHTTP(mw, r)\n\t\t\tduration := time.Since(then)\n\n\t\t\tlogEndOfRequest(r, duration, mw)\n\t\t})\n\t}\n}", "title": "" }, { "docid": "3f600d8979b4c3ead6ba87f3f26e94df", "score": "0.5495734", "text": "func Logging() Middleware {\n\n // Create the middleware\n return func(f http.HandlerFunc) http.HandlerFunc {\n\n // Define the middleware's behavior\n return func(w http.ResponseWriter, r *http.Request) {\n start := time.Now()\n defer func() { log.Println(r.URL.Path, time.Since(start)) }()\n\n // Call the next middleware in the chain\n f(w, r)\n }\n }\n}", "title": "" }, { "docid": "ae9754e8381cb0be049eee032e105bdd", "score": "0.54865", "text": "func authMiddleware() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\t// Get the client secret key\n\t\terr := jwtMiddleWare.CheckJWT(c.Writer, c.Request)\n\t\tif err != nil 
{\n\t\t\t// Token not found\n\t\t\tfmt.Println(err)\n\t\t\tc.Abort()\n\t\t\tc.Writer.WriteHeader(http.StatusUnauthorized)\n\t\t\tc.Writer.Write([]byte(\"Unauthorized\"))\n\t\t\treturn\n\t\t}\n\t}\n}", "title": "" }, { "docid": "ba10bfae574c6e6d499056a54b60b7f7", "score": "0.5483457", "text": "func NewMiddlewareWrapper(logger Logger, metrics Metrics, corsOptions *CORSOptions, globals ServiceGlobals) MiddlewareWrapper {\n\tm := &middlewareWrapperImpl{\n\t\tlogger: logger,\n\t\tmetrics: metrics,\n\t\tglobals: globals,\n\t}\n\tm.corsOptions = m.mergeCORSOptions(corsOptions)\n\treturn m\n}", "title": "" }, { "docid": "a7ab4259731d735a505fb859083f1779", "score": "0.5466856", "text": "func AuthMiddleware(c *fiber.Ctx) error {\n\ttoken := c.Get(\"Authorization\")\n\tuser, err := auth.GetUserFromToken(token)\n\tif err != nil {\n\t\tc.Status(401)\n\t\tc.JSON(types.Error{\n\t\t\tMessage: \"Not Authorized\",\n\t\t})\n\t\treturn err\n\t}\n\tc.Locals(\"user\", user)\n\treturn c.Next()\n}", "title": "" }, { "docid": "1ad102277f0a1291a49de53a3ff3af33", "score": "0.54618067", "text": "func (routes *Routes) AuthMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tauthHeader := r.Header[\"Authorization\"]\n\t\tif len(authHeader) == 0 {\n\t\t\twriteErrorMessage(w, \"Unauthorized\", http.StatusUnauthorized)\n\t\t\treturn\n\t\t}\n\t\tauthString := authHeader[0]\n\n\t\tauthParts := strings.Split(authString, \" \")\n\t\tif len(authParts) != 2 {\n\t\t\twriteErrorMessage(w, \"Unauthorized\", http.StatusUnauthorized)\n\t\t\tlog.Println(\"Unexpected auth header:\", authString)\n\t\t\treturn\n\t\t}\n\n\t\ttoken := authParts[1]\n\n\t\temail, err := routes.extractEmailFromJWT(token)\n\t\tif err != nil {\n\t\t\twriteErrorMessage(w, \"Unauthorized\", http.StatusUnauthorized)\n\t\t\tlog.Printf(\"Error parsing jwt %s error: %s\\n\", token, err.Error())\n\t\t\treturn\n\t\t}\n\n\t\t// set the email back on the context\n\t\tc := context.WithValue(r.Context(), emailContextKey, email)\n\t\tr = r.WithContext(c)\n\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "title": "" }, { "docid": "48b8108779035c79b74d3632b2366546", "score": "0.5451141", "text": "func LogMiddleware(logger logrus.FieldLogger) gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tlog := &logReq{\n\t\t\tURI: c.Request.URL.Path,\n\t\t\tMethod: c.Request.Method,\n\t\t\tIP: c.ClientIP(),\n\t\t\tContentType: c.ContentType(),\n\t\t\tAgent: c.Request.Header.Get(\"User-Agent\"),\n\t\t}\n\n\t\t// format is string\n\t\toutput := fmt.Sprintf(\"%s %s %s %s %s\",\n\t\t\tlog.Method,\n\t\t\tlog.URI,\n\t\t\tlog.IP,\n\t\t\tlog.ContentType,\n\t\t\tlog.Agent,\n\t\t)\n\n\t\t// TODO: Use logger\n\t\tlogger.Debug(output)\n\n\t\tc.Next()\n\t}\n}", "title": "" }, { "docid": "49683a6d205223241e9a3635eddd858f", "score": "0.5447122", "text": "func (s *Server) AuthMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tcookie, err := r.Cookie(\"password\")\n\t\tif err != nil || cookie.Value != s.Password {\n\t\t\thttp.Redirect(w, r, \"/login?redirect=\"+url.QueryEscape(r.URL.String()), http.StatusFound)\n\t\t\treturn\n\t\t}\n\t\tr = ctxAppendTemplateVars(r, map[string]interface{}{\n\t\t\t\"loggedin\": true,\n\t\t\t\"redirect\": r.URL.String(),\n\t\t})\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "title": "" }, { "docid": "3341eaf53e65a67f7a6e32af3ef6ac18", "score": "0.5434109", "text": "func AuthMiddleware(next http.HandlerFunc) http.HandlerFunc {\n\treturn 
http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tbearerToken := r.Header.Get(\"Authorization\")\n\n\t\tif bearerToken == \"\" {\n\t\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\t\tw.Write([]byte(\"authorization header must be sent\"))\n\t\t\treturn\n\t\t}\n\n\t\tbearerToken = strings.Replace(bearerToken, \"Bearer \", \"\", 1)\n\n\t\ttoken, err := jwt.ParseWithClaims(bearerToken, &models.Claims{}, func(token *jwt.Token) (interface{}, error) {\n\t\t\treturn []byte(os.Getenv(\"JWT_KEY\")), nil\n\t\t})\n\n\t\tif err != nil {\n\t\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\t\tw.Write([]byte(err.Error()))\n\t\t\treturn\n\t\t}\n\n\t\tclaims, ok := token.Claims.(*models.Claims)\n\n\t\tif !ok || !token.Valid {\n\t\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\t\tw.Write([]byte(\"invalid token\"))\n\t\t\treturn\n\t\t}\n\n\t\tctx := context.WithValue(r.Context(), \"username\", claims.Username)\n\n\t\tnext.ServeHTTP(w, r.WithContext(ctx))\n\t})\n}", "title": "" }, { "docid": "c9c38bafe52839eace7192485b516d73", "score": "0.5429697", "text": "func TracingHandlerFunc(h http.HandlerFunc) http.HandlerFunc {\n\treturn TracingMiddleware(LoggingMiddleWare(h))\n}", "title": "" }, { "docid": "ad464b31534f03a23c44b5e6ae1be055", "score": "0.54222625", "text": "func NewLogger(l *zap.Logger) middleware.Logger {\n\treturn &adapter{l}\n}", "title": "" } ]
c6c1f6d209134136089cec9e92a9ac6c
GetGroupTag gets the groupTag property value. Group Tag of the Windows autopilot device.
[ { "docid": "24347cf8923f9ee3159f0af22441f784", "score": "0.8526594", "text": "func (m *ImportedWindowsAutopilotDeviceIdentity) GetGroupTag()(*string) {\n return m.groupTag\n}", "title": "" } ]
[ { "docid": "b65a4296370a27f661673349e496f581", "score": "0.783471", "text": "func (m *ImportedWindowsAutopilotDeviceIdentity) SetGroupTag(value *string)() {\n m.groupTag = value\n}", "title": "" }, { "docid": "a4dc8ba72b1ed902677ebc18c9f7568c", "score": "0.66701436", "text": "func (Gadolinium) GetGroup() string {\n\tvar g groupType = b3\n\treturn g.get()\n}", "title": "" }, { "docid": "9660434ed2b1b404298b08e1d0e5ddcb", "score": "0.6669193", "text": "func (Meitnerium) GetGroup() string {\n\tvar g groupType = b8\n\treturn g.get()\n}", "title": "" }, { "docid": "82fb743f6fe07bafbc7d37a9c6eaf620", "score": "0.65882003", "text": "func (Dysprosium) GetGroup() string {\n\tvar g groupType = b3\n\treturn g.get()\n}", "title": "" }, { "docid": "b09fec0a497677b5c0cfe21f6c13a3bc", "score": "0.6567123", "text": "func (Cobalt) GetGroup() string {\n\tvar g groupType = b8\n\treturn g.get()\n}", "title": "" }, { "docid": "46a0d0bf664cb89bba015177867623bb", "score": "0.6525462", "text": "func (Iridium) GetGroup() string {\n\tvar g groupType = b8\n\treturn g.get()\n}", "title": "" }, { "docid": "82b9a476192003f0debad6ed27c097de", "score": "0.6515168", "text": "func (Protactinium) GetGroup() string {\n\tvar g groupType = b3\n\treturn g.get()\n}", "title": "" }, { "docid": "0e7acffd1fca199251379692c80b4296", "score": "0.64786214", "text": "func (c *HiddenCommand) GetGroup() string {\n\treturn c.Wrapped.GetGroup()\n}", "title": "" }, { "docid": "1db5b1536922f3b90a95de9ca9f44bfa", "score": "0.64592904", "text": "func (Strontium) GetGroup() string {\n\tvar g groupType = a2\n\treturn g.get()\n}", "title": "" }, { "docid": "e3f84008e85fffaa948efb0050c0c1d3", "score": "0.6458628", "text": "func (Roentgenium) GetGroup() string {\n\tvar g groupType = b1\n\treturn g.get()\n}", "title": "" }, { "docid": "33accaea371ab5f7df6b7ee6caa978fa", "score": "0.6356788", "text": "func (Zirconium) GetGroup() string {\n\tvar g groupType = b4\n\treturn g.get()\n}", "title": "" }, { "docid": "5fd80647a970b7ddd1e4fe29cedb0f05", "score": "0.63474107", "text": "func (Iron) GetGroup() string {\n\tvar g groupType = b8\n\treturn g.get()\n}", "title": "" }, { "docid": "fdfeb90e7ca9050054490eac991a1cb6", "score": "0.6330198", "text": "func (c *DeprecatedCommand) GetGroup() string {\n\treturn c.Wrapped.GetGroup()\n}", "title": "" }, { "docid": "c1a34bcce593a4da552c41d1e0933122", "score": "0.629855", "text": "func (c *Command) GetGroup() string {\n\treturn c.Group\n}", "title": "" }, { "docid": "616990223edf4e3241da2a7ca61d2cfa", "score": "0.6293138", "text": "func (Einsteinium) GetGroup() string {\n\tvar g groupType = b3\n\treturn g.get()\n}", "title": "" }, { "docid": "186ca19111b2cd8c72b971bd1bd49a6a", "score": "0.626663", "text": "func (c *Descriptor) Group() string {\n\treturn c.group\n}", "title": "" }, { "docid": "a54932e276b0e0560262a61c8919000d", "score": "0.6262067", "text": "func (c *YouTube) GetGroup() string {\n\treturn static.CommandGroupPlayer\n}", "title": "" }, { "docid": "b106b8da9587a8dcdac6813bb50cedea", "score": "0.6248293", "text": "func (Ruthenium) GetGroup() string {\n\tvar g groupType = b8\n\treturn g.get()\n}", "title": "" }, { "docid": "15222a40c7a5330a61032b4b2d524126", "score": "0.61953366", "text": "func (h *AnsibleHost) GetGroup() string {\n\treturn h.group\n}", "title": "" }, { "docid": "6337d868ff7cd966496a6a50f0a2eff9", "score": "0.6150007", "text": "func (o *PolicyIn) GetGroup() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.Group\n}", "title": "" }, { "docid": 
"12815f18ebf00b95d5a68e84c2a18f69", "score": "0.60924566", "text": "func (c *AliasCommand) GetGroup() string {\n\treturn c.Wrapped.GetGroup()\n}", "title": "" }, { "docid": "40598d5783fffc5e0b23bac3e52dabc3", "score": "0.6087978", "text": "func (c *Consumer) GetGroup() string {\n\treturn c.group\n}", "title": "" }, { "docid": "c3a77af8f9bb4dad18b7208a3025f6f9", "score": "0.59568596", "text": "func (dc DevfileCommand) GetGroup() *Group {\n\tif dc.Composite != nil {\n\t\treturn dc.Composite.Group\n\t} else if dc.Exec != nil {\n\t\treturn dc.Exec.Group\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "49baa08c8e083d30165593dc4926091d", "score": "0.5947583", "text": "func (o *IscsiInitiatorListEntryInfoType) TpgroupTag() int {\n\tvar r int\n\tif o.TpgroupTagPtr == nil {\n\t\treturn r\n\t}\n\tr = *o.TpgroupTagPtr\n\treturn r\n}", "title": "" }, { "docid": "208fabe646811ff26ff66fe768f9e4a7", "score": "0.5903011", "text": "func (c *Prefix) GetGroup() string {\n\treturn discordgocmds.GroupAdmin\n}", "title": "" }, { "docid": "77f08decad08070aa4f3336af5245513", "score": "0.5887339", "text": "func (o *IscsiInitiatorListEntryInfoType) SetTpgroupTag(newValue int) *IscsiInitiatorListEntryInfoType {\n\to.TpgroupTagPtr = &newValue\n\treturn o\n}", "title": "" }, { "docid": "ca0dd00c8d60286241da2eb80dfd6187", "score": "0.58708465", "text": "func (o *CreateLaunchpad) GetGroupGuid() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.GroupGuid\n}", "title": "" }, { "docid": "772d083266a40ab300076afe02b54ef5", "score": "0.5843591", "text": "func (o *Component) GetGroup() bool {\n\tif o == nil || o.Group == nil {\n\t\tvar ret bool\n\t\treturn ret\n\t}\n\treturn *o.Group\n}", "title": "" }, { "docid": "cc117df34825f8093926f6dd1167cd58", "score": "0.583102", "text": "func (d *Data) Group() string {\n\treturn d.G\n}", "title": "" }, { "docid": "249bbc9d6ef5b031812a6f02aa0ac4bd", "score": "0.58135337", "text": "func (handler *APIHandler) GetDeviceGroup(ctx context.Context, id *voltha.ID) (*voltha.DeviceGroup, error) {\n\tlogger.Debug(\"GetDeviceGroup\")\n\treturn &voltha.DeviceGroup{}, errors.New(\"UnImplemented\")\n}", "title": "" }, { "docid": "3e835cb585774cd96a0de494fc610429", "score": "0.5800789", "text": "func (_m *MockCloudWatchLogsAPI) TagLogGroup(_param0 *cloudwatchlogs.TagLogGroupInput) (*cloudwatchlogs.TagLogGroupOutput, error) {\n\tret := _m.ctrl.Call(_m, \"TagLogGroup\", _param0)\n\tret0, _ := ret[0].(*cloudwatchlogs.TagLogGroupOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "9504d6addaecd525c4236010ca286b1a", "score": "0.5791292", "text": "func (o *PkgContentSetArch) GetGroup() string {\n\tif o == nil || IsNil(o.Group) {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Group\n}", "title": "" }, { "docid": "0edbe15773a07922ab587ef3090bb5a0", "score": "0.5783147", "text": "func (o ChallengeSpecIssuerRefPtrOutput) Group() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ChallengeSpecIssuerRef) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Group\n\t}).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "7f5f723ee64e2244b10057db32728dca", "score": "0.5760275", "text": "func (o TargetPortalGroupResponseOutput) Tag() pulumi.IntOutput {\n\treturn o.ApplyT(func(v TargetPortalGroupResponse) int { return v.Tag }).(pulumi.IntOutput)\n}", "title": "" }, { "docid": "139850ac61e901e665afd46fd5d22264", "score": "0.574595", "text": "func (p *Property) Group() string {\n\treturn p.group\n}", "title": "" }, { 
"docid": "7306c0c0d0d362923287628cb8db1efe", "score": "0.57074296", "text": "func (o GroupPolicyAttachmentOutput) Group() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *GroupPolicyAttachment) pulumi.StringOutput { return v.Group }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "767438ab1548e793b191ce7a27bf6901", "score": "0.57014674", "text": "func (o GroupLabelOutput) Group() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *GroupLabel) pulumi.StringOutput { return v.Group }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "32466f1edb114dfd954c96015e9b6628", "score": "0.56842273", "text": "func (o GroupPolicyOutput) Group() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *GroupPolicy) pulumi.StringOutput { return v.Group }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "62bb9ce3145a609aa92535a195236deb", "score": "0.566601", "text": "func (o *Component) SetGroup(v bool) {\n\to.Group = &v\n}", "title": "" }, { "docid": "b3ba7c65d4138df7b174734172de9c46", "score": "0.56525296", "text": "func (o ManagedPrivateEndpointTypeOutput) GroupId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ManagedPrivateEndpointType) *string { return v.GroupId }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "ad268c6a23791f64d760b5d3f1115bef", "score": "0.5626061", "text": "func GetGroup(ctx *pulumi.Context,\n\tname string, id pulumi.IDInput, state *GroupState, opts ...pulumi.ResourceOption) (*Group, error) {\n\tvar resource Group\n\terr := ctx.ReadResource(\"aws-native:synthetics:Group\", name, id, state, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "title": "" }, { "docid": "4bd026263930cd9f51c24173f978b8b3", "score": "0.5623377", "text": "func (o *PolicyIn) SetGroup(v string) {\n\to.Group = v\n}", "title": "" }, { "docid": "20194fc691fe860ea39a8ab418b641a1", "score": "0.56194293", "text": "func (m *TunnelConfigurationIKEv2Custom) GetDhGroup()(*DhGroup) {\n val, err := m.GetBackingStore().Get(\"dhGroup\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*DhGroup)\n }\n return nil\n}", "title": "" }, { "docid": "8289f177d49c56f0209ca1988f944219", "score": "0.5605405", "text": "func (o *VersionedRemoteGroupPort) GetGroupIdentifier() string {\n\tif o == nil || o.GroupIdentifier == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.GroupIdentifier\n}", "title": "" }, { "docid": "c77e34c568a1504843833cc863843134", "score": "0.55926585", "text": "func (g Graph) GetGroup(w http.ResponseWriter, r *http.Request) {\n\tgroup := r.Context().Value(groupIDKey).(*ldap.Entry)\n\n\trender.Status(r, http.StatusOK)\n\trender.JSON(w, r, createGroupModelFromLDAP(group))\n}", "title": "" }, { "docid": "2b50c696d138b80dfd7b4532d96fcc36", "score": "0.5580566", "text": "func (c *GroupTagClient) Get(ctx context.Context, id int) (*GroupTag, error) {\n\treturn c.Query().Where(grouptag.ID(id)).Only(ctx)\n}", "title": "" }, { "docid": "52f7b0d74287e127de23c7c8ed32bfea", "score": "0.558044", "text": "func GetGroup(dc v1.Command) *v1.CommandGroup {\n\tswitch {\n\tcase dc.Composite != nil:\n\t\treturn dc.Composite.Group\n\tcase dc.Exec != nil:\n\t\treturn dc.Exec.Group\n\tcase dc.Apply != nil:\n\t\treturn dc.Apply.Group\n\tcase dc.Custom != nil:\n\t\treturn dc.Custom.Group\n\n\tdefault:\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "52f7b0d74287e127de23c7c8ed32bfea", "score": "0.558044", "text": "func GetGroup(dc v1.Command) *v1.CommandGroup {\n\tswitch {\n\tcase dc.Composite != nil:\n\t\treturn dc.Composite.Group\n\tcase 
dc.Exec != nil:\n\t\treturn dc.Exec.Group\n\tcase dc.Apply != nil:\n\t\treturn dc.Apply.Group\n\tcase dc.Custom != nil:\n\t\treturn dc.Custom.Group\n\n\tdefault:\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "9802b9d3a9e8ff8ee15c87b3bcca9a40", "score": "0.55721", "text": "func (o ManagedPrivateEndpointTypePtrOutput) GroupId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ManagedPrivateEndpointType) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.GroupId\n\t}).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "c4f8b9c4ab24604e4c2baebcc3855a32", "score": "0.5552633", "text": "func (m *PrivilegedAccessGroupAssignmentScheduleRequest) GetGroup()(Groupable) {\n val, err := m.GetBackingStore().Get(\"group\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(Groupable)\n }\n return nil\n}", "title": "" }, { "docid": "a9df749d14659a68c72b0f92eba7dfcf", "score": "0.55489117", "text": "func (t *ThingGroupSvc) GetGroup(ctx context.Context, in *pb.GroupIDRequest) (*pb.ThingGroup, error) {\n\tif in.ID == \"\" {\n\t\treturn nil, grpc.Errorf(codes.InvalidArgument, \"Error: bad ID\")\n\t}\n\tgroup := new(models.ThingGroup)\n\tif err := t.Db.GetGroupByID(in.GetID(), group); err != nil {\n\t\treturn nil, grpc.Errorf(codes.NotFound, err.Error())\n\t}\n\tbytes, err := json.Marshal(group)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pb.ThingGroup{Item: bytes}, nil\n}", "title": "" }, { "docid": "014d0c3ba5029e1f9fc0069468be6996", "score": "0.5540769", "text": "func (lfg *LFGroup) GetGroup(ctx context.Context, groupID string) (*LDAPGroup, error) {\n\tf := logrus.Fields{\n\t\t\"functionName\": \"v1.gerrits.lf_group.GetGroup\",\n\t\tutils.XREQUESTID: ctx.Value(utils.XREQUESTID),\n\t\t\"groupID\": groupID,\n\t}\n\n\taccessToken, err := lfg.getAccessToken(ctx)\n\tif err != nil {\n\t\tlog.WithFields(f).WithError(err).Warn(\"problem loading access token\")\n\t\treturn nil, err\n\t}\n\tgetGroupURL := fmt.Sprintf(\"%s/rest/auth0/og/%s\", lfg.LfBaseURL, groupID)\n\treq, err := http.NewRequest(\"GET\", getGroupURL, nil)\n\tif err != nil {\n\t\tlog.WithFields(f).WithError(err).Warnf(\"problem creating a new request to URL: %s\", getGroupURL)\n\t\treturn nil, err\n\t}\n\treq.Header.Add(\"Content-Type\", \"application/json\")\n\treq.Header.Add(\"Authorization\", \"Bearer \"+accessToken)\n\n\tclient := http.Client{\n\t\tTimeout: DefaultHTTPTimeout,\n\t}\n\tres, err := client.Do(req)\n\tif err != nil {\n\t\tlog.WithFields(f).WithError(err).Warnf(\"problem invoking request to URL: %s\", getGroupURL)\n\t\treturn nil, err\n\t}\n\n\tdefer func() {\n\t\tcloseErr := res.Body.Close()\n\t\tif closeErr != nil {\n\t\t\tlog.WithFields(f).WithError(closeErr).Warn(\"error closing response body\")\n\t\t}\n\t}()\n\n\tbody, err := io.ReadAll(res.Body)\n\tif err != nil {\n\t\tlog.WithFields(f).WithError(err).Warnf(\"problem reading the response from URL: %s\", getGroupURL)\n\t\treturn nil, err\n\t}\n\n\tvar out LDAPGroup\n\terr = json.Unmarshal(body, &out)\n\tif err != nil {\n\t\tlog.WithFields(f).WithError(err).Warnf(\"problem unmarshalling the response from URL: %s\", getGroupURL)\n\t\treturn nil, err\n\t}\n\n\treturn &out, nil\n}", "title": "" }, { "docid": "39d3a6c47160858279e4f2620ac7dad0", "score": "0.548937", "text": "func (o OrderSpecIssuerRefPtrOutput) Group() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *OrderSpecIssuerRef) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Group\n\t}).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": 
"3ed88ddbc1a95ca3ba5b780b713fe37c", "score": "0.54860634", "text": "func (o *ExportRuleGetIterRequest) Tag() string {\n\tvar r string\n\tif o.TagPtr == nil {\n\t\treturn r\n\t}\n\tr = *o.TagPtr\n\treturn r\n}", "title": "" }, { "docid": "b6e7c00d8cb2ff352954f8c968b864a2", "score": "0.5480095", "text": "func (o ManagedPrivateEndpointResponseOutput) GroupId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ManagedPrivateEndpointResponse) *string { return v.GroupId }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "f93fbcb5ccdba9e02dd405d6c32beabc", "score": "0.54708225", "text": "func NewGroupTagClient(c config) *GroupTagClient {\n\treturn &GroupTagClient{config: c}\n}", "title": "" }, { "docid": "9bcc07a0ee0b5210045572523c31a8da", "score": "0.5462137", "text": "func (eh *EdgeHub) Group() string {\n\treturn modules.HubGroup\n}", "title": "" }, { "docid": "9bcc07a0ee0b5210045572523c31a8da", "score": "0.5462137", "text": "func (eh *EdgeHub) Group() string {\n\treturn modules.HubGroup\n}", "title": "" }, { "docid": "c9c5f5b3533804f593bfc50a37480d5d", "score": "0.5454509", "text": "func (o *CreateLaunchpad) GetGroupGuidOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.GroupGuid, true\n}", "title": "" }, { "docid": "88b5f3f65ed951158f2ea73995f1b95c", "score": "0.545328", "text": "func (o *NvmeSubsystemControllerGetIterRequest) Tag() string {\n\tvar r string\n\tif o.TagPtr == nil {\n\t\treturn r\n\t}\n\tr = *o.TagPtr\n\treturn r\n}", "title": "" }, { "docid": "6875f80710c984b0a69c1c0fb21620f6", "score": "0.5450656", "text": "func (o ManagedPrivateEndpointResponsePtrOutput) GroupId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ManagedPrivateEndpointResponse) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.GroupId\n\t}).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "c73d2e5e9496a6dfab3aea0c6b1c729c", "score": "0.5444607", "text": "func (o ScaledObjectSpecJobTargetRefTemplateSpecVolumesQuobytePtrOutput) Group() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ScaledObjectSpecJobTargetRefTemplateSpecVolumesQuobyte) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Group\n\t}).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "a12a132010444662409bda3f0f9863f0", "score": "0.5443699", "text": "func (o *PolicyExtendedAllOf) GetGroup() GroupOut {\n\tif o == nil {\n\t\tvar ret GroupOut\n\t\treturn ret\n\t}\n\n\treturn o.Group\n}", "title": "" }, { "docid": "87a4946459e8e8cce96ecdcba6619ca8", "score": "0.5442647", "text": "func (o *TeamResponse) GetGroup() int32 {\n\tif o == nil {\n\t\tvar ret int32\n\t\treturn ret\n\t}\n\n\treturn o.Group\n}", "title": "" }, { "docid": "ec19ce159a6425bef36bc67a8efbb9a0", "score": "0.5439278", "text": "func (o *Term) GetTermGroup() int64 {\n\treturn o.TermGroup\n}", "title": "" }, { "docid": "daed1f66b7c93a67d4f3e0fa12ff1615", "score": "0.5435942", "text": "func JSContextGetGroup(ctx JSContextRef) JSContextGroupRef {\n\tcctx, _ := *(*C.JSContextRef)(unsafe.Pointer(&ctx)), cgoAllocsUnknown\n\t__ret := C.JSContextGetGroup(cctx)\n\t__v := *(*JSContextGroupRef)(unsafe.Pointer(&__ret))\n\treturn __v\n}", "title": "" }, { "docid": "baa1b3dd7df04a05bdde1e29e47acc27", "score": "0.54303086", "text": "func (s *KVSnapshot) SetResourceGroupTag(tag []byte) {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\ts.mu.resourceGroupTag = tag\n}", "title": "" }, { "docid": "1f3e08ad49ed70a611dc178d541544d5", "score": "0.542933", "text": "func group(tag string, value string) string { return 
fmt.Sprintf(`<g %s=\"%s\">`, tag, value) }", "title": "" }, { "docid": "15bbb3e1c4bed3d24bd6fe5a4c3fb048", "score": "0.5428103", "text": "func GetGroup(name string)*Group {//获取特定名称的group\n\tmu.RLock() //不涉及任何冲突变量的写操作\n\tg:=groups[name]\n\tmu.RUnlock()\n\treturn g\n}", "title": "" }, { "docid": "4048d11f3bc9c12c72c35ee2e1df8e2e", "score": "0.54193354", "text": "func (o *VersionedProcessor) GetGroupIdentifier() string {\n\tif o == nil || o.GroupIdentifier == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.GroupIdentifier\n}", "title": "" }, { "docid": "9034c983d28782e3d04f808bac6d4da2", "score": "0.5412225", "text": "func (o *Component) GetGroupId() string {\n\tif o == nil || o.GroupId == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.GroupId\n}", "title": "" }, { "docid": "98bcbb7ed00bc930e8947180fa23c720", "score": "0.5407616", "text": "func (e *DicomElement) getTag() string {\n\treturn fmt.Sprintf(\"(%04X,%04X)\", e.Group, e.Element)\n}", "title": "" }, { "docid": "76938b007a3442a64ed4cf9dfbaf796e", "score": "0.5392306", "text": "func (s *API) GetGroup(req *GetGroupRequest, opts ...scw.RequestOption) (*Group, error) {\n\tvar err error\n\n\tif fmt.Sprint(req.GroupID) == \"\" {\n\t\treturn nil, errors.New(\"field GroupID cannot be empty in request\")\n\t}\n\n\tscwReq := &scw.ScalewayRequest{\n\t\tMethod: \"GET\",\n\t\tPath: \"/iam/v1alpha1/groups/\" + fmt.Sprint(req.GroupID) + \"\",\n\t\tHeaders: http.Header{},\n\t}\n\n\tvar resp Group\n\n\terr = s.client.Do(scwReq, &resp, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resp, nil\n}", "title": "" }, { "docid": "bd61d595f7bf8883d66a275c9d7df09f", "score": "0.53868943", "text": "func (o *PolicyExtendedAllOf) SetGroup(v GroupOut) {\n\to.Group = v\n}", "title": "" }, { "docid": "c0c4caa167aec494cece88db88d13491", "score": "0.53849965", "text": "func (m *Subject) GetGroup() string {\n\tif m != nil {\n\t\treturn m.Group\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "f6304da533f3070fa36f58307bad2895", "score": "0.53783816", "text": "func (k Keeper) GetGroup(ctx sdk.Context, id types.GroupID) (types.Group, bool) {\n\tstore := ctx.KVStore(k.skey)\n\n\tkey := groupKey(id)\n\n\tif !store.Has(key) {\n\t\treturn types.Group{}, false\n\t}\n\n\tbuf := store.Get(key)\n\n\tvar val types.Group\n\n\tk.cdc.MustUnmarshal(buf, &val)\n\n\treturn val, true\n}", "title": "" }, { "docid": "accdc7cb6288ebf276329f3f71f44d77", "score": "0.53769803", "text": "func (o ScaledObjectSpecJobTargetRefTemplateSpecVolumesQuobyteOutput) Group() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ScaledObjectSpecJobTargetRefTemplateSpecVolumesQuobyte) *string { return v.Group }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "af8f801dfdf7fbc40450a07ce54f35b7", "score": "0.53670734", "text": "func GetGroupSep() (sep string) { return Conf.GetGroupSep() }", "title": "" }, { "docid": "069cd531f5512218c1b18ea60802afd7", "score": "0.5364109", "text": "func (o ChallengeSpecIssuerRefOutput) Group() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ChallengeSpecIssuerRef) *string { return v.Group }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "cb2159d1e789b3860b2fc6564ce22259", "score": "0.53557473", "text": "func (o *PkgContentSetArch) SetGroup(v string) {\n\to.Group = &v\n}", "title": "" }, { "docid": "4d878f8db5ac79fb29d1ffee726392c4", "score": "0.5354995", "text": "func (c *TagClient) QueryGroupTags(t *Tag) *GroupTagQuery {\n\tquery := (&GroupTagClient{config: c.config}).Query()\n\tquery.path = func(context.Context) 
(fromV *sql.Selector, _ error) {\n\t\tid := t.ID\n\t\tstep := sqlgraph.NewStep(\n\t\t\tsqlgraph.From(tag.Table, tag.FieldID, id),\n\t\t\tsqlgraph.To(grouptag.Table, grouptag.FieldID),\n\t\t\tsqlgraph.Edge(sqlgraph.O2M, true, tag.GroupTagsTable, tag.GroupTagsColumn),\n\t\t)\n\t\tfromV = sqlgraph.Neighbors(t.driver.Dialect(), step)\n\t\treturn fromV, nil\n\t}\n\treturn query\n}", "title": "" }, { "docid": "bd77ab53196250a1baf69db469148924", "score": "0.5351128", "text": "func (o *PolicyIn) GetGroupOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Group, true\n}", "title": "" }, { "docid": "21a07125ac762272c01401bd75f871e9", "score": "0.53509015", "text": "func (m *edgeLBManager) PoolGroup() string {\n\treturn m.poolGroup\n}", "title": "" }, { "docid": "4521edbf35466b3f7b5910ec93c8d39d", "score": "0.5350045", "text": "func getGroup() (g string) {\n\tg = os.Getenv(\"group\")\n\tif g == \"\" {\n\t\tg = _defaultGroup\n\t}\n\treturn\n}", "title": "" }, { "docid": "e2dd7165eb68ee47750f20f4650b48e1", "score": "0.5348711", "text": "func GetGroup(ctx *pulumi.Context,\n\tname string, id pulumi.IDInput, state *GroupState, opts ...pulumi.ResourceOption) (*Group, error) {\n\tvar resource Group\n\terr := ctx.ReadResource(\"gcp:cloudidentity/group:Group\", name, id, state, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "title": "" }, { "docid": "b373f52c8d19f038a2332919c24733e7", "score": "0.5348605", "text": "func (o GroupSamlLinkOutput) Group() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *GroupSamlLink) pulumi.StringOutput { return v.Group }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "33bb8ae539ef9da098b5cef6d132ce62", "score": "0.534768", "text": "func GetGroup(cli bce.Client, groupUuid string) (*Group, error) {\n\turl := PREFIX + \"/\" + groupUuid\n\n\tresult := &Group{}\n\treq := &GetHttpReq{Url: url, Result: result}\n\terr := Get(cli, req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn result, nil\n}", "title": "" }, { "docid": "f68d16a7ded17a90b8d963d7862984c2", "score": "0.5329152", "text": "func GetGroup(gID int) (group *Group, err error) {\n\tif gID <= 0 {\n\t\tgID = 14\n\t}\n\tswitch gID {\n\tcase 1:\n\t\tgroup = &Group{\n\t\t\tg: new(big.Int).SetInt64(2),\n\t\t\tp: GetGroup1(),\n\t\t}\n\tcase 2:\n\t\tgroup = &Group{\n\t\t\tg: new(big.Int).SetInt64(2),\n\t\t\tp: GetGroup2(),\n\t\t}\n\tcase 14:\n\t\tgroup = &Group{\n\t\t\tg: new(big.Int).SetInt64(2),\n\t\t\tp: GetGroup14(),\n\t\t}\n\tdefault:\n\t\tgroup = nil\n\t\terr = errors.New(\"Unknown group\")\n\t}\n\treturn\n}", "title": "" }, { "docid": "e8263b7ea28917914871aeab0b5a8b79", "score": "0.5315013", "text": "func GroupName(sharedUIDLabel string) string {\n\treturn sharedUIDLabelMap[sharedUIDLabel]\n}", "title": "" }, { "docid": "e4b1ee7e74c0345f14cb976d35b05eb0", "score": "0.53106225", "text": "func (o OrderSpecIssuerRefOutput) Group() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v OrderSpecIssuerRef) *string { return v.Group }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "53752b27a35dbcc8a6f072460a48f0a2", "score": "0.5306255", "text": "func (m *AndroidFotaDeploymentAssignmentTarget) GetGroupId()(*string) {\n val, err := m.GetBackingStore().Get(\"groupId\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}", "title": "" }, { "docid": "2500c154c891f326bca8dce8687adb7f", "score": "0.53017026", "text": "func FindGroupFromTag(epgTag string) (*mastercfg.EndpointGroupState, error) 
{\n\tstateDriver, err := utils.GetStateDriver()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tepgCfg := &mastercfg.EndpointGroupState{}\n\tepgCfg.StateDriver = stateDriver\n\n\tepgList, err := epgCfg.ReadAll()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar epg *mastercfg.EndpointGroupState\n\tfound := false\n\tfor _, epgP := range epgList {\n\t\tepg = epgP.(*mastercfg.EndpointGroupState)\n\t\tif epg.GroupTag == epgTag {\n\t\t\tfound = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif !found {\n\t\treturn nil, fmt.Errorf(\"Couldn't find group matching the tag %s\", epgTag)\n\t}\n\treturn epg, nil\n}", "title": "" }, { "docid": "af657e63a4b4dcc99deac8a4185ca65d", "score": "0.5295367", "text": "func (o GroupCustomAttributeOutput) Group() pulumi.IntOutput {\n\treturn o.ApplyT(func(v *GroupCustomAttribute) pulumi.IntOutput { return v.Group }).(pulumi.IntOutput)\n}", "title": "" }, { "docid": "6ddd8cf204ba6ce9648d764aa3f3fb9f", "score": "0.52921164", "text": "func (o LaunchTemplatePlacementPtrOutput) GroupName() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *LaunchTemplatePlacement) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.GroupName\n\t}).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "ac24ae385aa41d9385e4cbe500e388fb", "score": "0.52884245", "text": "func (o *NvmeSubsystemControllerGetIterRequest) SetTag(newValue string) *NvmeSubsystemControllerGetIterRequest {\n\to.TagPtr = &newValue\n\treturn o\n}", "title": "" }, { "docid": "ce2f44033b0fd405372c5991cb5a91f7", "score": "0.5288026", "text": "func (s *BrokenStore) GroupGet(id string) (*storagepb.Group, error) {\n\treturn nil, errIntentional\n}", "title": "" }, { "docid": "cb191aae99756c8bdf5a7f1e7b7176b2", "score": "0.5285976", "text": "func (r Ticket) GetGroup() (resp datatypes.Ticket_Group, err error) {\n\terr = r.Session.DoRequest(\"SoftLayer_Ticket\", \"getGroup\", nil, &r.Options, &resp)\n\treturn\n}", "title": "" }, { "docid": "0ac17cd1f05151d1f1e40458e4e8a136", "score": "0.52836204", "text": "func (s *Service) GetGroup(group string, resp ...*http.Response) (*Group, error) {\n\tpp := struct {\n\t\tGroup string\n\t}{\n\t\tGroup: group,\n\t}\n\tu, err := s.Client.BuildURLFromPathParams(nil, serviceCluster, `/identity/v3/groups/{{.Group}}`, pp)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tresponse, err := s.Client.Get(services.RequestParams{URL: u})\n\tif response != nil {\n\t\tdefer response.Body.Close()\n\n\t\t// populate input *http.Response if provided\n\t\tif len(resp) > 0 && resp[0] != nil {\n\t\t\t*resp[0] = *response\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar rb Group\n\terr = util.ParseResponse(&rb, response)\n\treturn &rb, err\n}", "title": "" }, { "docid": "7d1e2889477ec37f5fa128d39ee66a8f", "score": "0.5275676", "text": "func (p *Property) SetGroup(group string) {\n\tp.group = strings.ToUpper(group)\n}", "title": "" }, { "docid": "3aaeca9a8b05685bac6b68e884ac5a59", "score": "0.52755195", "text": "func (o *TeamResponse) SetGroup(v int32) {\n\to.Group = v\n}", "title": "" } ]
08a237b5f116219e56a27110ec6987b7
The 'fabric.UplinkRole' resource to update.
[ { "docid": "90c6aae96e70baaf1e77b78f0d811230", "score": "0.65120137", "text": "func (r ApiUpdateFabricUplinkRoleRequest) FabricUplinkRole(fabricUplinkRole FabricUplinkRole) ApiUpdateFabricUplinkRoleRequest {\n\tr.fabricUplinkRole = &fabricUplinkRole\n\treturn r\n}", "title": "" } ]
[ { "docid": "61fc076ca1bc4e0024a880c77d619af8", "score": "0.74004185", "text": "func (a *FabricApiService) UpdateFabricUplinkRole(ctx context.Context, moid string) ApiUpdateFabricUplinkRoleRequest {\n\treturn ApiUpdateFabricUplinkRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "702b0bd5df7ea471e297c7a0eef67f9d", "score": "0.6803201", "text": "func (a *FabricApiService) PatchFabricUplinkRole(ctx context.Context, moid string) ApiPatchFabricUplinkRoleRequest {\n\treturn ApiPatchFabricUplinkRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "219cf794b6369e22bc1ffc9e3629e310", "score": "0.67557466", "text": "func (a *FabricApiService) UpdateFabricFcUplinkRole(ctx context.Context, moid string) ApiUpdateFabricFcUplinkRoleRequest {\n\treturn ApiUpdateFabricFcUplinkRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "5405e1dd721f2f72d32d52faf1ac40dd", "score": "0.6475647", "text": "func (a *FabricApiService) CreateFabricUplinkRole(ctx context.Context) ApiCreateFabricUplinkRoleRequest {\n\treturn ApiCreateFabricUplinkRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "title": "" }, { "docid": "ed1ab01a6665c0031eb6bca5a86661c1", "score": "0.64607674", "text": "func (r ApiPatchFabricUplinkRoleRequest) FabricUplinkRole(fabricUplinkRole FabricUplinkRole) ApiPatchFabricUplinkRoleRequest {\n\tr.fabricUplinkRole = &fabricUplinkRole\n\treturn r\n}", "title": "" }, { "docid": "09855e1a653b04df4488607090c7d49e", "score": "0.64167714", "text": "func (a *FabricApiService) UpdateFabricFcoeUplinkRole(ctx context.Context, moid string) ApiUpdateFabricFcoeUplinkRoleRequest {\n\treturn ApiUpdateFabricFcoeUplinkRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "c07f904e9a4270a87dbbe22524899306", "score": "0.6359223", "text": "func (r ApiCreateFabricUplinkRoleRequest) FabricUplinkRole(fabricUplinkRole FabricUplinkRole) ApiCreateFabricUplinkRoleRequest {\n\tr.fabricUplinkRole = &fabricUplinkRole\n\treturn r\n}", "title": "" }, { "docid": "62881bbfb7989e7eb8299e4b2875e563", "score": "0.62276566", "text": "func (r ApiUpdateFabricFcUplinkRoleRequest) FabricFcUplinkRole(fabricFcUplinkRole FabricFcUplinkRole) ApiUpdateFabricFcUplinkRoleRequest {\n\tr.fabricFcUplinkRole = &fabricFcUplinkRole\n\treturn r\n}", "title": "" }, { "docid": "b7a60e3442d46624319d475d8dc014dd", "score": "0.61933154", "text": "func (a *FabricApiService) GetFabricUplinkRoleList(ctx context.Context) ApiGetFabricUplinkRoleListRequest {\n\treturn ApiGetFabricUplinkRoleListRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "title": "" }, { "docid": "4f42a895a85e2ea38354587d3b5d23da", "score": "0.61205333", "text": "func (a *FabricApiService) UpdateFabricUplinkPcRole(ctx context.Context, moid string) ApiUpdateFabricUplinkPcRoleRequest {\n\treturn ApiUpdateFabricUplinkPcRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "bb209267786440eb23037393e4928fde", "score": "0.61093414", "text": "func (r ApiPatchFabricFcUplinkRoleRequest) FabricFcUplinkRole(fabricFcUplinkRole FabricFcUplinkRole) ApiPatchFabricFcUplinkRoleRequest {\n\tr.fabricFcUplinkRole = &fabricFcUplinkRole\n\treturn r\n}", "title": "" }, { "docid": "b37739c3e61638e454b171975b74cd8b", "score": "0.6082573", "text": "func (a *FabricApiService) PatchFabricFcUplinkRole(ctx context.Context, moid string) ApiPatchFabricFcUplinkRoleRequest 
{\n\treturn ApiPatchFabricFcUplinkRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "4b3aaa041f5d00d556b724a66277f3d7", "score": "0.60748196", "text": "func (r ApiUpdateFabricFcoeUplinkRoleRequest) FabricFcoeUplinkRole(fabricFcoeUplinkRole FabricFcoeUplinkRole) ApiUpdateFabricFcoeUplinkRoleRequest {\n\tr.fabricFcoeUplinkRole = &fabricFcoeUplinkRole\n\treturn r\n}", "title": "" }, { "docid": "724103bdee411111ac5090c653fb9e95", "score": "0.6032653", "text": "func (r ApiCreateFabricFcUplinkRoleRequest) FabricFcUplinkRole(fabricFcUplinkRole FabricFcUplinkRole) ApiCreateFabricFcUplinkRoleRequest {\n\tr.fabricFcUplinkRole = &fabricFcUplinkRole\n\treturn r\n}", "title": "" }, { "docid": "ee841a0701991857da5c6955d1677c02", "score": "0.60097295", "text": "func (a *FabricApiService) UpdateFabricUplinkRoleExecute(r ApiUpdateFabricUplinkRoleRequest) (*FabricUplinkRole, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *FabricUplinkRole\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"FabricApiService.UpdateFabricUplinkRole\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/fabric/UplinkRoles/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.fabricUplinkRole == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"fabricUplinkRole is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.fabricUplinkRole\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v 
Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "title": "" }, { "docid": "4365de087eadbca121563a3db01e59f1", "score": "0.59794855", "text": "func (a *FabricApiService) CreateFabricFcUplinkRole(ctx context.Context) ApiCreateFabricFcUplinkRoleRequest {\n\treturn ApiCreateFabricFcUplinkRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "title": "" }, { "docid": "440ca5fda1061a8319e6a3191c421c0c", "score": "0.5932297", "text": "func (r ApiPatchFabricFcoeUplinkRoleRequest) FabricFcoeUplinkRole(fabricFcoeUplinkRole FabricFcoeUplinkRole) ApiPatchFabricFcoeUplinkRoleRequest {\n\tr.fabricFcoeUplinkRole = &fabricFcoeUplinkRole\n\treturn r\n}", "title": "" }, { "docid": "1ce228cd5ee48e9213ea7b019f62c794", "score": "0.5901182", "text": "func resourceKibanaRoleUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {\n\tid := d.Id()\n\n\terr := createRole(d, meta)\n\tif err != nil {\n\t\treturn diag.FromErr(err)\n\t}\n\n\tlog.Infof(\"Updated role %s successfully\", id)\n\tfmt.Printf(\"[INFO] Updated role %s successfully\", id)\n\n\treturn resourceKibanaRoleRead(ctx, d, meta)\n}", "title": "" }, { "docid": "ca772d1c0d0b3db30ffcb1194197f4fa", "score": "0.5896799", "text": "func (a *FabricApiService) CreateFabricUplinkRoleExecute(r ApiCreateFabricUplinkRoleRequest) (*FabricUplinkRole, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = 
http.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *FabricUplinkRole\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"FabricApiService.CreateFabricUplinkRole\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/fabric/UplinkRoles\"\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.fabricUplinkRole == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"fabricUplinkRole is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\tif r.ifNoneMatch != nil {\n\t\tlocalVarHeaderParams[\"If-None-Match\"] = parameterToString(*r.ifNoneMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.fabricUplinkRole\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, 
newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "title": "" }, { "docid": "fddc704a71c68f56d7fa029998b96f9c", "score": "0.588848", "text": "func (c *Client) RoleUpdate(r string,payload *RoleStruct) ( error) {\nvar err error\nreturn err\n}", "title": "" }, { "docid": "2b4d6c4046ad9203cfcf7c7beb1a2013", "score": "0.576712", "text": "func (c *Client) RoleUpdate(r string, payload *RoleStruct) error {\n\tvar err error\n\treturn err\n}", "title": "" }, { "docid": "3996b7776d423947421820378bdc4b94", "score": "0.5762955", "text": "func (a *FabricApiService) PatchFabricUplinkRoleExecute(r ApiPatchFabricUplinkRoleRequest) (*FabricUplinkRole, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *FabricUplinkRole\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"FabricApiService.PatchFabricUplinkRole\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/fabric/UplinkRoles/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.fabricUplinkRole == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"fabricUplinkRole is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.fabricUplinkRole\n\treq, err := 
a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "title": "" }, { "docid": "e7fb6f1a78eb95c7b71c7f40e89440a6", "score": "0.574494", "text": "func (r ApiCreateFabricFcoeUplinkRoleRequest) FabricFcoeUplinkRole(fabricFcoeUplinkRole FabricFcoeUplinkRole) ApiCreateFabricFcoeUplinkRoleRequest {\n\tr.fabricFcoeUplinkRole = &fabricFcoeUplinkRole\n\treturn r\n}", "title": "" }, { "docid": "ea6569efd4a10ddf3d867fd327ca4361", "score": "0.57268125", "text": "func (a *FabricApiService) GetFabricFcUplinkRoleList(ctx 
context.Context) ApiGetFabricFcUplinkRoleListRequest {\n\treturn ApiGetFabricFcUplinkRoleListRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "title": "" }, { "docid": "01d1a9108a30c01c40ce2f143de54a97", "score": "0.57175684", "text": "func (a *FabricApiService) GetFabricUplinkRoleByMoid(ctx context.Context, moid string) ApiGetFabricUplinkRoleByMoidRequest {\n\treturn ApiGetFabricUplinkRoleByMoidRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "8ace604a16295e96198f9b06f1728bde", "score": "0.56967014", "text": "func (a *FabricApiService) UpdateFabricFcUplinkPcRole(ctx context.Context, moid string) ApiUpdateFabricFcUplinkPcRoleRequest {\n\treturn ApiUpdateFabricFcUplinkPcRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "015539b2805958f16123b02c796944b2", "score": "0.5672118", "text": "func (a *FabricApiService) DeleteFabricUplinkRole(ctx context.Context, moid string) ApiDeleteFabricUplinkRoleRequest {\n\treturn ApiDeleteFabricUplinkRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "6e423e0c7e026cf2978033daf3c4fd03", "score": "0.55983365", "text": "func (r ApiUpdateFabricUplinkPcRoleRequest) FabricUplinkPcRole(fabricUplinkPcRole FabricUplinkPcRole) ApiUpdateFabricUplinkPcRoleRequest {\n\tr.fabricUplinkPcRole = &fabricUplinkPcRole\n\treturn r\n}", "title": "" }, { "docid": "4646cbc4cd741f53c9f99e0339cf0804", "score": "0.552382", "text": "func (r *Role) GetRole() *Role { return r }", "title": "" }, { "docid": "95de2bd2e50d882a7852e4842326b698", "score": "0.54933673", "text": "func (r ApiGetFabricUplinkRoleListRequest) Apply(apply string) ApiGetFabricUplinkRoleListRequest {\n\tr.apply = &apply\n\treturn r\n}", "title": "" }, { "docid": "2606c4b17b7d489e6fec4c90384b25c8", "score": "0.54914635", "text": "func (a *FabricApiService) CreateFabricUplinkPcRole(ctx context.Context) ApiCreateFabricUplinkPcRoleRequest {\n\treturn ApiCreateFabricUplinkPcRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "title": "" }, { "docid": "a3b1e2d9c9ff85ddaba61b741298e471", "score": "0.54903364", "text": "func (a *FabricApiService) PatchFabricFcoeUplinkRole(ctx context.Context, moid string) ApiPatchFabricFcoeUplinkRoleRequest {\n\treturn ApiPatchFabricFcoeUplinkRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "a5833b44eb7b18912dd9c16322563342", "score": "0.54655075", "text": "func (b *backend) pathRoleCreateUpdate(\n\treq *logical.Request, data *framework.FieldData) (*logical.Response, error) {\n\n\troleName := strings.ToLower(data.Get(\"role\").(string))\n\tif roleName == \"\" {\n\t\treturn logical.ErrorResponse(\"missing role\"), nil\n\t}\n\n\tb.roleMutex.Lock()\n\tdefer b.roleMutex.Unlock()\n\n\troleEntry, err := b.nonLockedAWSRole(req.Storage, roleName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif roleEntry == nil {\n\t\troleEntry = &awsRoleEntry{}\n\t} else {\n\t\tneedUpdate, err := upgradeRoleEntry(roleEntry)\n\t\tif err != nil {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"failed to update roleEntry: %v\", err)), nil\n\t\t}\n\t\tif needUpdate {\n\t\t\terr = b.nonLockedSetAWSRole(req.Storage, roleName, roleEntry)\n\t\t\tif err != nil {\n\t\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"failed to save upgraded roleEntry: %v\", err)), nil\n\t\t\t}\n\t\t}\n\t}\n\n\t// Fetch and set the bound parameters. 
There can't be default values\n\t// for these.\n\tif boundAmiIDRaw, ok := data.GetOk(\"bound_ami_id\"); ok {\n\t\troleEntry.BoundAmiID = boundAmiIDRaw.(string)\n\t}\n\n\tif boundAccountIDRaw, ok := data.GetOk(\"bound_account_id\"); ok {\n\t\troleEntry.BoundAccountID = boundAccountIDRaw.(string)\n\t}\n\n\tif boundRegionRaw, ok := data.GetOk(\"bound_region\"); ok {\n\t\troleEntry.BoundRegion = boundRegionRaw.(string)\n\t}\n\n\tif boundVpcIDRaw, ok := data.GetOk(\"bound_vpc_id\"); ok {\n\t\troleEntry.BoundVpcID = boundVpcIDRaw.(string)\n\t}\n\n\tif boundSubnetIDRaw, ok := data.GetOk(\"bound_subnet_id\"); ok {\n\t\troleEntry.BoundSubnetID = boundSubnetIDRaw.(string)\n\t}\n\n\tif boundIamRoleARNRaw, ok := data.GetOk(\"bound_iam_role_arn\"); ok {\n\t\troleEntry.BoundIamRoleARN = boundIamRoleARNRaw.(string)\n\t}\n\n\tif boundIamInstanceProfileARNRaw, ok := data.GetOk(\"bound_iam_instance_profile_arn\"); ok {\n\t\troleEntry.BoundIamInstanceProfileARN = boundIamInstanceProfileARNRaw.(string)\n\t}\n\n\tif boundIamPrincipalARNRaw, ok := data.GetOk(\"bound_iam_principal_arn\"); ok {\n\t\troleEntry.BoundIamPrincipalARN = boundIamPrincipalARNRaw.(string)\n\t}\n\n\tif inferRoleTypeRaw, ok := data.GetOk(\"inferred_entity_type\"); ok {\n\t\troleEntry.InferredEntityType = inferRoleTypeRaw.(string)\n\t}\n\n\tif inferredAWSRegionRaw, ok := data.GetOk(\"inferred_aws_region\"); ok {\n\t\troleEntry.InferredAWSRegion = inferredAWSRegionRaw.(string)\n\t}\n\n\t// auth_type is a special case as it's immutable and can't be changed once a role is created\n\tif authTypeRaw, ok := data.GetOk(\"auth_type\"); ok {\n\t\t// roleEntry.AuthType should only be \"\" when it's a new role; existing roles without an\n\t\t// auth_type should have already been upgraded to have one before we get here\n\t\tif roleEntry.AuthType == \"\" {\n\t\t\tswitch authTypeRaw.(string) {\n\t\t\tcase ec2AuthType, iamAuthType:\n\t\t\t\troleEntry.AuthType = authTypeRaw.(string)\n\t\t\tdefault:\n\t\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"unrecognized auth_type: %v\", authTypeRaw.(string))), nil\n\t\t\t}\n\t\t} else if authTypeRaw.(string) != roleEntry.AuthType {\n\t\t\treturn logical.ErrorResponse(\"changing auth_type on a role is not allowed\"), nil\n\t\t}\n\t} else if req.Operation == logical.CreateOperation {\n\t\tswitch req.MountType {\n\t\t// maintain backwards compatibility for old aws-ec2 auth types\n\t\tcase \"aws-ec2\":\n\t\t\troleEntry.AuthType = ec2AuthType\n\t\t// but default to iamAuth for new mounts going forward\n\t\tcase \"aws\":\n\t\t\troleEntry.AuthType = iamAuthType\n\t\tdefault:\n\t\t\troleEntry.AuthType = iamAuthType\n\t\t}\n\t}\n\n\tallowEc2Binds := roleEntry.AuthType == ec2AuthType\n\n\tif roleEntry.InferredEntityType != \"\" {\n\t\tswitch {\n\t\tcase roleEntry.AuthType != iamAuthType:\n\t\t\treturn logical.ErrorResponse(\"specified inferred_entity_type but didn't allow iam auth_type\"), nil\n\t\tcase roleEntry.InferredEntityType != ec2EntityType:\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified invalid inferred_entity_type: %s\", roleEntry.InferredEntityType)), nil\n\t\tcase roleEntry.InferredAWSRegion == \"\":\n\t\t\treturn logical.ErrorResponse(\"specified inferred_entity_type but not inferred_aws_region\"), nil\n\t\t}\n\t\tallowEc2Binds = true\n\t} else if roleEntry.InferredAWSRegion != \"\" {\n\t\treturn logical.ErrorResponse(\"specified inferred_aws_region but not inferred_entity_type\"), nil\n\t}\n\n\tnumBinds := 0\n\n\tif roleEntry.BoundAccountID != \"\" {\n\t\tif !allowEc2Binds {\n\t\t\treturn 
logical.ErrorResponse(fmt.Sprintf(\"specified bound_account_id but not allowing ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif roleEntry.BoundRegion != \"\" {\n\t\tif roleEntry.AuthType != ec2AuthType {\n\t\t\treturn logical.ErrorResponse(\"specified bound_region but not allowing ec2 auth_type\"), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif roleEntry.BoundAmiID != \"\" {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_ami_id but not allowing ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif roleEntry.BoundIamInstanceProfileARN != \"\" {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_iam_instance_profile_arn but not allowing ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif roleEntry.BoundIamRoleARN != \"\" {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_iam_role_arn but not allowing ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif roleEntry.BoundIamPrincipalARN != \"\" {\n\t\tif roleEntry.AuthType != iamAuthType {\n\t\t\treturn logical.ErrorResponse(\"specified bound_iam_principal_arn but not allowing iam auth_type\"), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif roleEntry.BoundVpcID != \"\" {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_vpc_id but not allowing ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif roleEntry.BoundSubnetID != \"\" {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_subnet_id but not allowing ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif numBinds == 0 {\n\t\treturn logical.ErrorResponse(\"at least be one bound parameter should be specified on the role\"), nil\n\t}\n\n\tpoliciesStr, ok := data.GetOk(\"policies\")\n\tif ok {\n\t\troleEntry.Policies = policyutil.ParsePolicies(policiesStr.(string))\n\t} else if req.Operation == logical.CreateOperation {\n\t\troleEntry.Policies = []string{\"default\"}\n\t}\n\n\tdisallowReauthenticationBool, ok := data.GetOk(\"disallow_reauthentication\")\n\tif ok {\n\t\tif roleEntry.AuthType != ec2AuthType {\n\t\t\treturn logical.ErrorResponse(\"specified disallow_reauthentication when not using ec2 auth type\"), nil\n\t\t}\n\t\troleEntry.DisallowReauthentication = disallowReauthenticationBool.(bool)\n\t} else if req.Operation == logical.CreateOperation && roleEntry.AuthType == ec2AuthType {\n\t\troleEntry.DisallowReauthentication = data.Get(\"disallow_reauthentication\").(bool)\n\t}\n\n\tallowInstanceMigrationBool, ok := data.GetOk(\"allow_instance_migration\")\n\tif ok {\n\t\tif roleEntry.AuthType != ec2AuthType {\n\t\t\treturn logical.ErrorResponse(\"specified allow_instance_migration when not using ec2 auth type\"), nil\n\t\t}\n\t\troleEntry.AllowInstanceMigration = allowInstanceMigrationBool.(bool)\n\t} else if req.Operation == logical.CreateOperation && roleEntry.AuthType == ec2AuthType {\n\t\troleEntry.AllowInstanceMigration = data.Get(\"allow_instance_migration\").(bool)\n\t}\n\n\tvar resp logical.Response\n\n\tttlRaw, ok := data.GetOk(\"ttl\")\n\tif ok {\n\t\tttl := time.Duration(ttlRaw.(int)) * time.Second\n\t\tdefaultLeaseTTL := b.System().DefaultLeaseTTL()\n\t\tif ttl > defaultLeaseTTL {\n\t\t\tresp.AddWarning(fmt.Sprintf(\"Given ttl of %d seconds greater than current 
mount/system default of %d seconds; ttl will be capped at login time\", ttl/time.Second, defaultLeaseTTL/time.Second))\n\t\t}\n\t\troleEntry.TTL = ttl\n\t} else if req.Operation == logical.CreateOperation {\n\t\troleEntry.TTL = time.Duration(data.Get(\"ttl\").(int)) * time.Second\n\t}\n\n\tmaxTTLInt, ok := data.GetOk(\"max_ttl\")\n\tif ok {\n\t\tmaxTTL := time.Duration(maxTTLInt.(int)) * time.Second\n\t\tsystemMaxTTL := b.System().MaxLeaseTTL()\n\t\tif maxTTL > systemMaxTTL {\n\t\t\tresp.AddWarning(fmt.Sprintf(\"Given max_ttl of %d seconds greater than current mount/system default of %d seconds; max_ttl will be capped at login time\", maxTTL/time.Second, systemMaxTTL/time.Second))\n\t\t}\n\n\t\tif maxTTL < time.Duration(0) {\n\t\t\treturn logical.ErrorResponse(\"max_ttl cannot be negative\"), nil\n\t\t}\n\n\t\troleEntry.MaxTTL = maxTTL\n\t} else if req.Operation == logical.CreateOperation {\n\t\troleEntry.MaxTTL = time.Duration(data.Get(\"max_ttl\").(int)) * time.Second\n\t}\n\n\tif roleEntry.MaxTTL != 0 && roleEntry.MaxTTL < roleEntry.TTL {\n\t\treturn logical.ErrorResponse(\"ttl should be shorter than max_ttl\"), nil\n\t}\n\n\tperiodRaw, ok := data.GetOk(\"period\")\n\tif ok {\n\t\troleEntry.Period = time.Second * time.Duration(periodRaw.(int))\n\t} else if req.Operation == logical.CreateOperation {\n\t\troleEntry.Period = time.Second * time.Duration(data.Get(\"period\").(int))\n\t}\n\n\tif roleEntry.Period > b.System().MaxLeaseTTL() {\n\t\treturn logical.ErrorResponse(fmt.Sprintf(\"'period' of '%s' is greater than the backend's maximum lease TTL of '%s'\", roleEntry.Period.String(), b.System().MaxLeaseTTL().String())), nil\n\t}\n\n\troleTagStr, ok := data.GetOk(\"role_tag\")\n\tif ok {\n\t\tif roleEntry.AuthType != ec2AuthType {\n\t\t\treturn logical.ErrorResponse(\"tried to enable role_tag when not using ec2 auth method\"), nil\n\t\t}\n\t\troleEntry.RoleTag = roleTagStr.(string)\n\t\t// There is a limit of 127 characters on the tag key for AWS EC2 instances.\n\t\t// Complying to that requirement, do not allow the value of 'key' to be more than that.\n\t\tif len(roleEntry.RoleTag) > 127 {\n\t\t\treturn logical.ErrorResponse(\"length of role tag exceeds the EC2 key limit of 127 characters\"), nil\n\t\t}\n\t} else if req.Operation == logical.CreateOperation && roleEntry.AuthType == ec2AuthType {\n\t\troleEntry.RoleTag = data.Get(\"role_tag\").(string)\n\t}\n\n\tif roleEntry.HMACKey == \"\" {\n\t\troleEntry.HMACKey, err = uuid.GenerateUUID()\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"failed to generate role HMAC key: %v\", err)\n\t\t}\n\t}\n\n\tif err := b.nonLockedSetAWSRole(req.Storage, roleName, roleEntry); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif len(resp.Warnings()) == 0 {\n\t\treturn nil, nil\n\t}\n\n\treturn &resp, nil\n}", "title": "" }, { "docid": "833429a6b2f86404601c6849b200cb30", "score": "0.5455847", "text": "func (a *FabricApiService) CreateFabricFcoeUplinkRole(ctx context.Context) ApiCreateFabricFcoeUplinkRoleRequest {\n\treturn ApiCreateFabricFcoeUplinkRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "title": "" }, { "docid": "9a39fa09b05532badff16959fe108056", "score": "0.5452583", "text": "func (c *Client) Update(ctx context.Context, p *Role) (res string, err error) {\n\tvar ires interface{}\n\tires, err = c.UpdateEndpoint(ctx, p)\n\tif err != nil {\n\t\treturn\n\t}\n\treturn ires.(string), nil\n}", "title": "" }, { "docid": "e299837d4c6a80990e36a89b0cb19e70", "score": "0.544694", "text": "func (r ApiCreateFabricUplinkPcRoleRequest) 
FabricUplinkPcRole(fabricUplinkPcRole FabricUplinkPcRole) ApiCreateFabricUplinkPcRoleRequest {\n\tr.fabricUplinkPcRole = &fabricUplinkPcRole\n\treturn r\n}", "title": "" }, { "docid": "377496076a4fb24c3e5d51545ebd0b5d", "score": "0.54228497", "text": "func (r ApiPatchFabricUplinkPcRoleRequest) FabricUplinkPcRole(fabricUplinkPcRole FabricUplinkPcRole) ApiPatchFabricUplinkPcRoleRequest {\n\tr.fabricUplinkPcRole = &fabricUplinkPcRole\n\treturn r\n}", "title": "" }, { "docid": "da1794ceb27a4e6cb7fc07f3f829c7a2", "score": "0.54108095", "text": "func Role(n NFD) (ResourceStatus, error) {\n\t// state represents the resource's 'control' function index\n\tstate := n.idx\n\n\t// It is assumed that the index has already been verified to be a\n\t// Role object, so let's get the resource's Role object\n\tobj := n.resources[state].Role\n\n\t// The Namespace should already be defined, so let's set the\n\t// namespace to the namespace defined in the Role object\n\tobj.SetNamespace(n.ins.GetNamespace())\n\n\t// found states if the Role was found\n\tfound := &rbacv1.Role{}\n\n\tklog.InfoS(\"Looking for Role\", \"name\", obj.Name, \"namespace\", obj.Namespace)\n\n\t// SetControllerReference sets the owner as a Controller OwnerReference\n\t// and is used for garbage collection of the controlled object. It is\n\t// also used to reconcile the owner object on changes to the controlled\n\t// object. If we cannot set the owner, then return NotReady\n\tif err := controllerutil.SetControllerReference(n.ins, &obj, n.rec.Scheme); err != nil {\n\t\treturn NotReady, err\n\t}\n\n\t// Look for the Role to see if it exists, and if so, check if it's\n\t// Ready/NotReady. If the Role does not exist, then attempt to create it\n\terr := n.rec.Client.Get(context.TODO(), types.NamespacedName{Namespace: obj.Namespace, Name: obj.Name}, found)\n\tif err != nil && errors.IsNotFound(err) {\n\t\tklog.InfoS(\"Role not found, creating\", \"name\", obj.Name, \"namespace\", obj.Namespace)\n\t\terr = n.rec.Client.Create(context.TODO(), &obj)\n\t\tif err != nil {\n\t\t\tklog.ErrorS(err, \"Couldn't create Role\", \"name\", obj.Name, \"namespace\", obj.Namespace)\n\t\t\treturn NotReady, err\n\t\t}\n\t\treturn Ready, nil\n\t} else if err != nil {\n\t\treturn NotReady, err\n\t}\n\n\t// If we found the Role, let's attempt to update it\n\tklog.InfoS(\"Found Role, updating\", \"name\", obj.Name, \"namespace\", obj.Namespace)\n\terr = n.rec.Client.Update(context.TODO(), &obj)\n\tif err != nil {\n\t\treturn NotReady, err\n\t}\n\n\treturn Ready, nil\n}", "title": "" }, { "docid": "794c4c2c5ba7a0445397f18d0dfe9b79", "score": "0.5409219", "text": "func (r ApiGetFabricFcUplinkRoleListRequest) Apply(apply string) ApiGetFabricFcUplinkRoleListRequest {\n\tr.apply = &apply\n\treturn r\n}", "title": "" }, { "docid": "b7a179360d7b940708e974fd628a61c3", "score": "0.5391925", "text": "func (r *REST) Update(ctx kapi.Context, obj runtime.Object) (runtime.Object, bool, error) {\n\trole, ok := obj.(*authorizationapi.Role)\n\tif !ok {\n\t\treturn nil, false, fmt.Errorf(\"not a role: %#v\", obj)\n\t}\n\tif !kapi.ValidNamespace(ctx, &role.ObjectMeta) {\n\t\treturn nil, false, kerrors.NewConflict(\"role\", role.Namespace, fmt.Errorf(\"Role.Namespace does not match the provided context\"))\n\t}\n\n\tif errs := validation.ValidateRole(role); len(errs) > 0 {\n\t\treturn nil, false, kerrors.NewInvalid(\"role\", role.Name, errs)\n\t}\n\n\tpolicy, err := r.EnsurePolicy(ctx)\n\tif err != nil {\n\t\treturn nil, false, err\n\t}\n\tif !doesRoleExist(role.Name, policy) 
{\n\t\treturn nil, false, fmt.Errorf(\"role %v does not exist\", role.Name)\n\t}\n\n\t// set defaults\n\trole.CreationTimestamp = util.Now()\n\n\tpolicy.Roles[role.Name] = *role\n\tpolicy.LastModified = util.Now()\n\n\tif err := r.registry.UpdatePolicy(ctx, policy); err != nil {\n\t\treturn nil, false, err\n\t}\n\treturn role, false, nil\n}", "title": "" }, { "docid": "d6d7149a0b33d1fe35d651388981ab1b", "score": "0.53883696", "text": "func (b *azureSecretBackend) pathRoleUpdate(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {\n\tvar resp *logical.Response\n\n\tclient, err := b.getClient(ctx, req.Storage)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// load or create role\n\tname := d.Get(\"name\").(string)\n\trole, err := getRole(ctx, name, req.Storage)\n\tif err != nil {\n\t\treturn nil, errwrap.Wrapf(\"error reading role: {{err}}\", err)\n\t}\n\n\tif role == nil {\n\t\tif req.Operation == logical.UpdateOperation {\n\t\t\treturn nil, errors.New(\"role entry not found during update operation\")\n\t\t}\n\t\trole = &roleEntry{\n\t\t\tCredentialType: credentialTypeSP,\n\t\t}\n\t}\n\n\t// load and validate TTLs\n\tif ttlRaw, ok := d.GetOk(\"ttl\"); ok {\n\t\trole.TTL = time.Duration(ttlRaw.(int)) * time.Second\n\t} else if req.Operation == logical.CreateOperation {\n\t\trole.TTL = time.Duration(d.Get(\"ttl\").(int)) * time.Second\n\t}\n\n\tif maxTTLRaw, ok := d.GetOk(\"max_ttl\"); ok {\n\t\trole.MaxTTL = time.Duration(maxTTLRaw.(int)) * time.Second\n\t} else if req.Operation == logical.CreateOperation {\n\t\trole.MaxTTL = time.Duration(d.Get(\"max_ttl\").(int)) * time.Second\n\t}\n\n\tif role.MaxTTL != 0 && role.TTL > role.MaxTTL {\n\t\treturn logical.ErrorResponse(\"ttl cannot be greater than max_ttl\"), nil\n\t}\n\n\t// update and verify Application Object ID if provided\n\tif appObjectID, ok := d.GetOk(\"application_object_id\"); ok {\n\t\trole.ApplicationObjectID = appObjectID.(string)\n\t}\n\n\tif role.ApplicationObjectID != \"\" {\n\t\tapp, err := client.provider.GetApplication(ctx, role.ApplicationObjectID)\n\t\tif err != nil {\n\t\t\treturn nil, errwrap.Wrapf(\"error loading Application: {{err}}\", err)\n\t\t}\n\t\trole.ApplicationID = to.String(app.AppID)\n\t}\n\n\t// Parse the Azure roles\n\tif roles, ok := d.GetOk(\"azure_roles\"); ok {\n\t\tparsedRoles := make([]*AzureRole, 0) // non-nil to avoid a \"missing roles\" error later\n\n\t\terr := jsonutil.DecodeJSON([]byte(roles.(string)), &parsedRoles)\n\t\tif err != nil {\n\t\t\treturn logical.ErrorResponse(\"error parsing Azure roles '%s': %s\", roles.(string), err.Error()), nil\n\t\t}\n\t\trole.AzureRoles = parsedRoles\n\t}\n\n\t// Parse the Azure groups\n\tif groups, ok := d.GetOk(\"azure_groups\"); ok {\n\t\tparsedGroups := make([]*AzureGroup, 0) // non-nil to avoid a \"missing groups\" error later\n\n\t\terr := jsonutil.DecodeJSON([]byte(groups.(string)), &parsedGroups)\n\t\tif err != nil {\n\t\t\treturn logical.ErrorResponse(\"error parsing Azure groups '%s': %s\", groups.(string), err.Error()), nil\n\t\t}\n\t\trole.AzureGroups = parsedGroups\n\t}\n\n\t// update and verify Azure roles, including looking up each role by ID or name.\n\troleSet := make(map[string]bool)\n\tfor _, r := range role.AzureRoles {\n\t\tvar roleDef authorization.RoleDefinition\n\t\tif r.RoleID != \"\" {\n\t\t\troleDef, err = client.provider.GetRoleByID(ctx, r.RoleID)\n\t\t\tif err != nil {\n\t\t\t\tif strings.Contains(err.Error(), \"RoleDefinitionDoesNotExist\") {\n\t\t\t\t\treturn 
logical.ErrorResponse(\"no role found for role_id: '%s'\", r.RoleID), nil\n\t\t\t\t}\n\t\t\t\treturn nil, errwrap.Wrapf(\"unable to lookup Azure role: {{err}}\", err)\n\t\t\t}\n\t\t} else {\n\t\t\tdefs, err := client.findRoles(ctx, r.RoleName)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errwrap.Wrapf(\"unable to lookup Azure role: {{err}}\", err)\n\t\t\t}\n\t\t\tif l := len(defs); l == 0 {\n\t\t\t\treturn logical.ErrorResponse(\"no role found for role_name: '%s'\", r.RoleName), nil\n\t\t\t} else if l > 1 {\n\t\t\t\treturn logical.ErrorResponse(\"multiple matches found for role_name: '%s'. Specify role by ID instead.\", r.RoleName), nil\n\t\t\t}\n\t\t\troleDef = defs[0]\n\t\t}\n\n\t\troleDefID := to.String(roleDef.ID)\n\t\troleDefName := to.String(roleDef.RoleName)\n\n\t\tr.RoleName, r.RoleID = roleDefName, roleDefID\n\n\t\trsKey := r.RoleID + \"||\" + r.Scope\n\t\tif roleSet[rsKey] {\n\t\t\treturn logical.ErrorResponse(\"duplicate role_id and scope: '%s', '%s'\", r.RoleID, r.Scope), nil\n\t\t}\n\t\troleSet[rsKey] = true\n\t}\n\n\t// update and verify Azure groups, including looking up each group by ID or name.\n\tgroupSet := make(map[string]bool)\n\tfor _, r := range role.AzureGroups {\n\t\tvar groupDef graphrbac.ADGroup\n\t\tif r.ObjectID != \"\" {\n\t\t\tgroupDef, err = client.provider.GetGroup(ctx, r.ObjectID)\n\t\t\tif err != nil {\n\t\t\t\tif strings.Contains(err.Error(), \"Request_ResourceNotFound\") {\n\t\t\t\t\treturn logical.ErrorResponse(\"no group found for object_id: '%s'\", r.ObjectID), nil\n\t\t\t\t}\n\t\t\t\treturn nil, errwrap.Wrapf(\"unable to lookup Azure group: {{err}}\", err)\n\t\t\t}\n\t\t} else {\n\t\t\tdefs, err := client.findGroups(ctx, r.GroupName)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errwrap.Wrapf(\"unable to lookup Azure group: {{err}}\", err)\n\t\t\t}\n\t\t\tif l := len(defs); l == 0 {\n\t\t\t\treturn logical.ErrorResponse(\"no group found for group_name: '%s'\", r.GroupName), nil\n\t\t\t} else if l > 1 {\n\t\t\t\treturn logical.ErrorResponse(\"multiple matches found for group_name: '%s'. 
Specify group by ObjectID instead.\", r.GroupName), nil\n\t\t\t}\n\t\t\tgroupDef = defs[0]\n\t\t}\n\n\t\tgroupDefID := to.String(groupDef.ObjectID)\n\t\tgroupDefName := to.String(groupDef.DisplayName)\n\t\tr.GroupName, r.ObjectID = groupDefName, groupDefID\n\n\t\tif groupSet[r.ObjectID] {\n\t\t\treturn logical.ErrorResponse(\"duplicate object_id '%s'\", r.ObjectID), nil\n\t\t}\n\t\tgroupSet[r.ObjectID] = true\n\t}\n\n\tif role.ApplicationObjectID == \"\" && len(role.AzureRoles) == 0 && len(role.AzureGroups) == 0 {\n\t\treturn logical.ErrorResponse(\"either Azure role definitions, group definitions, or an Application Object ID must be provided\"), nil\n\t}\n\n\t// save role\n\terr = saveRole(ctx, req.Storage, role, name)\n\tif err != nil {\n\t\treturn nil, errwrap.Wrapf(\"error storing role: {{err}}\", err)\n\t}\n\n\treturn resp, nil\n}", "title": "" }, { "docid": "979ccaf5242891f0a9575b06a91f21cb", "score": "0.5381393", "text": "func (a *FabricApiService) UpdateFabricFcoeUplinkPcRole(ctx context.Context, moid string) ApiUpdateFabricFcoeUplinkPcRoleRequest {\n\treturn ApiUpdateFabricFcoeUplinkPcRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "ca92e4a34ecb34554f5718b42945689a", "score": "0.53778017", "text": "func (a *FabricApiService) GetFabricFcoeUplinkRoleList(ctx context.Context) ApiGetFabricFcoeUplinkRoleListRequest {\n\treturn ApiGetFabricFcoeUplinkRoleListRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "title": "" }, { "docid": "d20701898543ca8133ac4515004219e5", "score": "0.53759426", "text": "func (a *FabricApiService) UpdateFabricFcUplinkRoleExecute(r ApiUpdateFabricFcUplinkRoleRequest) (*FabricFcUplinkRole, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *FabricFcUplinkRole\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"FabricApiService.UpdateFabricFcUplinkRole\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/fabric/FcUplinkRoles/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.fabricFcUplinkRole == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"fabricFcUplinkRole is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.fabricFcUplinkRole\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, 
localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "title": "" }, { "docid": "b84d569ddad29d357ca9bed56d1d771f", "score": "0.53651845", "text": "func (s *Server) Update(ctx context.Context, message *rolespb.UpdateRequest) (*rolespb.UpdateResponse, error) {\n\tctx = context.WithValue(ctx, goa.MethodKey, \"update\")\n\tctx = context.WithValue(ctx, goa.ServiceKey, \"roles\")\n\tresp, err := s.UpdateH.Handle(ctx, message)\n\tif err != nil {\n\t\treturn nil, goagrpc.EncodeError(err)\n\t}\n\treturn resp.(*rolespb.UpdateResponse), nil\n}", "title": "" }, { "docid": 
"c70c728ce99875c224b083d37a54fc18", "score": "0.5360057", "text": "func (r ApiUpdateFabricFcUplinkPcRoleRequest) FabricFcUplinkPcRole(fabricFcUplinkPcRole FabricFcUplinkPcRole) ApiUpdateFabricFcUplinkPcRoleRequest {\n\tr.fabricFcUplinkPcRole = &fabricFcUplinkPcRole\n\treturn r\n}", "title": "" }, { "docid": "5a7ec78f53ec5f7d8d37d241502c86a1", "score": "0.53583306", "text": "func (a *FabricApiService) GetFabricFcUplinkRoleByMoid(ctx context.Context, moid string) ApiGetFabricFcUplinkRoleByMoidRequest {\n\treturn ApiGetFabricFcUplinkRoleByMoidRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "0c03bc5ea90c8c3d8bf46c5883c600d4", "score": "0.5357341", "text": "func (r ApiGetFabricUplinkRoleListRequest) Count(count bool) ApiGetFabricUplinkRoleListRequest {\n\tr.count = &count\n\treturn r\n}", "title": "" }, { "docid": "645308dfc26a73d3bf1c8bdda7339fd9", "score": "0.5352811", "text": "func (self VirtualMachineClient) UpdateRole(cloudServiceName, deploymentName, roleName string, role Role) (management.OperationId, error) {\n\tif cloudServiceName == \"\" {\n\t\treturn \"\", fmt.Errorf(errParamNotSpecified, \"cloudServiceName\")\n\t}\n\tif deploymentName == \"\" {\n\t\treturn \"\", fmt.Errorf(errParamNotSpecified, \"deploymentName\")\n\t}\n\tif roleName == \"\" {\n\t\treturn \"\", fmt.Errorf(errParamNotSpecified, \"roleName\")\n\t}\n\n\tdata, err := xml.Marshal(PersistentVMRole{Role: role})\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\trequestURL := fmt.Sprintf(azureRoleURL, cloudServiceName, deploymentName, roleName)\n\treturn self.client.SendAzurePutRequest(requestURL, \"text/xml\", data)\n}", "title": "" }, { "docid": "27754054d8a1cfabc5b9212fa12206d0", "score": "0.5335207", "text": "func (c *RoleClient) Update() *RoleUpdate {\n\tmutation := newRoleMutation(c.config, OpUpdate)\n\treturn &RoleUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}\n}", "title": "" }, { "docid": "27754054d8a1cfabc5b9212fa12206d0", "score": "0.5335207", "text": "func (c *RoleClient) Update() *RoleUpdate {\n\tmutation := newRoleMutation(c.config, OpUpdate)\n\treturn &RoleUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}\n}", "title": "" }, { "docid": "001af0b1cfc5275b6c8b0f7926c8de15", "score": "0.5333126", "text": "func (r *REST) Update(ctx kapi.Context, obj runtime.Object) (runtime.Object, bool, error) {\n\trole, ok := obj.(*authorizationapi.Role)\n\tif !ok {\n\t\treturn nil, false, fmt.Errorf(\"not a role: %#v\", obj)\n\t}\n\tif !kapi.ValidNamespace(ctx, &role.ObjectMeta) {\n\t\treturn nil, false, kerrors.NewConflict(\"role\", role.Namespace, fmt.Errorf(\"Role.Namespace does not match the provided context\"))\n\t}\n\n\tif errs := validation.ValidateRole(role); len(errs) > 0 {\n\t\treturn nil, false, kerrors.NewInvalid(\"role\", role.Name, errs)\n\t}\n\n\terr := r.registry.UpdateRole(ctx, role)\n\tif err != nil {\n\t\treturn nil, false, err\n\t}\n\treturn role, false, nil\n}", "title": "" }, { "docid": "3dbcb78430ea9ab11e97d74a0f400a72", "score": "0.53232914", "text": "func (r *AccessPackageAssignmentResourceRoleRequest) Update(ctx context.Context, reqObj *AccessPackageAssignmentResourceRole) error {\n\treturn r.JSONRequest(ctx, \"PATCH\", \"\", reqObj, nil)\n}", "title": "" }, { "docid": "e43d498934b2512ef5c79cd2c1b09c2b", "score": "0.53177816", "text": "func (a *FabricApiService) PatchFabricUplinkPcRole(ctx context.Context, moid string) ApiPatchFabricUplinkPcRoleRequest {\n\treturn ApiPatchFabricUplinkPcRoleRequest{\n\t\tApiService: 
a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "b395181926ac8f99936292383051880f", "score": "0.53085595", "text": "func (r *Role) URN() pulumi.URNOutput {\n\treturn r.s.URN()\n}", "title": "" }, { "docid": "f8d3b45d9645340078a2466781a1a9b4", "score": "0.5296464", "text": "func (r ApiGetFabricFcoeUplinkRoleListRequest) Apply(apply string) ApiGetFabricFcoeUplinkRoleListRequest {\n\tr.apply = &apply\n\treturn r\n}", "title": "" }, { "docid": "d287d2ae11b2662cb26331c2d0550062", "score": "0.5260782", "text": "func (b *backend) pathRoleCreateUpdate(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) {\n\troleName := strings.ToLower(data.Get(\"role\").(string))\n\tif roleName == \"\" {\n\t\treturn logical.ErrorResponse(\"missing role\"), nil\n\t}\n\n\tb.roleMutex.Lock()\n\tdefer b.roleMutex.Unlock()\n\n\troleEntry, err := b.nonLockedAWSRole(ctx, req.Storage, roleName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif roleEntry == nil {\n\t\troleID, err := uuid.GenerateUUID()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\troleEntry = &awsRoleEntry{\n\t\t\tRoleID: roleID,\n\t\t\tVersion: currentRoleStorageVersion,\n\t\t}\n\t} else {\n\t\tneedUpdate, err := b.upgradeRoleEntry(ctx, req.Storage, roleEntry)\n\t\tif err != nil {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"failed to update roleEntry: %v\", err)), nil\n\t\t}\n\t\tif needUpdate {\n\t\t\terr = b.nonLockedSetAWSRole(ctx, req.Storage, roleName, roleEntry)\n\t\t\tif err != nil {\n\t\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"failed to save upgraded roleEntry: %v\", err)), nil\n\t\t\t}\n\t\t}\n\t}\n\n\t// Fetch and set the bound parameters. There can't be default values\n\t// for these.\n\tif boundAmiIDRaw, ok := data.GetOk(\"bound_ami_id\"); ok {\n\t\troleEntry.BoundAmiIDs = boundAmiIDRaw.([]string)\n\t}\n\n\tif boundAccountIDRaw, ok := data.GetOk(\"bound_account_id\"); ok {\n\t\troleEntry.BoundAccountIDs = boundAccountIDRaw.([]string)\n\t}\n\n\tif boundRegionRaw, ok := data.GetOk(\"bound_region\"); ok {\n\t\troleEntry.BoundRegions = boundRegionRaw.([]string)\n\t}\n\n\tif boundVpcIDRaw, ok := data.GetOk(\"bound_vpc_id\"); ok {\n\t\troleEntry.BoundVpcIDs = boundVpcIDRaw.([]string)\n\t}\n\n\tif boundSubnetIDRaw, ok := data.GetOk(\"bound_subnet_id\"); ok {\n\t\troleEntry.BoundSubnetIDs = boundSubnetIDRaw.([]string)\n\t}\n\n\tif resolveAWSUniqueIDsRaw, ok := data.GetOk(\"resolve_aws_unique_ids\"); ok {\n\t\tswitch {\n\t\tcase req.Operation == logical.CreateOperation:\n\t\t\troleEntry.ResolveAWSUniqueIDs = resolveAWSUniqueIDsRaw.(bool)\n\t\tcase roleEntry.ResolveAWSUniqueIDs && !resolveAWSUniqueIDsRaw.(bool):\n\t\t\treturn logical.ErrorResponse(\"changing resolve_aws_unique_ids from true to false is not allowed\"), nil\n\t\tdefault:\n\t\t\troleEntry.ResolveAWSUniqueIDs = resolveAWSUniqueIDsRaw.(bool)\n\t\t}\n\t} else if req.Operation == logical.CreateOperation {\n\t\troleEntry.ResolveAWSUniqueIDs = data.Get(\"resolve_aws_unique_ids\").(bool)\n\t}\n\n\tif boundIamRoleARNRaw, ok := data.GetOk(\"bound_iam_role_arn\"); ok {\n\t\troleEntry.BoundIamRoleARNs = boundIamRoleARNRaw.([]string)\n\t}\n\n\tif boundIamInstanceProfileARNRaw, ok := data.GetOk(\"bound_iam_instance_profile_arn\"); ok {\n\t\troleEntry.BoundIamInstanceProfileARNs = boundIamInstanceProfileARNRaw.([]string)\n\t}\n\n\tif boundEc2InstanceIDRaw, ok := data.GetOk(\"bound_ec2_instance_id\"); ok {\n\t\troleEntry.BoundEc2InstanceIDs = boundEc2InstanceIDRaw.([]string)\n\t}\n\n\tif boundIamPrincipalARNRaw, 
ok := data.GetOk(\"bound_iam_principal_arn\"); ok {\n\t\tprincipalARNs := boundIamPrincipalARNRaw.([]string)\n\t\troleEntry.BoundIamPrincipalARNs = principalARNs\n\t\troleEntry.BoundIamPrincipalIDs = []string{}\n\t}\n\tif roleEntry.ResolveAWSUniqueIDs && len(roleEntry.BoundIamPrincipalIDs) == 0 {\n\t\t// we might be turning on resolution on this role, so ensure we update the IDs\n\t\tfor _, principalARN := range roleEntry.BoundIamPrincipalARNs {\n\t\t\tif !strings.HasSuffix(principalARN, \"*\") {\n\t\t\t\tprincipalID, err := b.resolveArnToUniqueIDFunc(ctx, req.Storage, principalARN)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"unable to resolve ARN %#v to internal ID: %s\", principalARN, err.Error())), nil\n\t\t\t\t}\n\t\t\t\troleEntry.BoundIamPrincipalIDs = append(roleEntry.BoundIamPrincipalIDs, principalID)\n\t\t\t}\n\t\t}\n\t}\n\n\tif inferRoleTypeRaw, ok := data.GetOk(\"inferred_entity_type\"); ok {\n\t\troleEntry.InferredEntityType = inferRoleTypeRaw.(string)\n\t}\n\n\tif inferredAWSRegionRaw, ok := data.GetOk(\"inferred_aws_region\"); ok {\n\t\troleEntry.InferredAWSRegion = inferredAWSRegionRaw.(string)\n\t}\n\n\t// auth_type is a special case as it's immutable and can't be changed once a role is created\n\tif authTypeRaw, ok := data.GetOk(\"auth_type\"); ok {\n\t\t// roleEntry.AuthType should only be \"\" when it's a new role; existing roles without an\n\t\t// auth_type should have already been upgraded to have one before we get here\n\t\tif roleEntry.AuthType == \"\" {\n\t\t\tswitch authTypeRaw.(string) {\n\t\t\tcase ec2AuthType, iamAuthType:\n\t\t\t\troleEntry.AuthType = authTypeRaw.(string)\n\t\t\tdefault:\n\t\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"unrecognized auth_type: %v\", authTypeRaw.(string))), nil\n\t\t\t}\n\t\t} else if authTypeRaw.(string) != roleEntry.AuthType {\n\t\t\treturn logical.ErrorResponse(\"changing auth_type on a role is not allowed\"), nil\n\t\t}\n\t} else if req.Operation == logical.CreateOperation {\n\t\tswitch req.MountType {\n\t\t// maintain backwards compatibility for old aws-ec2 auth types\n\t\tcase \"aws-ec2\":\n\t\t\troleEntry.AuthType = ec2AuthType\n\t\t// but default to iamAuth for new mounts going forward\n\t\tcase \"aws\":\n\t\t\troleEntry.AuthType = iamAuthType\n\t\tdefault:\n\t\t\troleEntry.AuthType = iamAuthType\n\t\t}\n\t}\n\n\tallowEc2Binds := roleEntry.AuthType == ec2AuthType\n\n\tif roleEntry.InferredEntityType != \"\" {\n\t\tswitch {\n\t\tcase roleEntry.AuthType != iamAuthType:\n\t\t\treturn logical.ErrorResponse(\"specified inferred_entity_type but didn't allow iam auth_type\"), nil\n\t\tcase roleEntry.InferredEntityType != ec2EntityType:\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified invalid inferred_entity_type: %s\", roleEntry.InferredEntityType)), nil\n\t\tcase roleEntry.InferredAWSRegion == \"\":\n\t\t\treturn logical.ErrorResponse(\"specified inferred_entity_type but not inferred_aws_region\"), nil\n\t\t}\n\t\tallowEc2Binds = true\n\t} else if roleEntry.InferredAWSRegion != \"\" {\n\t\treturn logical.ErrorResponse(\"specified inferred_aws_region but not inferred_entity_type\"), nil\n\t}\n\n\tnumBinds := 0\n\n\tif len(roleEntry.BoundAccountIDs) > 0 {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_account_id but not specifying ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif len(roleEntry.BoundRegions) > 0 {\n\t\tif roleEntry.AuthType != ec2AuthType {\n\t\t\treturn 
logical.ErrorResponse(\"specified bound_region but not specifying ec2 auth_type\"), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif len(roleEntry.BoundAmiIDs) > 0 {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_ami_id but not specifying ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif len(roleEntry.BoundIamInstanceProfileARNs) > 0 {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_iam_instance_profile_arn but not specifying ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif len(roleEntry.BoundEc2InstanceIDs) > 0 {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_ec2_instance_id but not specifying ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif len(roleEntry.BoundIamRoleARNs) > 0 {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_iam_role_arn but not specifying ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif len(roleEntry.BoundIamPrincipalARNs) > 0 {\n\t\tif roleEntry.AuthType != iamAuthType {\n\t\t\treturn logical.ErrorResponse(\"specified bound_iam_principal_arn but not specifying iam auth_type\"), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif len(roleEntry.BoundVpcIDs) > 0 {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_vpc_id but not specifying ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif len(roleEntry.BoundSubnetIDs) > 0 {\n\t\tif !allowEc2Binds {\n\t\t\treturn logical.ErrorResponse(fmt.Sprintf(\"specified bound_subnet_id but not specifying ec2 auth_type or inferring %s\", ec2EntityType)), nil\n\t\t}\n\t\tnumBinds++\n\t}\n\n\tif numBinds == 0 {\n\t\treturn logical.ErrorResponse(\"at least one bound parameter should be specified on the role\"), nil\n\t}\n\n\tpoliciesRaw, ok := data.GetOk(\"policies\")\n\tif ok {\n\t\troleEntry.Policies = policyutil.ParsePolicies(policiesRaw)\n\t} else if req.Operation == logical.CreateOperation {\n\t\troleEntry.Policies = []string{}\n\t}\n\n\tdisallowReauthenticationBool, ok := data.GetOk(\"disallow_reauthentication\")\n\tif ok {\n\t\tif roleEntry.AuthType != ec2AuthType {\n\t\t\treturn logical.ErrorResponse(\"specified disallow_reauthentication when not using ec2 auth type\"), nil\n\t\t}\n\t\troleEntry.DisallowReauthentication = disallowReauthenticationBool.(bool)\n\t} else if req.Operation == logical.CreateOperation && roleEntry.AuthType == ec2AuthType {\n\t\troleEntry.DisallowReauthentication = data.Get(\"disallow_reauthentication\").(bool)\n\t}\n\n\tallowInstanceMigrationBool, ok := data.GetOk(\"allow_instance_migration\")\n\tif ok {\n\t\tif roleEntry.AuthType != ec2AuthType {\n\t\t\treturn logical.ErrorResponse(\"specified allow_instance_migration when not using ec2 auth type\"), nil\n\t\t}\n\t\troleEntry.AllowInstanceMigration = allowInstanceMigrationBool.(bool)\n\t} else if req.Operation == logical.CreateOperation && roleEntry.AuthType == ec2AuthType {\n\t\troleEntry.AllowInstanceMigration = data.Get(\"allow_instance_migration\").(bool)\n\t}\n\n\tif roleEntry.AllowInstanceMigration && roleEntry.DisallowReauthentication {\n\t\treturn logical.ErrorResponse(\"cannot specify both disallow_reauthentication=true and allow_instance_migration=true\"), nil\n\t}\n\n\tvar resp logical.Response\n\n\tttlRaw, ok := data.GetOk(\"ttl\")\n\tif ok {\n\t\tttl := 
time.Duration(ttlRaw.(int)) * time.Second\n\t\tdefaultLeaseTTL := b.System().DefaultLeaseTTL()\n\t\tif ttl > defaultLeaseTTL {\n\t\t\tresp.AddWarning(fmt.Sprintf(\"Given ttl of %d seconds greater than current mount/system default of %d seconds; ttl will be capped at login time\", ttl/time.Second, defaultLeaseTTL/time.Second))\n\t\t}\n\t\troleEntry.TTL = ttl\n\t} else if req.Operation == logical.CreateOperation {\n\t\troleEntry.TTL = time.Duration(data.Get(\"ttl\").(int)) * time.Second\n\t}\n\n\tmaxTTLInt, ok := data.GetOk(\"max_ttl\")\n\tif ok {\n\t\tmaxTTL := time.Duration(maxTTLInt.(int)) * time.Second\n\t\tsystemMaxTTL := b.System().MaxLeaseTTL()\n\t\tif maxTTL > systemMaxTTL {\n\t\t\tresp.AddWarning(fmt.Sprintf(\"Given max_ttl of %d seconds greater than current mount/system default of %d seconds; max_ttl will be capped at login time\", maxTTL/time.Second, systemMaxTTL/time.Second))\n\t\t}\n\n\t\tif maxTTL < time.Duration(0) {\n\t\t\treturn logical.ErrorResponse(\"max_ttl cannot be negative\"), nil\n\t\t}\n\n\t\troleEntry.MaxTTL = maxTTL\n\t} else if req.Operation == logical.CreateOperation {\n\t\troleEntry.MaxTTL = time.Duration(data.Get(\"max_ttl\").(int)) * time.Second\n\t}\n\n\tif roleEntry.MaxTTL != 0 && roleEntry.MaxTTL < roleEntry.TTL {\n\t\treturn logical.ErrorResponse(\"ttl should be shorter than max_ttl\"), nil\n\t}\n\n\tperiodRaw, ok := data.GetOk(\"period\")\n\tif ok {\n\t\troleEntry.Period = time.Second * time.Duration(periodRaw.(int))\n\t} else if req.Operation == logical.CreateOperation {\n\t\troleEntry.Period = time.Second * time.Duration(data.Get(\"period\").(int))\n\t}\n\n\tif roleEntry.Period > b.System().MaxLeaseTTL() {\n\t\treturn logical.ErrorResponse(fmt.Sprintf(\"'period' of '%s' is greater than the backend's maximum lease TTL of '%s'\", roleEntry.Period.String(), b.System().MaxLeaseTTL().String())), nil\n\t}\n\n\troleTagStr, ok := data.GetOk(\"role_tag\")\n\tif ok {\n\t\tif roleEntry.AuthType != ec2AuthType {\n\t\t\treturn logical.ErrorResponse(\"tried to enable role_tag when not using ec2 auth method\"), nil\n\t\t}\n\t\troleEntry.RoleTag = roleTagStr.(string)\n\t\t// There is a limit of 127 characters on the tag key for AWS EC2 instances.\n\t\t// Complying to that requirement, do not allow the value of 'key' to be more than that.\n\t\tif len(roleEntry.RoleTag) > 127 {\n\t\t\treturn logical.ErrorResponse(\"length of role tag exceeds the EC2 key limit of 127 characters\"), nil\n\t\t}\n\t} else if req.Operation == logical.CreateOperation && roleEntry.AuthType == ec2AuthType {\n\t\troleEntry.RoleTag = data.Get(\"role_tag\").(string)\n\t}\n\n\tif roleEntry.HMACKey == \"\" {\n\t\troleEntry.HMACKey, err = uuid.GenerateUUID()\n\t\tif err != nil {\n\t\t\treturn nil, errwrap.Wrapf(\"failed to generate role HMAC key: {{err}}\", err)\n\t\t}\n\t}\n\n\tif err := b.nonLockedSetAWSRole(ctx, req.Storage, roleName, roleEntry); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif len(resp.Warnings) == 0 {\n\t\treturn nil, nil\n\t}\n\n\treturn &resp, nil\n}", "title": "" }, { "docid": "ee897078f4d23c53a0b19d8ce3172df0", "score": "0.5257495", "text": "func (r ApiUpdateFabricFcoeUplinkPcRoleRequest) FabricFcoeUplinkPcRole(fabricFcoeUplinkPcRole FabricFcoeUplinkPcRole) ApiUpdateFabricFcoeUplinkPcRoleRequest {\n\tr.fabricFcoeUplinkPcRole = &fabricFcoeUplinkPcRole\n\treturn r\n}", "title": "" }, { "docid": "16d1590ec932c508202e1981aee3140f", "score": "0.5243178", "text": "func RoleBinding(n NFD) (ResourceStatus, error) {\n\t// state represents the resource's 'control' function index\n\tstate 
:= n.idx\n\n\t// It is assumed that the index has already been verified to be a\n\t// RoleBinding object, so let's get the resource's RoleBinding\n\t// object\n\tobj := n.resources[state].RoleBinding\n\n\t// The Namespace should already be defined, so let's set the\n\t// namespace to the namespace defined in the\n\tobj.SetNamespace(n.ins.GetNamespace())\n\n\t// found states if the RoleBinding was found\n\tfound := &rbacv1.RoleBinding{}\n\n\tklog.InfoS(\"Looking for RoleBinding\", \"name\", obj.Name, \"namespace\", obj.Namespace)\n\n\t// SetControllerReference sets the owner as a Controller OwnerReference\n\t// and is used for garbage collection of the controlled object. It is\n\t// also used to reconcile the owner object on changes to the controlled\n\tif err := controllerutil.SetControllerReference(n.ins, &obj, n.rec.Scheme); err != nil {\n\t\treturn NotReady, err\n\t}\n\n\t// Look for the RoleBinding to see if it exists, and if so, check if\n\t// it's Ready/NotReady. If the RoleBinding does not exist, then attempt\n\t// to create it\n\terr := n.rec.Client.Get(context.TODO(), types.NamespacedName{Namespace: obj.Namespace, Name: obj.Name}, found)\n\tif err != nil && errors.IsNotFound(err) {\n\t\tklog.InfoS(\"RoleBinding not found, creating\", \"name\", obj.Name, \"namespace\", obj.Namespace)\n\t\terr = n.rec.Client.Create(context.TODO(), &obj)\n\t\tif err != nil {\n\t\t\tklog.ErrorS(err, \"Couldn't create RoleBinding\", \"name\", obj.Name, \"namespace\", obj.Namespace)\n\t\t\treturn NotReady, err\n\t\t}\n\t\treturn Ready, nil\n\t} else if err != nil {\n\t\treturn NotReady, err\n\t}\n\n\t// If we found the RoleBinding, let's attempt to update it\n\tklog.InfoS(\"RoleBinding found, updating\", \"name\", obj.Name, \"namespace\", obj.Namespace)\n\terr = n.rec.Client.Update(context.TODO(), &obj)\n\tif err != nil {\n\t\treturn NotReady, err\n\t}\n\n\treturn Ready, nil\n}", "title": "" }, { "docid": "4f46baf3e6277cd0fbd175c3fa26a9c1", "score": "0.5238157", "text": "func (a *FabricApiService) DeleteFabricFcUplinkRole(ctx context.Context, moid string) ApiDeleteFabricFcUplinkRoleRequest {\n\treturn ApiDeleteFabricFcUplinkRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "title": "" }, { "docid": "5d9b3ac38d0edf987226e0ba89d1b185", "score": "0.52037895", "text": "func (res roleMemberBase) Role() *types.Role {\n\treturn res.role\n}", "title": "" }, { "docid": "8ebb884ab154ff1cc4f3ccddab01a654", "score": "0.52026856", "text": "func (r *EndpointsAuthV1RestClient) AutoUpdateRole(ctx context.Context, in *Role) (*Role, error) {\n\tpath := makeURIAuthV1AutoUpdateRoleUpdateOper(in)\n\tif r.bufferId != \"\" {\n\t\tpath = strings.Replace(path, \"/configs\", \"/staging/\"+r.bufferId, 1)\n\t}\n\treq, err := r.getHTTPRequest(ctx, in, \"PUT\", path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tresp, err := r.client.Do(req.WithContext(ctx))\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"request failed (%s)\", err)\n\t}\n\tdefer resp.Body.Close()\n\tret, err := decodeHTTPrespAuthV1AutoUpdateRole(ctx, resp)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret.(*Role), err\n}", "title": "" }, { "docid": "bf183d0de539a73a94e66c4e9c127580", "score": "0.5202026", "text": "func (r ApiCreateFabricFcUplinkPcRoleRequest) FabricFcUplinkPcRole(fabricFcUplinkPcRole FabricFcUplinkPcRole) ApiCreateFabricFcUplinkPcRoleRequest {\n\tr.fabricFcUplinkPcRole = &fabricFcUplinkPcRole\n\treturn r\n}", "title": "" }, { "docid": "db118bcff50b43f740018f66140566f5", "score": "0.51952714", "text": "func (r 
*Reconciler) createOrUpdateRoleBinding(clusterName string) error {\n\troleName := roleName(clusterName)\n\tacmRoleBinding := helpers.NewRoleBinding(roleName, clusterName).Groups(subjectPrefix + clusterName).BindingOrDie()\n\n\t// role and rolebinding have the same name\n\tbinding, err := r.kubeClient.RbacV1().RoleBindings(clusterName).Get(context.TODO(), roleName, metav1.GetOptions{})\n\tif err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\t_, err = r.kubeClient.RbacV1().RoleBindings(clusterName).Create(context.TODO(), &acmRoleBinding, metav1.CreateOptions{})\n\t\t}\n\t\treturn err\n\t}\n\n\tneedUpdate := false\n\tif !reflect.DeepEqual(acmRoleBinding.RoleRef, binding.RoleRef) {\n\t\tneedUpdate = true\n\t\tbinding.RoleRef = acmRoleBinding.RoleRef\n\t}\n\tif !reflect.DeepEqual(acmRoleBinding.Subjects, binding.Subjects) {\n\t\tneedUpdate = true\n\t\tbinding.Subjects = acmRoleBinding.Subjects\n\t}\n\tif needUpdate {\n\t\t_, err = r.kubeClient.RbacV1().RoleBindings(clusterName).Update(context.TODO(), binding, metav1.UpdateOptions{})\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "4fca557a1499c348990a00a6fae66700", "score": "0.5179893", "text": "func (r *REST) Update(ctx kapi.Context, obj runtime.Object) (runtime.Object, bool, error) {\n\troleBinding, ok := obj.(*authorizationapi.RoleBinding)\n\tif !ok {\n\t\treturn nil, false, fmt.Errorf(\"not a roleBinding: %#v\", obj)\n\t}\n\tif !kapi.ValidNamespace(ctx, &roleBinding.ObjectMeta) {\n\t\treturn nil, false, kerrors.NewConflict(\"roleBinding\", roleBinding.Namespace, fmt.Errorf(\"RoleBinding.Namespace does not match the provided context\"))\n\t}\n\n\tif errs := validation.ValidateRoleBinding(roleBinding); len(errs) > 0 {\n\t\treturn nil, false, kerrors.NewInvalid(\"roleBinding\", roleBinding.Name, errs)\n\t}\n\n\terr := r.registry.UpdateRoleBinding(ctx, roleBinding, false)\n\tif err != nil {\n\t\treturn nil, false, err\n\t}\n\treturn roleBinding, false, nil\n}", "title": "" }, { "docid": "6b193a2672b73cfd4562c2d3a70f9720", "score": "0.51745355", "text": "func (a *FabricApiService) CreateFabricFcUplinkRoleExecute(r ApiCreateFabricFcUplinkRoleRequest) (*FabricFcUplinkRole, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *FabricFcUplinkRole\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"FabricApiService.CreateFabricFcUplinkRole\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/fabric/FcUplinkRoles\"\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.fabricFcUplinkRole == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"fabricFcUplinkRole is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = 
localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\tif r.ifNoneMatch != nil {\n\t\tlocalVarHeaderParams[\"If-None-Match\"] = parameterToString(*r.ifNoneMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.fabricFcUplinkRole\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "title": "" }, { "docid": "e5fcadd723b4796fd390435ea2fdee98", "score": "0.51678264", "text": "func (a *FabricApiService) 
CreateFabricFcUplinkPcRole(ctx context.Context) ApiCreateFabricFcUplinkPcRoleRequest {\n\treturn ApiCreateFabricFcUplinkPcRoleRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "title": "" }, { "docid": "6e9d58b0216aee6284a8be8ac19feabb", "score": "0.51665163", "text": "func (r ApiGetFabricFcUplinkRoleListRequest) Count(count bool) ApiGetFabricFcUplinkRoleListRequest {\n\tr.count = &count\n\treturn r\n}", "title": "" }, { "docid": "a3ea25b45680d4edc216b53ec28ff454", "score": "0.5165032", "text": "func (r *REST) Update(ctx kapi.Context, obj runtime.Object) (runtime.Object, bool, error) {\n\troleBinding, ok := obj.(*authorizationapi.RoleBinding)\n\tif !ok {\n\t\treturn nil, false, fmt.Errorf(\"not a roleBinding: %#v\", obj)\n\t}\n\tif !kapi.ValidNamespace(ctx, &roleBinding.ObjectMeta) {\n\t\treturn nil, false, kerrors.NewConflict(\"roleBinding\", roleBinding.Namespace, fmt.Errorf(\"RoleBinding.Namespace does not match the provided context\"))\n\t}\n\n\tif errs := validation.ValidateRoleBinding(roleBinding); len(errs) > 0 {\n\t\treturn nil, false, kerrors.NewInvalid(\"roleBinding\", roleBinding.Name, errs)\n\t}\n\n\terr := r.registry.UpdateRoleBinding(ctx, roleBinding)\n\tif err != nil {\n\t\treturn nil, false, err\n\t}\n\treturn roleBinding, false, nil\n}", "title": "" }, { "docid": "aea1c004df45955b69cb6f7761717703", "score": "0.51528066", "text": "func (s *Server) UpdateRole(ctx context.Context, req *request.UpdateRole) (*response.Role, error) {\n\terr := validation.New(validation.Options{\n\t\tTarget: \"role\",\n\t\tRequest: *req,\n\t\tRules: validation.Rules{\n\t\t\t\"OrgId\": []string{\"required\"},\n\t\t\t\"ServerId\": []string{\"required\"},\n\t\t\t\"Name\": []string{\"required\"},\n\t\t},\n\t}).Validate()\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tc, err := s.createClient(ctx, req.OrgId, req.ServerId)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\trunList := req.RunList\n\tif len(runList) == 0 {\n\t\trunList = []string{}\n\t}\n\n\tdefaultAttributes, err := StructToJSON(req.DefaultAttributes)\n\tif err != nil {\n\t\treturn nil, status.Error(codes.Internal, err.Error())\n\t}\n\n\toverrideAttributes, err := StructToJSON(req.OverrideAttributes)\n\tif err != nil {\n\t\treturn nil, status.Error(codes.Internal, err.Error())\n\t}\n\n\trole, err := c.client.Roles.Put(\n\t\t&chef.Role{\n\t\t\tName: req.Name,\n\t\t\tDescription: req.Description,\n\t\t\tRunList: runList,\n\t\t\tDefaultAttributes: defaultAttributes,\n\t\t\tOverrideAttributes: overrideAttributes,\n\t\t\tEnvRunList: chef.EnvRunList{},\n\t\t})\n\n\tif err != nil {\n\t\treturn nil, ParseAPIError(err)\n\t}\n\n\treturn fromAPIToRoleResponse(role)\n}", "title": "" }, { "docid": "ea4e14516ca3bdfc48b59576aa0f864e", "score": "0.5147721", "text": "func (r *ReconcileCoherenceCluster) updateRole(p params) (reconcile.Result, error) {\n\tlogger := p.reqLogger.WithValues(\"Role\", p.existingRole.GetName())\n\n\tif reflect.DeepEqual(p.existingRole.Spec, p.desiredRole) {\n\t\t// nothing to do\n\t\tlogger.Info(\"Existing Role is at the desired spec\")\n\t\treturn reconcile.Result{}, nil\n\t}\n\n\tdiff := deep.Equal(p.existingRole.Spec, p.desiredRole)\n\tlogger.Info(\"Updating CoherenceRole - diff\\n \" + strings.Join(diff, \"\\n \"+\"\\n\"))\n\n\t// Create the CoherenceRole resource in k8s which will be detected by the role controller\n\tp.existingRole.Spec = p.desiredRole\n\terr := r.client.Update(context.TODO(), &p.existingRole)\n\n\tif err == nil {\n\t\t// send a successful update event\n\t\tmsg := fmt.Sprintf(updateEventMessage, 
p.existingRole.Name, p.cluster.Name)\n\t\tr.events.Event(p.cluster, v1.EventTypeNormal, eventReasonUpdated, msg)\n\t} else {\n\t\t// send a failed update event\n\t\tmsg := fmt.Sprintf(updateFailedEventMessage, p.existingRole.Name, p.cluster.Name, err.Error())\n\t\tr.events.Event(p.cluster, v1.EventTypeNormal, eventReasonFailedUpdate, msg)\n\t}\n\n\treturn reconcile.Result{}, err\n}", "title": "" }, { "docid": "fde7106dd8d32f1cccc842b8a40ef38c", "score": "0.5146374", "text": "func (c *Client) RoleUpdate(id string, r *types.Role) error {\n\tv := url.Values{}\n\tv.Set(\"fields\", \"*\")\n\tv.Set(\"related\", strings.Join([]string{\"role_service_access_by_role_id\", \"role_lookup_by_role_id\"}, \",\"))\n\treturn c.send(\"PUT\", \"/api/v2/system/role/\"+id+\"?\"+v.Encode(), 200, *r, r)\n}", "title": "" }, { "docid": "98c039b559ca4e90024febf9203d5b83", "score": "0.51387215", "text": "func (r *role)Inherits(newRole *role) *role {\n\tif r.name != newRole.name {\n\t\tfor _, resource := range (newRole.resources.resourceMap) {\n\t\t\tr.AllowResource(resource.name)\n\t\t}\n\t}\n\treturn r\n}", "title": "" }, { "docid": "a533d3d2b2ba3987201327b6c7d86a69", "score": "0.51337713", "text": "func (a *FabricApiService) GetFabricUplinkPcRoleList(ctx context.Context) ApiGetFabricUplinkPcRoleListRequest {\n\treturn ApiGetFabricUplinkPcRoleListRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "title": "" }, { "docid": "b4093cfac1a84b05b290a884440841f6", "score": "0.51317036", "text": "func (o *Role) Update() (*restapi.GenericMapResponse, error) {\n\tif o.ID == \"\" {\n\t\treturn nil, errors.New(\"error: ID is empty\")\n\t}\n\tvar queryArg = make(map[string]interface{})\n\tqueryArg[\"Name\"] = o.ID\n\tif o.Name != \"\" {\n\t\tqueryArg[\"NewName\"] = o.Name\n\t}\n\tif o.Description != \"\" {\n\t\tqueryArg[\"Description\"] = o.Description\n\t}\n\n\treply, err := o.client.CallGenericMapAPI(o.apiUpdate, queryArg)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif !reply.Success {\n\t\treturn nil, errors.New(reply.Message)\n\t}\n\n\treturn reply, nil\n}", "title": "" }, { "docid": "6901bdfb96a41639f4e1b63efe6e3a63", "score": "0.5130132", "text": "func (r ApiPatchFabricFcUplinkPcRoleRequest) FabricFcUplinkPcRole(fabricFcUplinkPcRole FabricFcUplinkPcRole) ApiPatchFabricFcUplinkPcRoleRequest {\n\tr.fabricFcUplinkPcRole = &fabricFcUplinkPcRole\n\treturn r\n}", "title": "" }, { "docid": "6cf61ccc5053eca804dcee6d89df5d4e", "score": "0.5127046", "text": "func (s *RBACRoleService) Update(ctx context.Context,\n\trole *RBACRole) (*RBACRole, error) {\n\n\tif role == nil {\n\t\treturn nil, errors.New(\"cannot update a nil Role\")\n\t}\n\n\tif isEmptyString(role.ID) {\n\t\treturn nil, errors.New(\"ID cannot be nil for Update operation\")\n\t}\n\n\tendpoint := fmt.Sprintf(\"/rbac/roles/%v\", *role.ID)\n\treq, err := s.client.NewRequest(\"PATCH\", endpoint, nil, role)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar updatedRole RBACRole\n\t_, err = s.client.Do(ctx, req, &updatedRole)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &updatedRole, nil\n}", "title": "" }, { "docid": "eeeec015d4b62daa3f9dac7aff5f84cc", "score": "0.5102789", "text": "func (b *cognitoSecretBackend) pathRoleUpdate(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {\n\tvar resp *logical.Response\n\n\t// load or create role\n\tname := d.Get(\"name\").(string)\n\trole, err := getRole(ctx, name, req.Storage)\n\tif err != nil {\n\t\treturn nil, errwrap.Wrapf(\"error reading role: {{err}}\", 
err)\n\t}\n\n\tif role == nil {\n\t\tif req.Operation == logical.UpdateOperation {\n\t\t\treturn nil, errors.New(\"role entry not found during update operation\")\n\t\t}\n\t\trole = &roleEntry{\n\t\t\tCredentialType: credentialTypeClientCredentialsGrant,\n\t\t}\n\t}\n\n\t// update and verify credential type if provided\n\tif credentialType, ok := d.GetOk(\"credential_type\"); ok {\n\t\trole.CredentialType = credentialType.(string)\n\t}\n\n\t// update and verify Application Object ID if provided\n\tif cognitoPoolDomain, ok := d.GetOk(\"cognito_pool_domain\"); ok {\n\t\trole.CognitoPoolDomain = cognitoPoolDomain.(string)\n\t}\n\n\tif appClientSecret, ok := d.GetOk(\"app_client_secret\"); ok {\n\t\trole.AppClientSecret = appClientSecret.(string)\n\t}\n\n\tif region, ok := d.GetOk(\"region\"); ok {\n\t\trole.Region = region.(string)\n\t}\n\n\tif appClientId, ok := d.GetOk(\"app_client_id\"); ok {\n\t\trole.AppClientId = appClientId.(string)\n\t}\n\n\tif userPoolId, ok := d.GetOk(\"user_pool_id\"); ok {\n\t\trole.UserPoolId = userPoolId.(string)\n\t}\n\n\tif group, ok := d.GetOk(\"group\"); ok {\n\t\trole.Group = group.(string)\n\t}\n\n\tif dummyEmailDomain, ok := d.GetOk(\"dummy_email_domain\"); ok {\n\t\trole.DummyEmailDomain = dummyEmailDomain.(string)\n\t}\n\n\t// load and validate TTLs\n\tif ttlRaw, ok := d.GetOk(\"ttl\"); ok {\n\t\trole.TTL = time.Duration(ttlRaw.(int)) * time.Second\n\t} else if req.Operation == logical.CreateOperation {\n\t\trole.TTL = time.Duration(d.Get(\"ttl\").(int)) * time.Second\n\t}\n\n\tif maxTTLRaw, ok := d.GetOk(\"max_ttl\"); ok {\n\t\trole.MaxTTL = time.Duration(maxTTLRaw.(int)) * time.Second\n\t} else if req.Operation == logical.CreateOperation {\n\t\trole.MaxTTL = time.Duration(d.Get(\"max_ttl\").(int)) * time.Second\n\t}\n\n\tif role.MaxTTL != 0 && role.TTL > role.MaxTTL {\n\t\treturn logical.ErrorResponse(\"ttl cannot be greater than max_ttl\"), nil\n\t}\n\n\t// save role\n\terr = saveRole(ctx, req.Storage, role, name)\n\tif err != nil {\n\t\treturn nil, errwrap.Wrapf(\"error storing role: {{err}}\", err)\n\t}\n\n\treturn resp, nil\n}", "title": "" }, { "docid": "1c6833c8d00054bfa344bf713c626356", "score": "0.51027095", "text": "func (c *clusterRoles) Update(role *authorizationapi.ClusterRole) (result *authorizationapi.ClusterRole, err error) {\n\tresult = &authorizationapi.ClusterRole{}\n\terr = c.r.Put().Resource(\"clusterRoles\").Name(role.Name).Body(role).Do().Into(result)\n\treturn\n}", "title": "" }, { "docid": "72cd1d6400ebd66e00d458a7f412adc9", "score": "0.50981444", "text": "func (s *AccessService) UpsertRole(role services.Role, ttl time.Duration) error {\n\tdata, err := services.GetRoleMarshaler().MarshalRole(role)\n\tif err != nil {\n\t\treturn trace.Wrap(err)\n\t}\n\n\t// TODO(klizhentas): Picking smaller of the two ttls\n\tbackendTTL := backend.TTL(s.Clock(), role.Expiry())\n\tif backendTTL < ttl {\n\t\tttl = backendTTL\n\t}\n\n\terr = s.UpsertVal([]string{\"roles\", role.GetName()}, \"params\", []byte(data), ttl)\n\tif err != nil {\n\t\treturn trace.Wrap(err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "4eb09e9aadd4eef28c20adcd13eb6bb6", "score": "0.50925076", "text": "func (o RoleBindingOutput) RoleRef() RoleRefOutput {\n\treturn o.ApplyT(func(v *RoleBinding) RoleRefOutput { return v.RoleRef }).(RoleRefOutput)\n}", "title": "" }, { "docid": "0f13874562f22f82106d1792a3fdb157", "score": "0.508806", "text": "func (r *ReconcileCoherenceRole) upgrade(role *coh.CoherenceRole, existingRole *unstructured.Unstructured, replicas int32, 
desiredRole *coh.CoherenceInternalSpec) error {\n\t// Rolling upgrade\n\treqLogger := log.WithValues(\"Namespace\", role.Namespace, \"Name\", role.Name)\n\treqLogger.Info(\"Rolling upgrade of existing Role\")\n\n\tspec, err := coh.CoherenceInternalSpecAsMapFromSpec(desiredRole)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// update the CoherenceInternal, this should trigger an update of the Helm install\n\tdesiredRole.Replicas = &replicas\n\texistingRole.Object[\"spec\"] = spec\n\n\tif err = r.client.Update(context.TODO(), existingRole); err != nil {\n\t\treturn err\n\t}\n\n\t// Update this CoherenceRole's status\n\trole.Status.Status = coh.RoleStatusRollingUpgrade\n\tif err = r.client.Update(context.TODO(), role); err != nil {\n\t\treqLogger.Error(err, \"failed to update Status\")\n\t}\n\n\t// send a successful update event\n\tmsg := fmt.Sprintf(updateMessage, role.Name, role.Name)\n\tr.events.Event(role, corev1.EventTypeNormal, eventReasonUpdated, msg)\n\n\treturn nil\n}", "title": "" }, { "docid": "52b65dba4b840d4081a60c1a469ccf6b", "score": "0.50824267", "text": "func (a *FabricApiService) GetFabricUplinkRoleListExecute(r ApiGetFabricUplinkRoleListRequest) (*FabricUplinkRoleResponse, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodGet\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *FabricUplinkRoleResponse\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"FabricApiService.GetFabricUplinkRoleList\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/fabric/UplinkRoles\"\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\tif r.filter != nil {\n\t\tlocalVarQueryParams.Add(\"$filter\", parameterToString(*r.filter, \"\"))\n\t}\n\tif r.orderby != nil {\n\t\tlocalVarQueryParams.Add(\"$orderby\", parameterToString(*r.orderby, \"\"))\n\t}\n\tif r.top != nil {\n\t\tlocalVarQueryParams.Add(\"$top\", parameterToString(*r.top, \"\"))\n\t}\n\tif r.skip != nil {\n\t\tlocalVarQueryParams.Add(\"$skip\", parameterToString(*r.skip, \"\"))\n\t}\n\tif r.select_ != nil {\n\t\tlocalVarQueryParams.Add(\"$select\", parameterToString(*r.select_, \"\"))\n\t}\n\tif r.expand != nil {\n\t\tlocalVarQueryParams.Add(\"$expand\", parameterToString(*r.expand, \"\"))\n\t}\n\tif r.apply != nil {\n\t\tlocalVarQueryParams.Add(\"$apply\", parameterToString(*r.apply, \"\"))\n\t}\n\tif r.count != nil {\n\t\tlocalVarQueryParams.Add(\"$count\", parameterToString(*r.count, \"\"))\n\t}\n\tif r.inlinecount != nil {\n\t\tlocalVarQueryParams.Add(\"$inlinecount\", parameterToString(*r.inlinecount, \"\"))\n\t}\n\tif r.at != nil {\n\t\tlocalVarQueryParams.Add(\"at\", parameterToString(*r.at, \"\"))\n\t}\n\tif r.tags != nil {\n\t\tlocalVarQueryParams.Add(\"tags\", parameterToString(*r.tags, \"\"))\n\t}\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\", \"text/csv\", \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := 
selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "title": "" }, { "docid": "289cb56879d3fb34743ac77450e9a885", "score": "0.5081754", "text": "func (_Endpointcontract *EndpointcontractTransactor) RenounceRole(opts *bind.TransactOpts, role [32]byte, account common.Address) (*types.Transaction, error) {\n\treturn 
_Endpointcontract.contract.Transact(opts, \"renounceRole\", role, account)\n}", "title": "" }, { "docid": "79ecc5e09bcb58dfc0e5c279ff2b08c4", "score": "0.50814", "text": "func (a *Client) UpdateUserRole(params *UpdateUserRoleParams, authInfo runtime.ClientAuthInfoWriter) (*UpdateUserRoleOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewUpdateUserRoleParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"updateUserRole\",\n\t\tMethod: \"PUT\",\n\t\tPathPattern: \"/api/userroles/{id}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\"},\n\t\tParams: params,\n\t\tReader: &UpdateUserRoleReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*UpdateUserRoleOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for updateUserRole: API contract not enforced by server. Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "title": "" }, { "docid": "1566883b77a0a2a41fb5ced8111eb284", "score": "0.50802463", "text": "func UpdateRole(role *entity.Role) (*entity.Role, error){\n\touput,err:= json.MarshalIndent(role,\"\",\"\\t\\t\")\n\t\n\tclient := &http.Client{}\n\tURL := fmt.Sprintf(\"%s%s/%d\",baseRoleURL,\"update\",role.ID)\n\treq,_ := http.NewRequest(\"PUT\",URL,bytes.NewBuffer(ouput))\n\t//DO return an http response\n\tres,err := client.Do(req)\n\t\n\tif err != nil {\n\t\treturn nil,err\n\t}\n\trolee := &entity.Role{}\n\n\tbody, err := ioutil.ReadAll(res.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terr = json.Unmarshal(body,rolee)\n\tif err != nil{\n\t\treturn nil,err\n\t}\n\treturn rolee,nil\n}", "title": "" }, { "docid": "d28789068567b1e9c30d18ecde8106a1", "score": "0.50739646", "text": "func (bc *BaseController) UpdateRole(role *dream.Role) (*dream.Role, error) {\n\n\tresult, err := dream.UpdateRole(bc.DB, role)\n\n\tif err != nil {\n\t\treturn nil, ServiceErr\n\t}\n\n\treturn result, nil\n}", "title": "" }, { "docid": "2d5c797feb1c584b9f1cfbb89ae3dee9", "score": "0.50678015", "text": "func (r *roleRepository) Update(rl *iam.Role) error {\n\ts := r.client.db.Copy()\n\tdefer s.Close()\n\tc := s.DB(r.client.database).C(roles)\n\tif err := c.Update(bson.M{\"tenantId\": rl.TenantID, \"name\": rl.Name}, bson.M{\"$set\": rl}); err != nil {\n\t\treturn errors.Wrapf(err, \"An error occurred while updating role %s\", rl)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "76816d71ae8c435a7532043e842e519e", "score": "0.5055797", "text": "func (_SchellingCoin *SchellingCoinTransactor) RenounceRole(opts *bind.TransactOpts, role [32]byte, account common.Address) (*types.Transaction, error) {\n\treturn _SchellingCoin.contract.Transact(opts, \"renounceRole\", role, account)\n}", "title": "" }, { "docid": "706cc58969f756eaf9a1a4def72c680a", "score": "0.5055217", "text": "func (a *FabricApiService) UpdateFabricFcoeUplinkRoleExecute(r ApiUpdateFabricFcoeUplinkRoleRequest) (*FabricFcoeUplinkRole, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue 
*FabricFcoeUplinkRole\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"FabricApiService.UpdateFabricFcoeUplinkRole\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/fabric/FcoeUplinkRoles/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.fabricFcoeUplinkRole == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"fabricFcoeUplinkRole is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.fabricFcoeUplinkRole\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn 
localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "title": "" }, { "docid": "dda1654be5c84f01843f74fee902e63b", "score": "0.505098", "text": "func (r *Role) Update() *RoleUpdateOne {\n\treturn (&RoleClient{config: r.config}).UpdateOne(r)\n}", "title": "" }, { "docid": "e9f059b8d0a7f5ae97f5d8d51995a042", "score": "0.50507003", "text": "func UnmarshalRoleRef(m map[string]json.RawMessage, result interface{}) (err error) {\n\tobj := new(RoleRef)\n\terr = core.UnmarshalPrimitive(m, \"crn\", &obj.Crn)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = core.UnmarshalPrimitive(m, \"name\", &obj.Name)\n\tif err != nil {\n\t\treturn\n\t}\n\treflect.ValueOf(result).Elem().Set(reflect.ValueOf(obj))\n\treturn\n}", "title": "" }, { "docid": "ff4ff5c8a0be22dc3490ea4a038d4a37", "score": "0.5049873", "text": "func (e EndpointsAuthV1Client) AutoUpdateRole(ctx context.Context, in *Role) (*Role, error) {\n\tresp, err := e.AutoUpdateRoleEndpoint(ctx, in)\n\tif err != nil {\n\t\treturn &Role{}, err\n\t}\n\treturn resp.(*Role), nil\n}", "title": "" }, { "docid": "10eeefcb45c5d95c99720a78f883a5d1", "score": "0.50432736", "text": "func (m *ServiceUserAgent) SetRole(value *ServiceRole)() {\n m.role = value\n}", "title": "" }, { "docid": "a0b5009c2e54e14483c49bca75cc6fd5", "score": "0.5039839", "text": "func (u *Roles) Update(w http.ResponseWriter, r *http.Request) error {\n\tparamID := chi.URLParam(r, \"id\")\n\n\tid, err := strconv.Atoi(paramID)\n\tif err != nil {\n\t\tu.Log.Printf(\"ERROR : %+v\", err)\n\t\treturn errors.Wrap(err, \"type casting paramID\")\n\t}\n\n\tvar role models.Role\n\trole.ID = uint32(id)\n\terr = role.Get(r.Context(), u.Db)\n\tif err != nil {\n\t\tu.Log.Printf(\"ERROR : %+v\", err)\n\t\treturn errors.Wrap(err, \"Get Role\")\n\t}\n\n\tvar roleRequest request.RoleRequest\n\terr = api.Decode(r, &roleRequest)\n\tif err != nil {\n\t\tu.Log.Printf(\"ERROR : %+v\", err)\n\t\treturn errors.Wrap(err, \"Decode Role\")\n\t}\n\n\tif roleRequest.ID <= 0 {\n\t\troleRequest.ID = role.ID\n\t}\n\troleUpdate := roleRequest.Transform(&role)\n\terr = roleUpdate.Update(r.Context(), u.Db)\n\tif err != nil {\n\t\tu.Log.Printf(\"ERROR : %+v\", err)\n\t\treturn errors.Wrap(err, \"Update role\")\n\t}\n\n\tvar response response.RoleResponse\n\tresponse.Transform(roleUpdate)\n\treturn api.ResponseOK(w, response, http.StatusOK)\n}", "title": "" }, { "docid": "85032e80ea06aad68179e52fe11d7b48", "score": "0.5037481", "text": "func (a *FabricApiService) 
PatchFabricFcUplinkRoleExecute(r ApiPatchFabricFcUplinkRoleRequest) (*FabricFcUplinkRole, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *FabricFcUplinkRole\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"FabricApiService.PatchFabricFcUplinkRole\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/fabric/FcUplinkRoles/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.fabricFcUplinkRole == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"fabricFcUplinkRole is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.fabricFcUplinkRole\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, 
localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "title": "" }, { "docid": "068f2b5657c1ebcfebb722024dc83d89", "score": "0.50226665", "text": "func (rm *RoleManager) Update(id string, r *Role) (err error) {\n\treturn rm.m.patch(rm.m.uri(\"roles\", id), r)\n}", "title": "" }, { "docid": "ea00b83b55bcf468f1ffc4d9f9569d2d", "score": "0.5020506", "text": "func (r ApiGetFabricFcoeUplinkRoleListRequest) Count(count bool) ApiGetFabricFcoeUplinkRoleListRequest {\n\tr.count = &count\n\treturn r\n}", "title": "" }, { "docid": "35970085c4dcc68350a2e747f7481467", "score": "0.49998724", "text": "func (this *RoleApiController) Update() {\n\tthis.CheckPermission(permissions.RoleUpdate)\n\tvar model user.Role\n\tjson.Unmarshal(this.Ctx.Input.RequestBody, &model)\n\tmodel.Name = strings.Trim(model.Name, \" \")\n\tif model.Id <= 0 {\n\t\tthis.Ctx.Abort(500, \"invalid role id\")\n\t}\n\tthis.validate(&model)\n\terr := this.roleService.Update(&model, \"Name\", \"IsSystemRole\", \"Description\")\n\tif err != nil {\n\t\tthis.Ctx.Abort(500, err.Error())\n\t}\n\tthis.ServeJSON()\n}", "title": "" }, { "docid": "26172f8c44c04eb4ddb543f6db3832b9", "score": "0.4997429", "text": "func UpdateRole(clientset *kubernetes.Clientset, namespace string, role *rbacv1.Role) (*rbacv1.Role, error) {\n\n\tclient := clientset.RbacV1().Roles(getValidNameSpace(namespace))\n\treturn client.Update(role)\n}", "title": "" } ]
caf262e0e76b6bfcfcd000108ec5b370
GetEdges returns all the edges in the schema
[ { "docid": "d2c6ccd0882de5359fc9beda9bddf653", "score": "0.77918196", "text": "func (s *Schema) GetEdges() map[string]*ent.AssocEdgeData {\n\treturn s.edges\n}", "title": "" } ]
[ { "docid": "16849cec328fdc8d896d117e08e8b67a", "score": "0.7289338", "text": "func (b *ElasticSearchBackend) GetEdges(t Context, m ElementMatcher) []*Edge {\n\treturn b.getEdges(t, m, false)\n}", "title": "" }, { "docid": "33e00ebe45e0d14d841311a104b27971", "score": "0.7046928", "text": "func (Schema) Edges() []ent.Edge { return nil }", "title": "" }, { "docid": "d0810a0e28b52bf3e86be7b4a5a67896", "score": "0.69837046", "text": "func (g Graph) GetEdges() EMAP {\n\treturn g.Edges\n}", "title": "" }, { "docid": "25a06d50ee21b40036457e2679fa22d2", "score": "0.6809826", "text": "func (c *CachedBackend) GetEdges(t Context, m ElementMatcher) []*Edge {\n\tmode := c.cacheMode.Load()\n\n\tif t.TimeSlice == nil || mode == CacheOnlyMode || c.persistent == nil {\n\t\treturn c.memory.GetEdges(t, m)\n\t}\n\treturn c.persistent.GetEdges(t, m)\n}", "title": "" }, { "docid": "281ea11e8664ba3ac8011862c5e2f6b8", "score": "0.6591799", "text": "func (m *Model) GetEdges() (edgeMap map[string]string) {\n\tm.lock.RLock()\n\tdefer m.lock.RUnlock()\n\n\tedgeMap = make(map[string]string)\n\tfor k, node := range m.nodeMap.nameMap {\n\t\tp := reflect.ValueOf(node.parent)\n\t\tpName := reflect.Indirect(p).FieldByName(\"Name\")\n\t\tif pName.IsValid() {\n\t\t\tedgeMap[k] = pName.String()\n\t\t\t// fmt.Printf(\"%s (%T) \\t\\t %s(%T)\\n\", k, node.object, pName, node.parent)\n\t\t}\n\t}\n\treturn edgeMap\n}", "title": "" }, { "docid": "4a13c7cf6b394b0d3f2b2d5b4c504dd9", "score": "0.6582606", "text": "func (b *ElasticSearchBackend) getEdges(t Context, m ElementMatcher, scrollAPI bool) []*Edge {\n\tvar filter *filters.Filter\n\tif m != nil {\n\t\tf, err := m.Filter()\n\t\tif err != nil {\n\t\t\treturn []*Edge{}\n\t\t}\n\t\tfilter = f\n\t}\n\n\tvar searchQuery filters.SearchQuery\n\tif !t.TimePoint {\n\t\tsearchQuery = filters.SearchQuery{Sort: true, SortBy: \"UpdatedAt\"}\n\t}\n\n\tedges := b.searchEdges(&TimedSearchQuery{\n\t\tSearchQuery: searchQuery,\n\t\tTimeFilter: getTimeFilter(t.TimeSlice),\n\t\tElementFilter: filter,\n\t}, scrollAPI)\n\n\tif t.TimePoint {\n\t\tedges = dedupEdges(edges)\n\t}\n\n\treturn edges\n}", "title": "" }, { "docid": "2cf1900a8d97aec9f9e92f15b57b35c1", "score": "0.6412326", "text": "func (tri *Triangulator) GetEdges() geom.MultiLineString {\n\treturn tri.builder.GetEdges()\n}", "title": "" }, { "docid": "33efaa47c195f984f74181e2d446de31", "score": "0.6381372", "text": "func (config *FolderConfig) GetEdges() ent.EdgeMap {\n\treturn ent.EdgeMap{\n\t\t// inverse edge from a folder to the todos in that folder\n\t\t\"Todos\": ent.AssociationEdge{\n\t\t\t// intentionally written in this order to test depgraph until we write tests for that\n\t\t\tInverseEdge: &ent.InverseAssocEdge{\n\t\t\t\tEdgeName: \"Folders\",\n\t\t\t},\n\t\t\tEntConfig: TodoConfig{},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "e44c463bd52df3b721a9cf1bb909de5a", "score": "0.6146453", "text": "func (s *Schema) GetEdgesToUpdate() []*ent.AssocEdgeData {\n\treturn s.edgesToUpdate\n}", "title": "" }, { "docid": "74194acc95e7ca559b1a2d2e2d772aa5", "score": "0.6083026", "text": "func (g *Gremlin) ReadEdges(node Node, predicates ...string) ([]*Edge, error) {\n\tnstr := g.NodeToID(node)\n\tif nstr == \"\" {\n\t\treturn nil, fmt.Errorf(\"%s: ReadEdges: Invalid node reference argument\", g.String())\n\t}\n\n\tgEdges, err := g.vertexEdges(node, BOTH, predicates...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"%s: ReadEdges: %v\", g.String(), err)\n\t}\n\n\tedges := g.convertEdges(gEdges)\n\tif len(edges) == 0 {\n\t\treturn nil, 
fmt.Errorf(\"%s: ReadEdges: Failed to discover edges for node %s\", g.String(), nstr)\n\t}\n\n\treturn edges, nil\n}", "title": "" }, { "docid": "81a38c8928a0e7153f5687ca32fb711a", "score": "0.6063105", "text": "func (g *WeightedGraph) Edges() []Edge {\n\treturn g.edges\n}", "title": "" }, { "docid": "c919e96589eb77949e72807a2714bf5f", "score": "0.60609835", "text": "func (g *graph) EdgeDefinitions() []EdgeDefinition {\n\treturn g.input.EdgeDefinitions\n}", "title": "" }, { "docid": "64a764a3edfbb23a2f9a488d6e02c3ac", "score": "0.60518616", "text": "func (kgdb *KVInterfaceGDB) GetEdgeList(ctx context.Context, loadProp bool) <-chan *gdbi.Edge {\n\to := make(chan *gdbi.Edge, 100)\n\tgo func() {\n\t\tdefer close(o)\n\t\tkgdb.kvg.kv.View(func(it kvi.KVIterator) error {\n\t\t\tePrefix := EdgeListPrefix(kgdb.graph)\n\t\t\tfor it.Seek(ePrefix); it.Valid() && bytes.HasPrefix(it.Key(), ePrefix); it.Next() {\n\t\t\t\tselect {\n\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\treturn nil\n\t\t\t\tdefault:\n\t\t\t\t}\n\t\t\t\tkeyValue := it.Key()\n\t\t\t\t_, eid, sid, did, label, etype := EdgeKeyParse(keyValue)\n\t\t\t\tif etype == edgeSingle {\n\t\t\t\t\tif loadProp {\n\t\t\t\t\t\tedgeData, _ := it.Value()\n\t\t\t\t\t\tge := &gripql.Edge{}\n\t\t\t\t\t\tproto.Unmarshal(edgeData, ge)\n\t\t\t\t\t\te := &gdbi.Edge{ID: ge.Gid, Label: ge.Label, From: sid, To: did, Data: ge.Data.AsMap(), Loaded: true}\n\t\t\t\t\t\to <- e\n\t\t\t\t\t} else {\n\t\t\t\t\t\te := &gdbi.Edge{ID: string(eid), Label: label, From: sid, To: did, Loaded: false}\n\t\t\t\t\t\to <- e\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn nil\n\t\t})\n\t}()\n\treturn o\n}", "title": "" }, { "docid": "216318dd10ead0d7d8ccb34235b70448", "score": "0.6021946", "text": "func (obj *GraphMetadata) GetEdgeTypes() ([]tgdb.TGEdgeType, tgdb.TGError) {\n\tif obj.edgeTypes == nil || len(obj.edgeTypes) == 0 {\n\t\tlogger.Warning(fmt.Sprint(\"WARNING: Returning GraphMetadata:GetEdgeTypes as there are NO edges`\"))\n\t\treturn nil, nil\n\t}\n\tedgeTypes := make([]tgdb.TGEdgeType, 0)\n\tfor _, edgeType := range obj.edgeTypes {\n\t\tedgeTypes = append(edgeTypes, edgeType)\n\t}\n\treturn edgeTypes, nil\n}", "title": "" }, { "docid": "5351042daf845aed306e86f0c8e39529", "score": "0.5876834", "text": "func (g Graph) GetEdgesSize() int {\n\treturn len(g.GetEdges())\n}", "title": "" }, { "docid": "386551bcc1c1897260ab08708c7396f4", "score": "0.5857203", "text": "func (b *ElasticSearchBackend) GetEdge(i Identifier, t Context) []*Edge {\n\tedges := b.searchEdges(&TimedSearchQuery{\n\t\tSearchQuery: filters.SearchQuery{\n\t\t\tFilter: filters.NewTermStringFilter(\"ID\", string(i)),\n\t\t\tSort: true,\n\t\t\tSortBy: \"Revision\",\n\t\t},\n\t\tTimeFilter: getTimeFilter(t.TimeSlice),\n\t}, false)\n\n\tif len(edges) > 1 && t.TimePoint {\n\t\treturn []*Edge{edges[len(edges)-1]}\n\t}\n\n\treturn edges\n}", "title": "" }, { "docid": "55e0a77b7f739dad1dedecea91fd749d", "score": "0.5816942", "text": "func (r GenericResource) BuildEdges(ns NodeStore) []Edge {\n\treturn []Edge{}\n}", "title": "" }, { "docid": "ea075307db1da7d2cec2c89284185359", "score": "0.5814614", "text": "func GetOutEdges(db *leveldb.DB, subject string) map[string]float64 {\n\trs := make(map[string]float64)\n\titer := db.NewIterator(util.BytesPrefix([]byte(subject+\"----->\")), nil)\n\tfor iter.Next() {\n\t\t// Remember that the contents of the returned slice should not be modified, and\n\t\t// only valid until the next call to Next.\n\t\tkey := string(iter.Key())\n\t\tvalue := 
StringToFloat64(string(iter.Value()))\n\t\trs[key] = value\n\t}\n\titer.Release()\n\terr := iter.Error()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\treturn rs\n}", "title": "" }, { "docid": "43144f3e57f9329818e0293ec4ad544a", "score": "0.5779447", "text": "func TestGetVerticesEdges(t *testing.T) {\n\n\tadjMatrix := getVerticesEdgesMatrix()\n\t//DFS\n\tprintln(\"=====================\")\n\tprintGraphDFS(adjMatrix, 0)\n\n\t//BFS\n\n\tprintln(\"=====================\")\n\tprintGraphBFS(adjMatrix)\n\n}", "title": "" }, { "docid": "e1bd882d5b25e5790f0777bdf5a6b61e", "score": "0.5746613", "text": "func (g *WG) Edges(ctx context.Context) ([]graph.Edge, error) {\n\tg.mu.RLock()\n\twedges := g.WeightedGraphBuilder.WeightedEdges()\n\tgraphEdges := gonum.WeightedEdgesOf(wedges)\n\tg.mu.RUnlock()\n\n\tedges := make([]graph.Edge, len(graphEdges))\n\n\tfor i, e := range graphEdges {\n\t\tedges[i] = e.(*Edge)\n\t}\n\n\treturn edges, nil\n}", "title": "" }, { "docid": "de5711429368daf65e40501df4c7b387", "score": "0.5731506", "text": "func (c *Connection) SelectEdges(target string, limit int, queryParams string) ([](*Edge), error) {\n\tcomText := \"SELECT\"\n\tcomText += fmt.Sprintf(\" FROM %s %s\", target, queryParams)\n\tif limit > 1 {\n\t\tcomText += fmt.Sprintf(\" LIMIT %v\", limit)\n\t}\n\tres, err := (*c).Command(comText)\n\tvar ret [](*Edge)\n\tfor ind := range res {\n\t\te := newEdge()\n\t\terr = unpackProps(&e.Entry, res[ind]) // TODO: break on err?\n\t\te.vertex[Out], err = e.PropStr(\"out\")\n\t\tif err == nil {\n\t\t\te.vertex[In], err = e.PropStr(\"in\")\n\t\t}\n\t\tif err != nil { // serious business\n\t\t\treturn nil, errors.New(fmt.Sprintf(\"SelectEdges: edge cannot be read properly, error: %v\", err))\n\t\t}\n\t\tdelete(e.Entry.propsContainer, \"out\")\n\t\tdelete(e.Entry.propsContainer, \"in\")\n\t\tc.edges[e.Entry.Rid] = &e // add to the index\n\t\tret = append(ret, &e)\n\t}\n\treturn ret, err\n}", "title": "" }, { "docid": "6d7c23ee920b3f4294722a3f834f156d", "score": "0.57058764", "text": "func (g *Gremlin) ReadInEdges(node Node, predicates ...string) ([]*Edge, error) {\n\tnstr := g.NodeToID(node)\n\tif nstr == \"\" {\n\t\treturn nil, fmt.Errorf(\"%s: ReadInEdges: Invalid node reference argument\", g.String())\n\t}\n\n\tgEdges, err := g.vertexEdges(node, IN, predicates...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"%s: ReadInEdges: %v\", g.String(), err)\n\t}\n\n\tedges := g.convertEdges(gEdges)\n\tif len(edges) == 0 {\n\t\treturn nil, fmt.Errorf(\"%s: ReadInEdges: Failed to discover edges coming into node %s\", g.String(), nstr)\n\t}\n\n\treturn edges, nil\n}", "title": "" }, { "docid": "5057eba096cc33d2efca783d71b27e97", "score": "0.5704175", "text": "func (b *ElasticSearchBackend) GetNodesEdges(nodeList []*Node, t Context, m ElementMatcher) (edges []*Edge) {\n\tif len(nodeList) == 0 {\n\t\treturn []*Edge{}\n\t}\n\n\t// See comment at GetNodesFromIDs\n\t// As we are adding two operations per item, make small batches\n\tnodesBatch := batchNodes(nodeList, maxClauseCount/2)\n\n\tfor _, nList := range nodesBatch {\n\t\tvar filter *filters.Filter\n\t\tif m != nil {\n\t\t\tf, err := m.Filter()\n\t\t\tif err != nil {\n\t\t\t\treturn []*Edge{}\n\t\t\t}\n\t\t\tfilter = f\n\t\t}\n\n\t\tvar searchQuery filters.SearchQuery\n\t\tif !t.TimePoint {\n\t\t\tsearchQuery = filters.SearchQuery{Sort: true, SortBy: \"UpdatedAt\"}\n\t\t}\n\n\t\tnodesFilter := []*filters.Filter{}\n\t\tfor _, n := range nList {\n\t\t\tnodesFilter = append(nodesFilter, filters.NewTermStringFilter(\"Parent\", 
string(n.ID)))\n\t\t\tnodesFilter = append(nodesFilter, filters.NewTermStringFilter(\"Child\", string(n.ID)))\n\t\t}\n\t\tsearchQuery.Filter = filters.NewOrFilter(nodesFilter...)\n\n\t\tedges = append(edges, b.searchEdges(&TimedSearchQuery{\n\t\t\tSearchQuery: searchQuery,\n\t\t\tTimeFilter: getTimeFilter(t.TimeSlice),\n\t\t\tElementFilter: filter,\n\t\t}, false)...)\n\n\t}\n\n\tif len(edges) > 1 && t.TimePoint {\n\t\tedges = dedupEdges(edges)\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "3ff69bf0af5dc9046ba8656c6e513b98", "score": "0.568643", "text": "func (s *Schema) GetNewEdges() []*ent.AssocEdgeData {\n\treturn s.newEdges\n}", "title": "" }, { "docid": "ca873c1b05e9c0175ca5b2a038dd63c5", "score": "0.5608823", "text": "func (Registerstore) Edges() []ent.Edge {\n\treturn []ent.Edge{\n \t\t\n\t\t\tedge.From(\"stores\", Store.Type).\n Ref(\"registerstores\").\n\t\t\tUnique(),\n\t\t\t\n\t\t\tedge.From(\"users\", User.Type).\n Ref(\"registerstores\").\n\t\t\tUnique(),\n\t\t\t\n\t\t\tedge.From(\"drugs\", Drug.Type).\n Ref(\"registerstores\").\n\t\t\tUnique(),\n\t\t\n\t\t\t}\n}", "title": "" }, { "docid": "00779ef229e81b761409e6f0aae9061e", "score": "0.5593895", "text": "func GetInEdges(db *leveldb.DB, object string) map[string]float64 {\n\trs := make(map[string]float64)\n\titer := db.NewIterator(nil, nil)\n\tfor iter.Next() {\n\t\tkey := iter.Key()\n\t\tif bytes.HasSuffix(key, []byte(\"----->\"+object)) {\n\t\t\tvalue := StringToFloat64(string(iter.Value()))\n\t\t\trs[string(key)] = value\n\t\t}\n\t}\n\titer.Release()\n\terr := iter.Error()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\treturn rs\n}", "title": "" }, { "docid": "67ecc8b71a81a0c0bc0ef42298c1992c", "score": "0.55629027", "text": "func (c *CachedBackend) GetEdge(i Identifier, t Context) []*Edge {\n\tmode := c.cacheMode.Load()\n\n\tif t.TimeSlice == nil || mode == CacheOnlyMode || c.persistent == nil {\n\t\treturn c.memory.GetEdge(i, t)\n\t}\n\n\treturn c.persistent.GetEdge(i, t)\n}", "title": "" }, { "docid": "515d8a48cb97e38970eee3ec53b03994", "score": "0.55605614", "text": "func (b *ElasticSearchBackend) searchEdges(tsq *TimedSearchQuery, scrollAPI bool) (edges []*Edge) {\n\t// Channel to get results from the query\n\thits := make(chan *elastic.SearchHit, 100)\n\n\t// New goroutine to execute the get the data from ElasticSearch\n\tgo b.Query(edgeType, tsq, scrollAPI, hits)\n\n\tfor d := range hits {\n\t\tvar edge Edge\n\t\tif err := json.Unmarshal(d.Source, &edge); err != nil {\n\t\t\tb.logger.Errorf(\"Failed to unmarshal edge %s: %s\", err, string(d.Source))\n\t\t\tcontinue\n\t\t}\n\t\tedges = append(edges, &edge)\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "c5f2b8ec0d9318f3fd683a798dcdaf20", "score": "0.55338985", "text": "func (g *fbGraph) Edges(includePruned bool) []wEdge {\n\t//edgesTo: map[int]map[int]map[int]wEdge\n\tvar edges []wEdge\n\tfor _, from := range g.edgesTo {\n\t\tfor _, edgesDict := range from {\n\t\t\tfor _, e := range edgesDict {\n\t\t\t\t// edge for line l\n\t\t\t\tedges = append(edges, e)\n\t\t\t}\n\t\t}\n\t}\n\tif includePruned {\n\t\tfor _, from := range g.prunedEdges {\n\t\t\tfor _, edgesDict := range from {\n\t\t\t\tfor _, e := range edgesDict {\n\t\t\t\t\t// edge for line l\n\t\t\t\t\tedges = append(edges, e)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn edges\n}", "title": "" }, { "docid": "6dd89d8781fc55dce2aec0aa6d177bb5", "score": "0.55221343", "text": "func (p PolicyResource) BuildEdges(ns NodeStore) []Edge {\n\t//no op for now to implement interface\n\treturn []Edge{}\n}", "title": "" }, { 
"docid": "6d06d1f35bcc545616b4011bd27ceb82", "score": "0.5495437", "text": "func (g *Graph) Edges() []graph.Edge {\n\tvar edges []graph.Edge\n\tfor _, e := range g.edges {\n\t\tedges = append(edges, e)\n\t}\n\treturn edges\n}", "title": "" }, { "docid": "89d5035490f9181556e73450fcc4d4a7", "score": "0.5468177", "text": "func (Office) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\n\t\tedge.From(\"doctor\", Doctor.Type).\n\t\t\tRef(\"offices\").Unique(),\n\n\t\t// edge.From(\"workingtime\", Workingtime.Type).\n\t\t// \tRef(\"offices\").Unique(),\n\n\t\tedge.From(\"department\", Department.Type).\n\t\t\tRef(\"offices\").Unique(),\n\n\t\tedge.From(\"extradoctor\", Extradoctor.Type).\n\t\t\tRef(\"offices\").Unique(),\n\n\t\tedge.To(\"schedules\", Schedule.Type).\n\t\t\tStorageKey(edge.Column(\"office_id\")),\n\t}\n}", "title": "" }, { "docid": "9c52e42c79443ceb25e9198ad293e9b6", "score": "0.546765", "text": "func (Specializeddiag) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.To(\"appointment\", Specializedappoint.Type).StorageKey(edge.Column(\"diacnosticID\")), \n\t\tedge.From(\"user\", User.Type).Ref(\"specializeddiag\").Unique(),\n\t}\n}", "title": "" }, { "docid": "c7d1c73d2910e072479111d93e7bc52a", "score": "0.54343766", "text": "func (Restaurant) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.To(\"address\", Address.Type).\n\t\t\tUnique(),\n\t\tedge.To(\"tags\", Tag.Type),\n\t\tedge.From(\"owner\", RestaurantOwner.Type).Ref(\"restaurant\"),\n\t\tedge.To(\"products\", Product.Type),\n\t\tedge.To(\"images\", ImagePath.Type),\n\t}\n}", "title": "" }, { "docid": "b44c700df524ebd991d56e5cad478224", "score": "0.5433964", "text": "func (tm TrafficMap) Edges() []*Edge {\n\tvar edges []*Edge\n\tfor _, n := range tm {\n\t\tedges = append(edges, n.Edges...)\n\t}\n\treturn edges\n}", "title": "" }, { "docid": "19453373d8313741462de95b73629fe5", "score": "0.5428033", "text": "func (r *WorkflowConnectionResolver) Edges() *[]*WorkflowEdgeResolver {\n\twer := []*WorkflowEdgeResolver{}\n\tfor _, w := range r.wp.Workflows {\n\t\twer = append(wer, &WorkflowEdgeResolver{w})\n\t}\n\treturn &wer\n}", "title": "" }, { "docid": "ac207a08be12a0bda1f74fdf33289905", "score": "0.5417806", "text": "func (g *Gremlin) ReadOutEdges(node Node, predicates ...string) ([]*Edge, error) {\n\tnstr := g.NodeToID(node)\n\tif nstr == \"\" {\n\t\treturn nil, fmt.Errorf(\"%s: ReadOutEdges: Invalid node reference argument\", g.String())\n\t}\n\n\tgEdges, err := g.vertexEdges(node, OUT, predicates...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"%s: ReadOutEdges: %v\", g.String(), err)\n\t}\n\n\tedges := g.convertEdges(gEdges)\n\tif len(edges) == 0 {\n\t\treturn nil, fmt.Errorf(\"%s: ReadOutEdges: Failed to discover out-edges from node %s\", g.String(), nstr)\n\t}\n\n\treturn edges, nil\n}", "title": "" }, { "docid": "c738a9e1760d332311e4cbfed6a44a21", "score": "0.5417498", "text": "func (c *CachedBackend) GetNodeEdges(n *Node, t Context, m ElementMatcher) (edges []*Edge) {\n\tmode := c.cacheMode.Load()\n\n\tif t.TimeSlice == nil || mode == CacheOnlyMode || c.persistent == nil {\n\t\treturn c.memory.GetNodeEdges(n, t, m)\n\t}\n\n\treturn c.persistent.GetNodeEdges(n, t, m)\n}", "title": "" }, { "docid": "fd9587dc3706e5f36dc43a2cad556b8b", "score": "0.5413908", "text": "func (b *ElasticSearchBackend) GetNodeEdges(n *Node, t Context, m ElementMatcher) (edges []*Edge) {\n\tvar filter *filters.Filter\n\tif m != nil {\n\t\tf, err := m.Filter()\n\t\tif err != nil {\n\t\t\treturn []*Edge{}\n\t\t}\n\t\tfilter = f\n\t}\n\n\tvar 
searchQuery filters.SearchQuery\n\tif !t.TimePoint {\n\t\tsearchQuery = filters.SearchQuery{Sort: true, SortBy: \"UpdatedAt\"}\n\t}\n\tsearchQuery.Filter = NewFilterForEdge(n.ID, n.ID)\n\n\tedges = b.searchEdges(&TimedSearchQuery{\n\t\tSearchQuery: searchQuery,\n\t\tTimeFilter: getTimeFilter(t.TimeSlice),\n\t\tElementFilter: filter,\n\t}, false)\n\n\tif len(edges) > 1 && t.TimePoint {\n\t\tedges = dedupEdges(edges)\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "54f7b1270343f077144aa4d877ff1ef3", "score": "0.5393514", "text": "func (c *CachedBackend) GetNodesEdges(n []*Node, t Context, m ElementMatcher) (edges []*Edge) {\n\tmode := c.cacheMode.Load()\n\n\tif t.TimeSlice == nil || mode == CacheOnlyMode || c.persistent == nil {\n\t\treturn c.memory.GetNodesEdges(n, t, m)\n\t}\n\n\treturn c.persistent.GetNodesEdges(n, t, m)\n}", "title": "" }, { "docid": "5dbef8c99789816169c83c9356ebe4ff", "score": "0.5368301", "text": "func (p PlacementRuleResource) BuildEdges(ns NodeStore) []Edge {\n\t//no op for now to implement interface\n\treturn []Edge{}\n}", "title": "" }, { "docid": "c69d084a7250f6a4b305dd1b9fc6cd3d", "score": "0.53659534", "text": "func (m *QueryRoutesRequest) GetIgnoredEdges() []*EdgeLocator {\n\tif m != nil {\n\t\treturn m.IgnoredEdges\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c69d084a7250f6a4b305dd1b9fc6cd3d", "score": "0.53659534", "text": "func (m *QueryRoutesRequest) GetIgnoredEdges() []*EdgeLocator {\n\tif m != nil {\n\t\treturn m.IgnoredEdges\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c69d084a7250f6a4b305dd1b9fc6cd3d", "score": "0.53659534", "text": "func (m *QueryRoutesRequest) GetIgnoredEdges() []*EdgeLocator {\n\tif m != nil {\n\t\treturn m.IgnoredEdges\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "cb3b1e329c3f9f0f33f58546f3308ce0", "score": "0.5364673", "text": "func (pr PolicyReportResource) BuildEdges(ns NodeStore) []Edge {\n\t// TODO What edges does PolicyReport need\n\tret := []Edge{}\n\treturn ret\n}", "title": "" }, { "docid": "97b4d8cdcadb7829b7f63f9f8c9873d3", "score": "0.53490835", "text": "func (as *List) Edges() int {\n\treturn as.edges\n}", "title": "" }, { "docid": "489c66e28a118670777adfdc0d81e78d", "score": "0.53335685", "text": "func (instance *TodoConnectionExec) Edges() *TodoEdgeExecArray {\n\tvar args []GraphQLArg\n\n\tinstance.stack = append(instance.stack, Instruction{\n\t\tName: \"edges\",\n\t\tField: GraphQLField{\n\t\t\tName: \"edges\",\n\t\t\tTypeName: \"TodoEdge\",\n\t\t\tTypeFields: []string{\"cursor\"},\n\t\t},\n\t\tOperation: \"\",\n\t\tArgs: args,\n\t})\n\treturn &TodoEdgeExecArray{\n\t\tclient: instance.client,\n\t\tstack: instance.stack,\n\t}\n}", "title": "" }, { "docid": "3986d389744c2031abb49c141bb165f4", "score": "0.5319476", "text": "func (SubjectsOffered) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"Subject\", Subject.Type).\n\t\t\tRef(\"SubjectsOffered\").Unique(),\n\t\tedge.From(\"Degree\", Degree.Type).\n\t\t\tRef(\"SubjectsOffered\").Unique(),\n\t\tedge.From(\"Year\", Year.Type).\n\t\t\tRef(\"SubjectsOffered\").Unique(),\n\t\tedge.From(\"Term\", Term.Type).\n\t\t\tRef(\"SubjectsOffered\").Unique(),\n\t}\n}", "title": "" }, { "docid": "10fbaab8a7bd01c5ac07fe1f344fcaf5", "score": "0.53131914", "text": "func (service ServiceTx) EdgeGroups() ([]portainer.EdgeGroup, error) {\n\tvar groups = make([]portainer.EdgeGroup, 0)\n\n\terr := service.tx.GetAllWithJsoniter(\n\t\tBucketName,\n\t\t&portainer.EdgeGroup{},\n\t\tfunc(obj interface{}) (interface{}, error) {\n\t\t\tgroup, ok := 
obj.(*portainer.EdgeGroup)\n\t\t\tif !ok {\n\t\t\t\tlog.Debug().Str(\"obj\", fmt.Sprintf(\"%#v\", obj)).Msg(\"failed to convert to EdgeGroup object\")\n\t\t\t\treturn nil, fmt.Errorf(\"Failed to convert to EdgeGroup object: %s\", obj)\n\t\t\t}\n\t\t\tgroups = append(groups, *group)\n\n\t\t\treturn &portainer.EdgeGroup{}, nil\n\t\t})\n\n\treturn groups, err\n}", "title": "" }, { "docid": "7f871f3981135b5ee2a0d1e91b230f99", "score": "0.53049976", "text": "func (Equipmentrental) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"equipment\", Equipment.Type).\n\t\t\tRef(\"equipmentrental\").\n\t\t\tUnique(),\n\t\tedge.From(\"employee\", Employee.Type).\n\t\t\tRef(\"equipmentrental\").\n\t\t\tUnique(),\n\t\tedge.From(\"member\", Member.Type).\n\t\t\tRef(\"equipmentrental\").\n\t\t\tUnique(),\n\t\tedge.From(\"equipmenttype\", Equipmenttype.Type).\n\t\t\tRef(\"equipmentrental\").\n\t\t\tUnique(),\n\t\t\n\t}\n}", "title": "" }, { "docid": "9eed3e8c71833952079762cabbb863a8", "score": "0.5290175", "text": "func (Nurse) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.To(\"queue\", Queue.Type).StorageKey(edge.Column(\"nurse_id\")),\n\t\tedge.To(\"medicalfiles\", Medicalfile.Type).StorageKey(edge.Column(\"nurse_id\")),\n\t\tedge.To(\"dentalexpenses\", Dentalexpense.Type).StorageKey(edge.Column(\"nurse_id\")),\n\t\tedge.To(\"patients\", Patient.Type).StorageKey(edge.Column(\"nurse_id\")),\n\t\tedge.To(\"dentists\", Dentist.Type).StorageKey(edge.Column(\"nurse_id\")),\n\t\tedge.To(\"appointment\", Appointment.Type).StorageKey(edge.Column(\"nurse_id\")),\n\t\t\n\t}\n}", "title": "" }, { "docid": "37f7e1add6da39521aed8c0cb32ae61d", "score": "0.5256872", "text": "func (c *CachedBackend) GetEdgeNodes(e *Edge, t Context, parentMetadata, childMetadata ElementMatcher) ([]*Node, []*Node) {\n\tmode := c.cacheMode.Load()\n\n\tif t.TimeSlice == nil || mode == CacheOnlyMode || c.persistent == nil {\n\t\treturn c.memory.GetEdgeNodes(e, t, parentMetadata, childMetadata)\n\t}\n\n\treturn c.persistent.GetEdgeNodes(e, t, parentMetadata, childMetadata)\n}", "title": "" }, { "docid": "5442634bb171c9a5580dccb2a8a97686", "score": "0.5253291", "text": "func (Namespace) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.To(\"workflows\", Workflow.Type),\n\t}\n}", "title": "" }, { "docid": "8a4e8a9fd3a8bed79bd4a49c5ad72fc5", "score": "0.5243627", "text": "func (Exchange) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"stocks\", Entity.Type).Ref(\"exchanges\"),\n\t}\n}", "title": "" }, { "docid": "44bd5d01e68a5e665e252974283ed921", "score": "0.52357846", "text": "func (Entity) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.To(\"exchanges\", Exchange.Type), // stocks can be listed on multiple exchanges\n\t\tedge.To(\"intervals\", Interval.Type), // stocks will have multiple intervals (trades, 1min, 1day, etc)\n\t\tedge.To(\"dividends\", Dividend.Type),\n\t\tedge.To(\"splits\", Split.Type),\n\t\tedge.To(\"financials\", Financial.Type),\n\t}\n}", "title": "" }, { "docid": "487cde5943cca23dcc76952a8819567b", "score": "0.5227827", "text": "func (Survey) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.To(\"flow\", Flow.Type).Unique().Required(),\n\t\tedge.From(\"for\", Person.Type).Ref(\"surveys\").Unique().Required(),\n\t\tedge.From(\"owner\", Domain.Type).Ref(\"surveys\").Unique(),\n\t}\n}", "title": "" }, { "docid": "f0b40fbd436df362e198e99855e93fda", "score": "0.5227231", "text": "func (Food) Edges() []ent.Edge {\n\treturn nil\n}", "title": "" }, { "docid": "62427c09dbf8297e7bf012aaee224a45", 
"score": "0.52167803", "text": "func (Post) Edges() []ent.Edge {\n\treturn nil\n}", "title": "" }, { "docid": "beb4e62af500d6d3e664e6c42976712e", "score": "0.5207279", "text": "func (g *Graph) Edges() [][]int {\n\tvar edges [][]int\n\n\tfor _, v := range g.Vertices {\n\t\tfor _, n := range v.Neighbors {\n\t\t\tedges = append(edges, []int{v.Value, n.Value})\n\t\t}\n\t}\n\n\treturn edges\n}", "title": "" }, { "docid": "7f8574d66ee872b354e3b50c25475cbc", "score": "0.5206657", "text": "func (Article) Edges() []ent.Edge {\n\treturn nil\n}", "title": "" }, { "docid": "7f8574d66ee872b354e3b50c25475cbc", "score": "0.5206657", "text": "func (Article) Edges() []ent.Edge {\n\treturn nil\n}", "title": "" }, { "docid": "6d3d6900d1de3240ad33fbf933a054f1", "score": "0.5204125", "text": "func (Appointment) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"patient\", Patient.Type).Ref(\"appointment\").Unique(),\n\t\tedge.From(\"room\", Room.Type).Ref(\"appointment\").Unique(),\n\t\tedge.From(\"dentist\", Dentist.Type).Ref(\"appointment\").Unique(),\n\t}\n}", "title": "" }, { "docid": "571e084d5bc6a359b58c87d72004e43c", "score": "0.52006817", "text": "func (Lease) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"Wifi\", Wifi.Type).\n\t\t\tRef(\"wifis\").\n\t\t\tUnique(),\n\t\tedge.From(\"Roomdetail\", Roomdetail.Type).\n\t\t\tRef(\"roomdetails\").\n\t\t\tUnique().\n\t\t\tRequired(),\n\t\tedge.From(\"employee\", Employee.Type).Ref(\"leasess\").Unique(),\n\t\tedge.To(\"leases\", Deposit.Type).StorageKey(edge.Column(\"lease_id\")),\n\t\tedge.To(\"bill\", Bill.Type).StorageKey(edge.Column(\"lease_id\")),\n\t\tedge.To(\"repairinvoices\", Repairinvoice.Type).StorageKey(edge.Column(\"lease_id\")),\n\t}\n}", "title": "" }, { "docid": "1cb29b554032186126f04987f8f40f90", "score": "0.5193474", "text": "func (Appointment) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"patient\", Patient.Type).Ref(\"appointment\").Unique(),\n\t\tedge.From(\"room\", Room.Type).Ref(\"appointment\").Unique(),\n\t\tedge.From(\"dentist\", Dentist.Type).Ref(\"appointment\").Unique(),\n\t\tedge.From(\"nurse\", Nurse.Type).Ref(\"appointment\").Unique(),\n\t}\n}", "title": "" }, { "docid": "cd6b65ff54cb1ee17fa50da8e57d8e67", "score": "0.5193458", "text": "func (g *UndirectedGraph) Edges() []graph.Edge {\n\tvar edges []graph.Edge\n\n\tseen := make(map[[2]int]struct{})\n\tfor _, u := range g.edges {\n\t\tu.Visit(func(neighbor int, e graph.Edge) {\n\t\t\tuid := e.From().ID()\n\t\t\tvid := e.To().ID()\n\t\t\tif _, ok := seen[[2]int{uid, vid}]; ok {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tseen[[2]int{uid, vid}] = struct{}{}\n\t\t\tseen[[2]int{vid, uid}] = struct{}{}\n\t\t\tedges = append(edges, e)\n\t\t})\n\t}\n\n\treturn edges\n}", "title": "" }, { "docid": "901501d821c666c6ad1a59795752fa24", "score": "0.51899934", "text": "func (m *BookingMutation) AddedEdges() []string {\n\tedges := make([]string, 0, 3)\n\tif m.usedby != nil {\n\t\tedges = append(edges, booking.EdgeUsedby)\n\t}\n\tif m.getservice != nil {\n\t\tedges = append(edges, booking.EdgeGetservice)\n\t}\n\tif m.using != nil {\n\t\tedges = append(edges, booking.EdgeUsing)\n\t}\n\treturn edges\n}", "title": "" }, { "docid": "b77772b182b76c8da850ee1749c3dffe", "score": "0.5189847", "text": "func (obj *Edge) GetAttributes() ([]tgdb.TGAttribute, tgdb.TGError) {\n\treturn obj.getAttributes()\n}", "title": "" }, { "docid": "2ca567d87194edff807df9226572456f", "score": "0.518315", "text": "func (g *DirectedMatrix) Edges() graph.Edges {\n\tvar edges []graph.Edge\n\tr, _ := 
g.mat.Dims()\n\tfor i := 0; i < r; i++ {\n\t\tfor j := 0; j < r; j++ {\n\t\t\tif i == j {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif w := g.mat.At(i, j); !isSame(w, g.absent) {\n\t\t\t\tedges = append(edges, WeightedEdge{F: g.Node(int64(i)), T: g.Node(int64(j)), W: w})\n\t\t\t}\n\t\t}\n\t}\n\tif len(edges) == 0 {\n\t\treturn graph.Empty\n\t}\n\treturn iterator.NewOrderedEdges(edges)\n}", "title": "" }, { "docid": "1377d719bf5e94140fbea048274be861", "score": "0.5172131", "text": "func (m *BookMutation) AddedEdges() []string {\n\tedges := make([]string, 0, 5)\n\tif m.category != nil {\n\t\tedges = append(edges, book.EdgeCategory)\n\t}\n\tif m.author != nil {\n\t\tedges = append(edges, book.EdgeAuthor)\n\t}\n\tif m.user != nil {\n\t\tedges = append(edges, book.EdgeUser)\n\t}\n\tif m.status != nil {\n\t\tedges = append(edges, book.EdgeStatus)\n\t}\n\tif m.booklist != nil {\n\t\tedges = append(edges, book.EdgeBooklist)\n\t}\n\treturn edges\n}", "title": "" }, { "docid": "5f1f704930f23338b564ce69890976f7", "score": "0.51709044", "text": "func (b *ElasticSearchBackend) GetEdgeNodes(e *Edge, t Context, parentMetadata, childMetadata ElementMatcher) (parents []*Node, children []*Node) {\n\tfor _, parent := range b.GetNode(e.Parent, t) {\n\t\tif parent.MatchMetadata(parentMetadata) {\n\t\t\tparents = append(parents, parent)\n\t\t}\n\t}\n\n\tfor _, child := range b.GetNode(e.Child, t) {\n\t\tif child.MatchMetadata(childMetadata) {\n\t\t\tchildren = append(children, child)\n\t\t}\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "09056a72168eaa8cd459ad7e6b0b3eca", "score": "0.5156151", "text": "func (obj *Edge) GetVertices() []tgdb.TGNode {\n\treturn []tgdb.TGNode{obj.FromNode, obj.ToNode}\n}", "title": "" }, { "docid": "d437423dc5fae9ee4866cb2fc916c531", "score": "0.5152669", "text": "func (City) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"owner\", User.Type).Unique().Ref(\"cities\"),\n\t\tedge.To(\"constructions\", Construction.Type),\n\t\tedge.To(\"queue\", QueueItem.Type),\n\t}\n}", "title": "" }, { "docid": "91ced05968dc535295eb75393dcaa1da", "score": "0.515115", "text": "func (Resource) Edges() []ent.Edge {\n\treturn nil\n}", "title": "" }, { "docid": "367be2f156bc0079efa8b0705e4f7a43", "score": "0.5150731", "text": "func (AccessControl) Edges() []ent.Edge {\n\treturn nil\n}", "title": "" }, { "docid": "537ae9bad3c237f2aac62ec95e386768", "score": "0.514524", "text": "func (Auth) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"user\", User.Type).\n\t\t\tRef(\"auths\").\n\t\t\tField(\"userId\").\n\t\t\tUnique().\n\t\t\tRequired(),\n\t}\n}", "title": "" }, { "docid": "738488fc5d89bba57f11194662cda4c9", "score": "0.5145141", "text": "func (Patientrights) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\n\t\t\n\t\tedge.From(\"PatientrightsPatientrightstype\", Patientrightstype.Type).\n\t\t\tRef(\"PatientrightstypePatientrights\").\n\t\t\tUnique(),\n\t\t\t//รูปแบบสิทธิ์\n\n\t\t/*\n\t\tedge.To(\"Patientrights___\", ___.Type).StorageKey(edge.Column(\"Patientrights_id\")),\n\n\t\t*/\n\n\t\tedge.From(\"PatientrightsInsurance\", Insurance.Type).\n\t\t\tRef(\"InsurancePatientrights\").\n\t\t\tUnique(),\n\t\t\t//ผู้จ่ายเงิน\n\n\t\tedge.From(\"PatientrightsPatientrecord\", Patientrecord.Type).\n\t\t\tRef(\"PatientrecordPatientrights\").\n\t\t\tUnique(),\n\t\t\t//ผู้ป่วย ผู้รับสิทธิ์\n\n\t\tedge.From(\"PatientrightsMedicalrecordstaff\", Medicalrecordstaff.Type).\n\t\t\tRef(\"MedicalrecordstaffPatientrights\").\n\t\t\tUnique(),\n\t\t\t//พนักงานผู้กรอกข้อมูล\n\n\t}\n}", "title": "" }, { "docid": 
"b564774b440e626d6aa5f1d4a6d025f1", "score": "0.51392144", "text": "func (c CronJobResource) BuildEdges(ns NodeStore) []Edge {\n\t//no op for now to implement interface\n\treturn []Edge{}\n}", "title": "" }, { "docid": "db43715cc1d04e0e41ab765e99906f98", "score": "0.5131151", "text": "func (Product) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"belongs\", Category.Type).Ref(\"contains\").Unique(),\n\t}\n}", "title": "" }, { "docid": "1265c97faa5bd01f0d5b7e314188daaa", "score": "0.51217705", "text": "func (s SubscriptionResource) BuildEdges(ns NodeStore) []Edge {\n\tret := []Edge{}\n\tUID := s.node.UID\n\n\tnodeInfo := NodeInfo{\n\t\tNameSpace: s.node.Properties[\"namespace\"].(string),\n\t\tUID: UID,\n\t\tEdgeType: \"to\",\n\t\tKind: s.node.Properties[\"kind\"].(string),\n\t\tName: s.node.Properties[\"name\"].(string)}\n\tchannelMap := make(map[string]struct{})\n\n\t// TODO: This will work only for subscription in hub cluster - confirm logic\n\t// TODO: Connect subscription and channel in remote cluster as they might not be in the same namespace\n\tif len(s.Spec.Channel) > 0 {\n\t\tfor _, channel := range strings.Split(s.Spec.Channel, \",\") {\n\t\t\tchannelMap[channel] = struct{}{}\n\t\t}\n\t\tret = append(ret, edgesByDestinationName(channelMap, \"Channel\", nodeInfo, ns, []string{})...)\n\t}\n\t// refersTo edges\n\t// Builds edges between subscription and placement rules\n\tif s.Spec.Placement != nil && s.Spec.Placement.PlacementRef != nil && s.Spec.Placement.PlacementRef.Name != \"\" {\n\t\tnodeInfo.EdgeType = \"refersTo\"\n\t\tplacementRuleMap := make(map[string]struct{})\n\t\tplacementRuleMap[s.Spec.Placement.PlacementRef.Name] = struct{}{}\n\t\tret = append(ret, edgesByDestinationName(placementRuleMap, \"PlacementRule\", nodeInfo, ns, []string{})...)\n\t}\n\t//subscribesTo edges\n\tif len(s.annotations[\"apps.open-cluster-management.io/deployables\"]) > 0 {\n\t\tnodeInfo.EdgeType = \"subscribesTo\"\n\t\tdeployableMap := make(map[string]struct{})\n\t\tfor _, deployable := range strings.Split(s.annotations[\"apps.open-cluster-management.io/deployables\"], \",\") {\n\t\t\tdeployableMap[deployable] = struct{}{}\n\t\t}\n\t\tret = append(ret, edgesByDestinationName(deployableMap, \"Deployable\", nodeInfo, ns, []string{})...)\n\t}\n\n\treturn ret\n}", "title": "" }, { "docid": "12af40132806c783dbecbce0c2dc62c2", "score": "0.5120314", "text": "func (StatementEndingBalanc) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\t// Create an inverse-edge called \"owner\" of type `User`\n\t\t// and reference it to the \"cars\" edge (in User schema)\n\t\t// explicitly using the `Ref` method.\n\t\tedge.From(\"bank\", Bankdetail.Type).\n\t\t\tRef(\"statements\").\n\t\t\t// setting the edge to unique, ensure\n\t\t\t// that a car can have only one owner.\n\t\t\tUnique(),\n\t}\n}", "title": "" }, { "docid": "356775ea5540841c2604b7698a214c01", "score": "0.51191765", "text": "func (Category) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.To(\"posts\", Post.Type).Annotations(entsql.Annotation{\n\t\t\tOnDelete: entsql.SetNull,\n\t\t}),\n\t\tedge.To(\"unsaved_posts\", UnsavedPost.Type).Annotations(entsql.Annotation{\n\t\t\tOnDelete: entsql.SetNull,\n\t\t}),\n\t}\n}", "title": "" }, { "docid": "34ea9758938244fc2ff7651d2c71f412", "score": "0.5113545", "text": "func (SFModel) Edges() []ent.Edge {\n\treturn nil\n}", "title": "" }, { "docid": "1a645d910662cd52ec7426eb43e73426", "score": "0.510926", "text": "func (Patient) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"Gender\", 
Gender.Type).Ref(\"Patient\").Unique(),\n\t\tedge.To(\"physicaltherapyrecord\", Physicaltherapyrecord.Type).StorageKey(edge.Column(\"Patient_id\")),\n\t\tedge.To(\"Bonedisease\", Bonedisease.Type).StorageKey(edge.Column(\"Patient_id\")),\n\t\tedge.To(\"Checksymptom\", Checksymptom.Type).StorageKey(edge.Column(\"Patient_id\")),\n\t\tedge.To(\"Dentalappointment\", Dentalappointment.Type).StorageKey(edge.Column(\"Patient_id\")),\n\t\tedge.To(\"Antenatalinformation\", Antenatalinformation.Type).StorageKey(edge.Column(\"Patient_id\")),\n\t\tedge.To(\"Surgeryappointment\", Surgeryappointment.Type).StorageKey(edge.Column(\"Patient_id\")),\t\n\t}\n}", "title": "" }, { "docid": "5b2679635ce1049677229ce044a24883", "score": "0.50983274", "text": "func (t *Tree) Edges() [][]Edge {\n\n\tvar edges [][]Edge\n\n\tpaths := t.DFS(t.Root)\n\tfor _, v := range paths {\n\t\tvar path []Edge\n\t\tfor i := len(v) - 1; i > 0; i-- {\n\t\t\tpath = append(path, Edge{v[i].Label, v[i-1].Label})\n\t\t}\n\t\tedges = append(edges, path)\n\t}\n\treturn edges\n}", "title": "" }, { "docid": "eedbecb3c2ae9ae153fe500c48ecc398", "score": "0.5084175", "text": "func (Employee) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.To(\"queue\", Queue.Type).StorageKey(edge.Column(\"employee_id\")),\n\t}\n}", "title": "" }, { "docid": "a8dfb1119d396d0d23dc11f2b442f8df", "score": "0.50699604", "text": "func (m *BookborrowMutation) AddedEdges() []string {\n\tedges := make([]string, 0, 5)\n\tif m._USER != nil {\n\t\tedges = append(edges, bookborrow.EdgeUSER)\n\t}\n\tif m._BOOK != nil {\n\t\tedges = append(edges, bookborrow.EdgeBOOK)\n\t}\n\tif m._SERVICEPOINT != nil {\n\t\tedges = append(edges, bookborrow.EdgeSERVICEPOINT)\n\t}\n\tif m._STATUS != nil {\n\t\tedges = append(edges, bookborrow.EdgeSTATUS)\n\t}\n\tif m.borrowed != nil {\n\t\tedges = append(edges, bookborrow.EdgeBorrowed)\n\t}\n\treturn edges\n}", "title": "" }, { "docid": "32f9b41ba3ab8606807d8630390445ad", "score": "0.5068867", "text": "func (Customer) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.To(\"address\", Address.Type).\n\t\t\tUnique(),\n\t\tedge.To(\"ratings\", Rating.Type),\n\t\tedge.To(\"orders\", Order.Type),\n\t\tedge.To(\"payment_method\", PaymentMethod.Type),\n\t}\n}", "title": "" }, { "docid": "e22b1170630227ceed5944d9036b7c22", "score": "0.50649256", "text": "func (m FieldMap) Edges() []*FieldMappingDescriptor {\n\tvar out []*FieldMappingDescriptor\n\tfor _, f := range m {\n\t\tif f.IsEdgeField {\n\t\t\tout = append(out, f)\n\t\t}\n\t}\n\tsort.Slice(out, func(i, j int) bool {\n\t\treturn out[i].PbStructField() < out[j].PbStructField()\n\t})\n\n\treturn out\n}", "title": "" }, { "docid": "aff4ac92e302ab70e61a80b12d2b2a70", "score": "0.5064125", "text": "func (Transport) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"send\",Hospital.Type).\n\t\t\tRef(\"send\").Unique(),\n\t\tedge.From(\"receive\",Hospital.Type).\n\t\t\tRef(\"receive\").Unique(),\n\n\t\tedge.From(\"user\", User.Type).\n\t\t\tRef(\"user\").Unique(),\n\n\t\tedge.From(\"ambulance\",Ambulance.Type).\n\t\t\tRef(\"ambulance\").Unique(),\n\t\t\t\n\t}\n}", "title": "" }, { "docid": "d77afe273875ac8e283665411ceef664", "score": "0.506238", "text": "func (txl *ERC20TransactionList) Edges() []*ERC20TransactionListEdge {\n\t// do we have any items? 
return empty list if not\n\tif txl.Collection == nil || len(txl.Collection) == 0 {\n\t\treturn make([]*ERC20TransactionListEdge, 0)\n\t}\n\n\t// make the list\n\tedges := make([]*ERC20TransactionListEdge, len(txl.Collection))\n\tfor i, c := range txl.Collection {\n\t\t// make the element\n\t\tedge := ERC20TransactionListEdge{\n\t\t\tTrx: NewErc20Transaction(c),\n\t\t}\n\t\tedges[i] = &edge\n\t}\n\n\treturn edges\n}", "title": "" }, { "docid": "fc07bbc7660399b6e44e95408407f638", "score": "0.5056853", "text": "func (Admin) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.To(\"sessions\", AdminSession.Type),\n\t\tedge.To(\"posts\", Post.Type),\n\t\tedge.To(\"unsaved_posts\", UnsavedPost.Type),\n\t}\n}", "title": "" }, { "docid": "35980168906f0cc8d4bb121feaff39d4", "score": "0.5056069", "text": "func (Workflow) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"namespace\", Namespace.Type).\n\t\t\tRef(\"workflows\").\n\t\t\tUnique().Required(),\n\t\tedge.To(\"instances\", WorkflowInstance.Type),\n\t\tedge.To(\"wfevents\", WorkflowEvents.Type),\n\t}\n}", "title": "" }, { "docid": "41dca8b8abebfdac5169ddbb273d7962", "score": "0.505306", "text": "func (listener *Listener) GetEndpoints() []string {\n\treturn listener.endpoints\n}", "title": "" }, { "docid": "3d4ff4230b115879dbe61cdd25ce340b", "score": "0.50446063", "text": "func (Activity) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.From(\"acti_stud\", Student.Type).Ref(\"stud_acti\").Unique(),\n\t\tedge.From(\"acti_place\", Place.Type).Ref(\"place_acti\").Unique(),\n\t\tedge.From(\"acti_agen\", Agency.Type).Ref(\"agen_acti\").Unique(),\n\t\tedge.From(\"acti_year\", Year.Type).Ref(\"year_acti\").Unique(),\n\t\tedge.From(\"acti_term\", Term.Type).Ref(\"term_acti\").Unique(),\n\t}\n}", "title": "" }, { "docid": "682c807dc2899698ae7e2dc78f83b7ae", "score": "0.5042356", "text": "func (m *RestaurantMutation) AddedEdges() []string {\n\tedges := make([]string, 0, 5)\n\tif m.address != nil {\n\t\tedges = append(edges, restaurant.EdgeAddress)\n\t}\n\tif m.tags != nil {\n\t\tedges = append(edges, restaurant.EdgeTags)\n\t}\n\tif m.owner != nil {\n\t\tedges = append(edges, restaurant.EdgeOwner)\n\t}\n\tif m.products != nil {\n\t\tedges = append(edges, restaurant.EdgeProducts)\n\t}\n\tif m.images != nil {\n\t\tedges = append(edges, restaurant.EdgeImages)\n\t}\n\treturn edges\n}", "title": "" }, { "docid": "0ef0c199e600e79fc349e2bf41430f44", "score": "0.50401986", "text": "func (Machine) Edges() []ent.Edge {\n\treturn []ent.Edge{\n\t\tedge.To(\"alerts\", Alert.Type),\n\t}\n}", "title": "" }, { "docid": "2942516225dcecdbcffc7c91bfc64f53", "score": "0.5039929", "text": "func (m *EmployeeMutation) AddedEdges() []string {\n\tedges := make([]string, 0, 1)\n\tif m.statistic != nil {\n\t\tedges = append(edges, employee.EdgeStatistic)\n\t}\n\treturn edges\n}", "title": "" }, { "docid": "abcc4c0a09816bad1316d66872af9b34", "score": "0.50381327", "text": "func (m *RatingMutation) AddedEdges() []string {\n\tedges := make([]string, 0, 2)\n\tif m.customer != nil {\n\t\tedges = append(edges, rating.EdgeCustomer)\n\t}\n\tif m.product != nil {\n\t\tedges = append(edges, rating.EdgeProduct)\n\t}\n\treturn edges\n}", "title": "" }, { "docid": "e3c79f3f81e3dfaae70597a5a2d547b9", "score": "0.50379276", "text": "func (m *ReviewMutation) AddedEdges() []string {\n\tedges := make([]string, 0, 2)\n\tif m.transaction != nil {\n\t\tedges = append(edges, review.EdgeTransaction)\n\t}\n\tif m.coin != nil {\n\t\tedges = append(edges, review.EdgeCoin)\n\t}\n\treturn 
edges\n}", "title": "" }, { "docid": "a766c6fd021a62b33461bec49af2ca6c", "score": "0.5035988", "text": "func (m *ClientEntityMutation) AddedEdges() []string {\n\tedges := make([]string, 0, 2)\n\tif m.booked != nil {\n\t\tedges = append(edges, cliententity.EdgeBooked)\n\t}\n\tif m.state != nil {\n\t\tedges = append(edges, cliententity.EdgeState)\n\t}\n\treturn edges\n}", "title": "" } ]
9f0714e1ea3bf088dcbb359a90029804
SetFlags sets or updates the flags served by the handler for the given SDK key. It can be called concurrently with other Handler methods. Use RandomSDKKey to create a new SDK key.
[ { "docid": "d4818130d2fdeda2b5fc0873c0b638ee", "score": "0.7496804", "text": "func (h *Handler) SetFlags(sdkKey string, flags map[string]*Flag) error {\n\tif sdkKey == \"\" {\n\t\treturn fmt.Errorf(\"empty SDK key passed to configcattest.Handler.SetFlags\")\n\t}\n\tdata, err := makeContent(flags)\n\tif err != nil {\n\t\treturn err\n\t}\n\th.mu.Lock()\n\tdefer h.mu.Unlock()\n\tif h.contents == nil {\n\t\th.contents = make(map[string][]byte)\n\t}\n\th.contents[\"/configuration-files/\"+sdkKey+\"/config_v5.json\"] = data\n\treturn nil\n}", "title": "" } ]
[ { "docid": "c68ae10403af3c3392d287dbdb02cb27", "score": "0.5487848", "text": "func (l *Logger) SetFlags(flag int) {\n\tl.mu.Lock()\n\tl.flag = flag\n\tl.mu.Unlock()\n\n\tl.instance.SetFlags(flag)\n}", "title": "" }, { "docid": "b2b9838abf8eee88522f3b611bf74800", "score": "0.54377264", "text": "func (Command) SetFlags(f *flag.FlagSet) {}", "title": "" }, { "docid": "11c319e020b1c121e4c5c96441facbfe", "score": "0.5430101", "text": "func (d *Request) SetFlags(flags int32) {\n\tC.gocef_request_set_flags(d.toNative(), C.int(flags), d.set_flags)\n}", "title": "" }, { "docid": "c7b6dd3cb59846945811e46a6bd62ab2", "score": "0.537459", "text": "func (c *Client) SetFlags(flags uint32) {\n\tc.m.Lock()\n\tdefer c.m.Unlock()\n\tc.flags = flags\n}", "title": "" }, { "docid": "63dda29feb454e88a4e2eea04a659fc5", "score": "0.53100324", "text": "func (l *Logger) SetFlags(flag int) {\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\tl.flag = flag\n}", "title": "" }, { "docid": "ab963d92939e8bbea6c57664254310c6", "score": "0.5264299", "text": "func (v *Application) SetFlags(flags ApplicationFlags) {\n\tC.g_application_set_flags(v.Native(), C.GApplicationFlags(flags))\n}", "title": "" }, { "docid": "99178ac5158847ca04a70e6aaefd4ae0", "score": "0.52287054", "text": "func (i *InputStickerSetItem) SetFlags() {\n\tif !(i.MaskCoords.Zero()) {\n\t\ti.Flags.Set(0)\n\t}\n\tif !(i.Keywords == \"\") {\n\t\ti.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "1f34f24f00733a897519c9fe4d7d1f63", "score": "0.5184408", "text": "func (cmd *RunCmd) SetFlags(f *flag.FlagSet) {\n}", "title": "" }, { "docid": "ae88244d4d3675850a40bbc12d43a13e", "score": "0.5177201", "text": "func (a *HelpAppUpdate) SetFlags() {\n\tif !(a.CanNotSkip == false) {\n\t\ta.Flags.Set(0)\n\t}\n\tif !(a.Document == nil) {\n\t\ta.Flags.Set(1)\n\t}\n\tif !(a.URL == \"\") {\n\t\ta.Flags.Set(2)\n\t}\n\tif !(a.Sticker == nil) {\n\t\ta.Flags.Set(3)\n\t}\n}", "title": "" }, { "docid": "c075ca55e67c41afa9a994917c9b79a3", "score": "0.5154144", "text": "func (c *cmdBase) SetFlags(f *flag.FlagSet) {\n\tf.StringVar(&c.flagSRC, \"src\", \"\", \"source scheme://user:pass@host:port[,...]\")\n\tf.StringVar(&c.flagSRV, \"srv\", \"\", \"network address for WEB server scheme://domain.com\")\n\n\tf.StringVar(&c.flagKey, \"key\", \"\", \"service key\")\n\tf.StringVar(&c.flagTag, \"tag\", \"\", \"service tag\")\n\n\tf.StringVar(&c.flagMGn, \"mgn\", \"\", \"mailgun service mail://api:key@box.mailgun.org\")\n\tf.StringVar(&c.flagMFm, \"mfm\", \"noreplay@example.com\", \"mailgun from\")\n\tf.StringVar(&c.flagMTo, \"mto\", \"\", \"mailgun to\")\n\n\tif i, ok := c.cmd.(flager); ok {\n\t\ti.setFlags(f)\n\t}\n}", "title": "" }, { "docid": "a243d83c0e039c1ca92e20b2a3d7156e", "score": "0.5066664", "text": "func (s *MessagesSendEncryptedRequest) SetFlags() {\n\tif !(s.Silent == false) {\n\t\ts.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "493a825d9e88f5ed369e1333e87260f5", "score": "0.5062136", "text": "func (l *Logger) SetFlags(flags int) {\n\tl.flags = flags\n}", "title": "" }, { "docid": "d156a24dea611aca1f835c59add73468", "score": "0.5061276", "text": "func SetFlags(hilt *cli.Hilt, root *cobra.Command) *cobra.Command {\n\t// Required flags. 
Will be replaced by args.\n\troot.PersistentFlags().StringVarP(&hilt.Bucket, \"bucket\", \"b\", \"\", \"A path in Vault.\")\n\troot.PersistentFlags().StringVarP(&hilt.Key, \"key\", \"k\", \"\", \"A key in Vault.\")\n\troot.MarkPersistentFlagRequired(\"bucket\")\n\troot.MarkPersistentFlagRequired(\"key\")\n\n\t// Optional flags.\n\troot.PersistentFlags().StringVarP(&hilt.Addr, \"addr\", \"a\", \"\", \"Address of Vault server.\")\n\troot.PersistentFlags().StringVarP(&hilt.Token, \"tkn\", \"t\", \"\", \"Vault auth token.\")\n\troot.PersistentFlags().StringVarP(&hilt.TokenPath, \"tknp\", \"p\", \"~/.vault-token\", \"Path to Vault auth token.\")\n\troot.PersistentFlags().BoolVarP(&hilt.HidePrompt, \"hide\", \"h\", false, \"Hide prompt to print to stdout.\")\n\n\treturn root\n}", "title": "" }, { "docid": "bafbe942f4460316612be83dd0baf422", "score": "0.50263625", "text": "func SetFlags(flags int) {\n\tlogger.SetFlags(flags)\n}", "title": "" }, { "docid": "0a36b7831aed68e354a3819b3867cf77", "score": "0.501682", "text": "func SetFlags(flag int) {\n\tstd.SetFlags(flag)\n}", "title": "" }, { "docid": "27c1232f127b0d48a4725e556b8e98e9", "score": "0.5013789", "text": "func (m *MessageActionPaymentSentMe) SetFlags() {\n\tif !(m.RecurringInit == false) {\n\t\tm.Flags.Set(2)\n\t}\n\tif !(m.RecurringUsed == false) {\n\t\tm.Flags.Set(3)\n\t}\n\tif !(m.Info.Zero()) {\n\t\tm.Flags.Set(0)\n\t}\n\tif !(m.ShippingOptionID == \"\") {\n\t\tm.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "6a7ae94476e845d4081bcf40c9d6892b", "score": "0.5011869", "text": "func (i *InitConnectionRequest) SetFlags() {\n\tif !(i.Proxy.Zero()) {\n\t\ti.Flags.Set(0)\n\t}\n\tif !(i.Params == nil) {\n\t\ti.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "caccfe7b1941bd21e1a7db4e46069b85", "score": "0.49944228", "text": "func (u *UpdateGroupCallConnection) SetFlags() {\n\tif !(u.Presentation == false) {\n\t\tu.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "d9075975c0706b42c875d03d4e126ca5", "score": "0.49595836", "text": "func SetAPIKey(key string) int {\n APIKey = key\n return 0;\n}", "title": "" }, { "docid": "180ac953199cb08f389f2d30b9427473", "score": "0.49412903", "text": "func (u *UpdateInlineBotCallbackQuery) SetFlags() {\n\tif !(u.Data == nil) {\n\t\tu.Flags.Set(0)\n\t}\n\tif !(u.GameShortName == \"\") {\n\t\tu.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "f0289bbf7c1b8d455bd677818e78405d", "score": "0.4928011", "text": "func SetFlags(flags int) {\n\tdefaultLog._log.SetFlags(flags)\n}", "title": "" }, { "docid": "26ca851ca2d2f319160f20d28a51c029", "score": "0.49238837", "text": "func (u *UpdateBotCallbackQuery) SetFlags() {\n\tif !(u.Data == nil) {\n\t\tu.Flags.Set(0)\n\t}\n\tif !(u.GameShortName == \"\") {\n\t\tu.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "de4a2d640441941965b5f35f95d666ac", "score": "0.4920075", "text": "func (d *MessagesDeletePhoneCallHistoryRequest) SetFlags() {\n\tif !(d.Revoke == false) {\n\t\td.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "7ed0ba699882b4db00f514c1eeb215be", "score": "0.491023", "text": "func (d *DecryptedMessage46) SetFlags() {\n\tif !(d.Media == nil) {\n\t\td.Flags.Set(9)\n\t}\n\tif !(d.Entities == nil) {\n\t\td.Flags.Set(7)\n\t}\n\tif !(d.ViaBotName == \"\") {\n\t\td.Flags.Set(11)\n\t}\n\tif !(d.ReplyToRandomID == 0) {\n\t\td.Flags.Set(3)\n\t}\n}", "title": "" }, { "docid": "a4d644c1f1cdbf5a68307fe2e13c4610", "score": "0.48704946", "text": "func (j *PhoneJoinGroupCallRequest) SetFlags() {\n\tif !(j.Muted == false) {\n\t\tj.Flags.Set(0)\n\t}\n\tif !(j.VideoStopped == 
false) {\n\t\tj.Flags.Set(2)\n\t}\n\tif !(j.InviteHash == \"\") {\n\t\tj.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "5623c0b6dc17b744ea54b5b476c8a70f", "score": "0.48670548", "text": "func (ac *MockAdminConnector) SetServiceFlags(flags evergreen.ServiceFlags, u *user.DBUser) error {\n\tif ac.MockSettings == nil {\n\t\tac.MockSettings = &evergreen.Settings{}\n\t}\n\tac.MockSettings.ServiceFlags = flags\n\treturn nil\n}", "title": "" }, { "docid": "bed0c44bfccdd116b5ef248512cbdcf1", "score": "0.48568973", "text": "func (bf *BreakdownFlags) Set(keys []string) error {\n\tfor _, key := range keys {\n\t\tswitch key {\n\t\tcase breakdownLabels[FieldFamily]:\n\t\t\tbf.Family = true\n\t\tcase breakdownLabels[FieldSrcAddr]:\n\t\t\tbf.SrcAddr = true\n\t\tcase breakdownLabels[FieldDstAddr]:\n\t\t\tbf.DstAddr = true\n\t\tcase breakdownLabels[FieldProtocol]:\n\t\t\tbf.Protocol = true\n\t\tcase breakdownLabels[FieldIntIn]:\n\t\t\tbf.IntIn = true\n\t\tcase breakdownLabels[FieldIntOut]:\n\t\t\tbf.IntOut = true\n\t\tcase breakdownLabels[FieldNextHop]:\n\t\t\tbf.NextHop = true\n\t\tcase breakdownLabels[FieldSrcAs]:\n\t\t\tbf.SrcAsn = true\n\t\tcase breakdownLabels[FieldDstAs]:\n\t\t\tbf.DstAsn = true\n\t\tcase breakdownLabels[FieldNextHopAs]:\n\t\t\tbf.NextHopAsn = true\n\t\tcase breakdownLabels[FieldSrcPfx]:\n\t\t\tbf.SrcPfx = true\n\t\tcase breakdownLabels[FieldDstPfx]:\n\t\t\tbf.DstPfx = true\n\t\tcase breakdownLabels[FieldSrcPort]:\n\t\t\tbf.SrcPort = true\n\t\tcase breakdownLabels[FieldDstPort]:\n\t\t\tbf.DstPort = true\n\t\tcase breakdownLabels[FieldIntInName]:\n\t\t\tbf.IntInName = true\n\t\tcase breakdownLabels[FieldIntOutName]:\n\t\t\tbf.IntOutName = true\n\n\t\tdefault:\n\t\t\treturn errors.Errorf(\"invalid breakdown key: %s\", key)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "389b05504f39f4aed5a1d07e524e1161", "score": "0.4855653", "text": "func (p *PublishCmd) SetFlags(f *flag.FlagSet) {\n}", "title": "" }, { "docid": "55eb5312a0a6895f15faaa99c76700e1", "score": "0.48445538", "text": "func (a *Applier) SetFlags(cmd *cobra.Command) error {\n\ta.ApplyOptions.DeleteFlags.AddFlags(cmd)\n\tfor _, flag := range []string{\"kustomize\", \"filename\", \"recursive\"} {\n\t\terr := cmd.Flags().MarkHidden(flag)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\ta.ApplyOptions.RecordFlags.AddFlags(cmd)\n\t_ = cmd.Flags().MarkHidden(\"record\")\n\t_ = cmd.Flags().MarkHidden(\"cascade\")\n\t_ = cmd.Flags().MarkHidden(\"force\")\n\t_ = cmd.Flags().MarkHidden(\"grace-period\")\n\t_ = cmd.Flags().MarkHidden(\"timeout\")\n\t_ = cmd.Flags().MarkHidden(\"wait\")\n\ta.StatusOptions.AddFlags(cmd)\n\ta.ApplyOptions.Overwrite = true\n\treturn nil\n}", "title": "" }, { "docid": "784304007d60220cf190b256105fcea0", "score": "0.483384", "text": "func (t *MessagesToggleStickerSetsRequest) SetFlags() {\n\tif !(t.Uninstall == false) {\n\t\tt.Flags.Set(0)\n\t}\n\tif !(t.Archive == false) {\n\t\tt.Flags.Set(1)\n\t}\n\tif !(t.Unarchive == false) {\n\t\tt.Flags.Set(2)\n\t}\n}", "title": "" }, { "docid": "6cfe4c71b3262987c16d10b74f7da7ff", "score": "0.4833645", "text": "func (u *UpdateServiceNotification) SetFlags() {\n\tif !(u.Popup == false) {\n\t\tu.Flags.Set(0)\n\t}\n\tif !(u.InboxDate == 0) {\n\t\tu.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "7f16a5401cc3710921f7985acf9c1878", "score": "0.48312262", "text": "func (m *MessageActionBotAllowed) SetFlags() {\n\tif !(m.AttachMenu == false) {\n\t\tm.Flags.Set(1)\n\t}\n\tif !(m.Domain == \"\") {\n\t\tm.Flags.Set(0)\n\t}\n\tif !(m.App == nil) 
{\n\t\tm.Flags.Set(2)\n\t}\n}", "title": "" }, { "docid": "567b5fdfb694a4cb4f26b9a14659652f", "score": "0.48261243", "text": "func (u *URLAuthResultRequest) SetFlags() {\n\tif !(u.RequestWriteAccess == false) {\n\t\tu.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "601512ec20362e50df892895c089d028", "score": "0.48245764", "text": "func SetFlags(flags int) {\n\tlogConfig.flags = flags\n\n\tupdateLoggerConfig()\n}", "title": "" }, { "docid": "a7e5ea320d73cbd32aa0fb22d8b62ec0", "score": "0.48106667", "text": "func SetFlags(flags int) {\n\tdefaultWriter.mutex.Lock()\n\tlog.SetFlags(flags)\n\tdefaultWriter.flags = flags\n\tdefaultWriter.mutex.Unlock()\n}", "title": "" }, { "docid": "fb2f578ae5df74afb36537887a27fab8", "score": "0.4807175", "text": "func (a *ContactsAddContactRequest) SetFlags() {\n\tif !(a.AddPhonePrivacyException == false) {\n\t\ta.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "4aec290530d7259d6e275cd41ba71dfd", "score": "0.47944805", "text": "func (l *Logger) SetFlags(flags int) {\n\tl.log.SetFlags(flags)\n}", "title": "" }, { "docid": "f88bc51f213ff82d7372dd9be73c52c9", "score": "0.47901672", "text": "func (*cmd) SetFlags(*flag.FlagSet) {}", "title": "" }, { "docid": "6e6a9f17dfb14469060954936ef439bd", "score": "0.4781834", "text": "func (u *UpdateBotPrecheckoutQuery) SetFlags() {\n\tif !(u.Info.Zero()) {\n\t\tu.Flags.Set(0)\n\t}\n\tif !(u.ShippingOptionID == \"\") {\n\t\tu.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "96b49c97383aa2a8ff63fa9bbaa5fd0d", "score": "0.47780955", "text": "func (m *JoinAcceptPayload) SetFromFlags(flags *pflag.FlagSet, prefix string) (paths []string, err error) {\n\tif val, changed, err := flagsplugin.GetBytes(flags, flagsplugin.Prefix(\"encrypted\", prefix)); err != nil {\n\t\treturn nil, err\n\t} else if changed {\n\t\tm.Encrypted = val\n\t\tpaths = append(paths, flagsplugin.Prefix(\"encrypted\", prefix))\n\t}\n\tif val, changed, err := customflags.GetExactBytes(flags, flagsplugin.Prefix(\"join_nonce\", prefix)); err != nil {\n\t\treturn nil, err\n\t} else if changed {\n\t\tm.JoinNonce = val\n\t\tpaths = append(paths, flagsplugin.Prefix(\"join_nonce\", prefix))\n\t}\n\tif val, changed, err := customflags.GetExactBytes(flags, flagsplugin.Prefix(\"net_id\", prefix)); err != nil {\n\t\treturn nil, err\n\t} else if changed {\n\t\tm.NetId = val\n\t\tpaths = append(paths, flagsplugin.Prefix(\"net_id\", prefix))\n\t}\n\tif val, changed, err := customflags.GetExactBytes(flags, flagsplugin.Prefix(\"dev_addr\", prefix)); err != nil {\n\t\treturn nil, err\n\t} else if changed {\n\t\tm.DevAddr = val\n\t\tpaths = append(paths, flagsplugin.Prefix(\"dev_addr\", prefix))\n\t}\n\tif changed := flagsplugin.IsAnyPrefixSet(flags, flagsplugin.Prefix(\"dl_settings\", prefix)); changed {\n\t\tif m.DlSettings == nil {\n\t\t\tm.DlSettings = &DLSettings{}\n\t\t}\n\t\tif setPaths, err := m.DlSettings.SetFromFlags(flags, flagsplugin.Prefix(\"dl_settings\", prefix)); err != nil {\n\t\t\treturn nil, err\n\t\t} else {\n\t\t\tpaths = append(paths, setPaths...)\n\t\t}\n\t}\n\tif val, changed, err := flagsplugin.GetString(flags, flagsplugin.Prefix(\"rx_delay\", prefix)); err != nil {\n\t\treturn nil, err\n\t} else if changed {\n\t\tenumValue, err := flagsplugin.SetEnumString(val, RxDelay_value, RxDelay_customvalue)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tm.RxDelay = RxDelay(enumValue)\n\t\tpaths = append(paths, flagsplugin.Prefix(\"rx_delay\", prefix))\n\t}\n\tif changed := flagsplugin.IsAnyPrefixSet(flags, flagsplugin.Prefix(\"cf_list\", prefix)); 
changed {\n\t\tif m.CfList == nil {\n\t\t\tm.CfList = &CFList{}\n\t\t}\n\t\tif setPaths, err := m.CfList.SetFromFlags(flags, flagsplugin.Prefix(\"cf_list\", prefix)); err != nil {\n\t\t\treturn nil, err\n\t\t} else {\n\t\t\tpaths = append(paths, setPaths...)\n\t\t}\n\t}\n\treturn paths, nil\n}", "title": "" }, { "docid": "f62e7af7b252908dec0e7561492ccc91", "score": "0.47739416", "text": "func (u *UpdateTranscribedAudio) SetFlags() {\n\tif !(u.Pending == false) {\n\t\tu.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "2347061e801bcdf331b0054713027c28", "score": "0.47547445", "text": "func (m *Magic) SetFlags(flags int) error {\n\tif m.ptr == nil {\n\t\treturn ConnectionError\n\t}\n\n\tif C.magic_setflags(m.ptr, C.int(flags)) < 0 {\n\t\treturn errors.New(\"Magic: FlagPreserveATime is not supported by this system.\")\n\t}\n\n\treturn m.check()\n}", "title": "" }, { "docid": "fda3105f125b1f652db9c6a55b596b37", "score": "0.47474542", "text": "func SetInfoFlags(flags string) {\n setFlags(&infoFlags, flags)\n if infoLogger != nil {\n if infoFlags == -1 {\n infoLogger.SetFlags(defaultFlags)\n } else {\n infoLogger.SetFlags(infoFlags)\n }\n }\n}", "title": "" }, { "docid": "1e5c244d13850db0a0b7c7a53c5ecaaa", "score": "0.47400445", "text": "func (u *UpdateBotInlineSend) SetFlags() {\n\tif !(u.Geo == nil) {\n\t\tu.Flags.Set(0)\n\t}\n\tif !(u.MsgID == nil) {\n\t\tu.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "c7d95dabf7040f061ba0afb65ff0d843", "score": "0.47067282", "text": "func (d *DecryptedMessage) SetFlags() {\n\tif !(d.Silent == false) {\n\t\td.Flags.Set(5)\n\t}\n\tif !(d.Media == nil) {\n\t\td.Flags.Set(9)\n\t}\n\tif !(d.Entities == nil) {\n\t\td.Flags.Set(7)\n\t}\n\tif !(d.ViaBotName == \"\") {\n\t\td.Flags.Set(11)\n\t}\n\tif !(d.ReplyToRandomID == 0) {\n\t\td.Flags.Set(3)\n\t}\n\tif !(d.GroupedID == 0) {\n\t\td.Flags.Set(17)\n\t}\n}", "title": "" }, { "docid": "f41590e8ed8b3c5c39fcf314b2432fd6", "score": "0.470201", "text": "func (f *File) setFlags(flags string) error {\n\tif len(flags) != 4 {\n\t\treturn fmt.Errorf(\"invalid number of flags: %v\", flags)\n\t}\n\n\tvar err error\n\t// set file type\n\tif f.Type, err = typeFromFlag(flags[0]); err != nil {\n\t\treturn err\n\t}\n\t// set status\n\tif f.Status, err = statusFromFlag(flags[1]); err != nil {\n\t\treturn err\n\t}\n\t// set modifier\n\tif f.Modifier, err = modifierFromFlag(flags[2]); err != nil {\n\t\treturn err\n\t}\n\t// set rename flag\n\tif f.Rename, err = renameFromFlag(flags[3]); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "1c8fc7b6941e0aacb4fa52b0fdee046e", "score": "0.46925184", "text": "func (g *UpdatesGetChannelDifferenceRequest) SetFlags() {\n\tif !(g.Force == false) {\n\t\tg.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "c08b33423ec0c20d696a77a07234193a", "score": "0.466248", "text": "func (a *Authorization) SetFlags() {\n\tif !(a.Current == false) {\n\t\ta.Flags.Set(0)\n\t}\n\tif !(a.OfficialApp == false) {\n\t\ta.Flags.Set(1)\n\t}\n\tif !(a.PasswordPending == false) {\n\t\ta.Flags.Set(2)\n\t}\n\tif !(a.EncryptedRequestsDisabled == false) {\n\t\ta.Flags.Set(3)\n\t}\n\tif !(a.CallRequestsDisabled == false) {\n\t\ta.Flags.Set(4)\n\t}\n}", "title": "" }, { "docid": "e2ff20591b90e0af2cbf2e68129a27e9", "score": "0.4653958", "text": "func (u *UpdateDialogFilter) SetFlags() {\n\tif !(u.Filter == nil) {\n\t\tu.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "6d106925966808e0a460d2338609a182", "score": "0.46525383", "text": "func (u *UpdateChannelTooLong) SetFlags() {\n\tif !(u.Pts == 
0) {\n\t\tu.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "da1263325dc44d42e535d780a930579c", "score": "0.46397436", "text": "func SetAPIKey(a string) {\n\tapiKey = a\n}", "title": "" }, { "docid": "f4fc5ace2dd023225dda742fa1e290d3", "score": "0.46384022", "text": "func (l *Logger) SetFlags(layouts int) {\n\tl.logger.SetFlags(layouts)\n}", "title": "" }, { "docid": "4a0b356b8bc5e12364d9c5f7afafed55", "score": "0.46383733", "text": "func (m *MessageActionGroupCall) SetFlags() {\n\tif !(m.Duration == 0) {\n\t\tm.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "0c5ac001ab330d09be2542240b52d1de", "score": "0.46378487", "text": "func AddFlags(o *Options) {\n\tflag.StringVar(&o.KeyID, \"keyId\", \"\", \"AWS IAM User Key ID to use, if left blank will attempt to use a role, if defined secret-key must also be defined.\")\n\tflag.StringVar(&o.SecretKey, \"secretKey\", \"\", \"AWS IAM User Secret Key to use, if left blank will attempt to use a role, if defined key-id must also be defined.\")\n\tflag.StringVar(&o.Profile, \"profile\", \"\", \"AWS credential profile to use, mutually exclusive to key-id and secret-key.\")\n\tflag.StringVar(&o.TableName, \"tableName\", \"aws-service-broker\", \"DynamoDB table to use for persistent data storage.\")\n\tflag.StringVar(&o.Region, \"region\", \"us-east-1\", \"AWS Region the DynamoDB table and S3 bucket are stored in.\")\n\tflag.StringVar(&o.S3Bucket, \"s3Bucket\", \"awsservicebroker\", \"S3 bucket name where templates are stored.\")\n\tflag.StringVar(&o.S3Region, \"s3Region\", \"us-east-1\", \"region S3 bucket is located in.\")\n\tflag.StringVar(&o.S3Key, \"s3Key\", \"templates/latest/\", \"S3 key where templates are stored.\")\n\tflag.StringVar(&o.TemplateFilter, \"templateFilter\", \"-main.yaml\", \"only process templates with the defined suffix.\")\n\tflag.StringVar(&o.CatalogPath, \"catalogPath\", \"\", \"The path to the catalog.\")\n\tflag.StringVar(&o.BrokerID, \"brokerId\", \"awsservicebroker\", \"An ID to use for partitioning broker data in DynamoDb. if multiple brokers are used in the same AWS account, this value must be unique per broker\")\n\tflag.BoolVar(&o.PrescribeOverrides, \"prescribeOverrides\", false, \"Plan properties that are globally overridden will be removed from service plan parameters, this enforces their values for users and simplifies the list of required parameters. 
Common overrides are aws_access_key, aws_secret_key, region and VpcId\")\n}", "title": "" }, { "docid": "0508998d2951d17e7b5202c93e41f91a", "score": "0.46338964", "text": "func (g *PaymentsGetPaymentFormRequest) SetFlags() {\n\tif !(g.ThemeParams.Zero()) {\n\t\tg.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "ebf49ca6902ab6596385237a58eb890c", "score": "0.4631385", "text": "func (m *MessageActionGiftPremium) SetFlags() {\n\tif !(m.CryptoCurrency == \"\") {\n\t\tm.Flags.Set(0)\n\t}\n\tif !(m.CryptoAmount == 0) {\n\t\tm.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "f74510cf61ebc33301f7dcfd84cb0698", "score": "0.4631252", "text": "func (u *PhotosUploadContactProfilePhotoRequest) SetFlags() {\n\tif !(u.Suggest == false) {\n\t\tu.Flags.Set(3)\n\t}\n\tif !(u.Save == false) {\n\t\tu.Flags.Set(4)\n\t}\n\tif !(u.File == nil) {\n\t\tu.Flags.Set(0)\n\t}\n\tif !(u.Video == nil) {\n\t\tu.Flags.Set(1)\n\t}\n\tif !(u.VideoStartTs == 0) {\n\t\tu.Flags.Set(2)\n\t}\n\tif !(u.VideoEmojiMarkup == nil) {\n\t\tu.Flags.Set(5)\n\t}\n}", "title": "" }, { "docid": "167326f5c8b60b0cd845f5f5e6422c48", "score": "0.46296483", "text": "func (m *MessageActionPaymentSent) SetFlags() {\n\tif !(m.RecurringInit == false) {\n\t\tm.Flags.Set(2)\n\t}\n\tif !(m.RecurringUsed == false) {\n\t\tm.Flags.Set(3)\n\t}\n\tif !(m.InvoiceSlug == \"\") {\n\t\tm.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "1f2ebf629aa135f8ad1e0e84015c962a", "score": "0.4629008", "text": "func (u *ContactsUnblockRequest) SetFlags() {\n\tif !(u.MyStoriesFrom == false) {\n\t\tu.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "77acc413476da380e9457ad048ebf42b", "score": "0.4609227", "text": "func (c *ChannelsCreateChannelRequest) SetFlags() {\n\tif !(c.Broadcast == false) {\n\t\tc.Flags.Set(0)\n\t}\n\tif !(c.Megagroup == false) {\n\t\tc.Flags.Set(1)\n\t}\n\tif !(c.ForImport == false) {\n\t\tc.Flags.Set(3)\n\t}\n\tif !(c.Forum == false) {\n\t\tc.Flags.Set(5)\n\t}\n\tif !(c.GeoPoint == nil) {\n\t\tc.Flags.Set(2)\n\t}\n\tif !(c.Address == \"\") {\n\t\tc.Flags.Set(2)\n\t}\n\tif !(c.TTLPeriod == 0) {\n\t\tc.Flags.Set(4)\n\t}\n}", "title": "" }, { "docid": "a37f728e7ce312b9a84da2bc6bf00089", "score": "0.46064836", "text": "func (i *AccountInitTakeoutSessionRequest) SetFlags() {\n\tif !(i.Contacts == false) {\n\t\ti.Flags.Set(0)\n\t}\n\tif !(i.MessageUsers == false) {\n\t\ti.Flags.Set(1)\n\t}\n\tif !(i.MessageChats == false) {\n\t\ti.Flags.Set(2)\n\t}\n\tif !(i.MessageMegagroups == false) {\n\t\ti.Flags.Set(3)\n\t}\n\tif !(i.MessageChannels == false) {\n\t\ti.Flags.Set(4)\n\t}\n\tif !(i.Files == false) {\n\t\ti.Flags.Set(5)\n\t}\n\tif !(i.FileMaxSize == 0) {\n\t\ti.Flags.Set(5)\n\t}\n}", "title": "" }, { "docid": "a8b7ca8aa46203662b6903e5981c4c12", "score": "0.46040997", "text": "func (s *MessagesSearchGlobalRequest) SetFlags() {\n\tif !(s.FolderID == 0) {\n\t\ts.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "7cf0405e157be13e8128b53c25548c42", "score": "0.4602138", "text": "func (i *InputSecureValue) SetFlags() {\n\tif !(i.Data.Zero()) {\n\t\ti.Flags.Set(0)\n\t}\n\tif !(i.FrontSide == nil) {\n\t\ti.Flags.Set(1)\n\t}\n\tif !(i.ReverseSide == nil) {\n\t\ti.Flags.Set(2)\n\t}\n\tif !(i.Selfie == nil) {\n\t\ti.Flags.Set(3)\n\t}\n\tif !(i.Translation == nil) {\n\t\ti.Flags.Set(6)\n\t}\n\tif !(i.Files == nil) {\n\t\ti.Flags.Set(4)\n\t}\n\tif !(i.PlainData == nil) {\n\t\ti.Flags.Set(5)\n\t}\n}", "title": "" }, { "docid": "e75df125dbce95fe727f32bf041ac148", "score": "0.45945543", "text": "func (aeh *ApplicationEnvHandler) SetKeysToMask(keysToMask []string) 
{\n\taeh.ApplicationEnvRetriever.SetKeysToMask(keysToMask)\n}", "title": "" }, { "docid": "5a295dff8d162488c2e8c89e644b742d", "score": "0.4591821", "text": "func (u *UpdateStickerSetsOrder) SetFlags() {\n\tif !(u.Masks == false) {\n\t\tu.Flags.Set(0)\n\t}\n\tif !(u.Emojis == false) {\n\t\tu.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "6425fb685dbf68bef3f0ce51c5291e14", "score": "0.4568178", "text": "func (u *UpdateMessageReactions) SetFlags() {\n\tif !(u.TopMsgID == 0) {\n\t\tu.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "32db6e167a5f9d50b26ade44861618fe", "score": "0.4554347", "text": "func (c *Mock) SetAPIKey(apiKey string) {\n\tc.apiKey = apiKey\n}", "title": "" }, { "docid": "48771f0fda168709f35890091b192f12", "score": "0.454815", "text": "func (s *StoryViews) SetFlags() {\n\tif !(s.RecentViewers == nil) {\n\t\ts.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "3f89b3649fa412379dc3141317d2bbd7", "score": "0.45441452", "text": "func (d *DocumentAttributeAudio) SetFlags() {\n\tif !(d.Voice == false) {\n\t\td.Flags.Set(10)\n\t}\n\tif !(d.Title == \"\") {\n\t\td.Flags.Set(0)\n\t}\n\tif !(d.Performer == \"\") {\n\t\td.Flags.Set(1)\n\t}\n\tif !(d.Waveform == nil) {\n\t\td.Flags.Set(2)\n\t}\n}", "title": "" }, { "docid": "ebac63ef660d9fc0e4a64f3812aa36d8", "score": "0.45256338", "text": "func (f *FDTable) SetFlags(ctx context.Context, fd int32, flags FDFlags) error {\n\tif fd < 0 {\n\t\t// Don't accept negative FDs.\n\t\treturn unix.EBADF\n\t}\n\n\tf.mu.Lock()\n\tdefer f.mu.Unlock()\n\n\tfile, _, _ := f.get(fd)\n\tif file == nil {\n\t\t// No file found.\n\t\treturn unix.EBADF\n\t}\n\n\t// Update the flags.\n\tf.set(fd, file, flags)\n\treturn nil\n}", "title": "" }, { "docid": "b224da5b8bc18b57e0f7450ff72dfc48", "score": "0.45244303", "text": "func (s *MessagesSetGameScoreRequest) SetFlags() {\n\tif !(s.EditMessage == false) {\n\t\ts.Flags.Set(0)\n\t}\n\tif !(s.Force == false) {\n\t\ts.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "378d9488fd0ec124f086eb6728d683c2", "score": "0.45160404", "text": "func (d *DocumentAttributeVideo) SetFlags() {\n\tif !(d.RoundMessage == false) {\n\t\td.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "819777c8e4060ed019762eec1cf3fedd", "score": "0.450864", "text": "func SetConfigFlags(flags byte) {\n\tjs.Global.Get(\"Module\").Call(\"_SetConfigFlags\", flags)\n}", "title": "" }, { "docid": "af89fc57e760fd58699dac93e7e358ab", "score": "0.44933754", "text": "func (app *App) SetFlags() {\n\n\tapp.Flags = []cli.Flag{\n\t\tcli.StringFlag{\n\t\t\tName: \"language, lang, l\",\n\t\t\tValue: \"english\",\n\t\t\tUsage: \"language for generator instructions (options: \\\"francais\\\", \\\"italiano\\\")\",\n\t\t\tDestination: &language,\n\t\t},\n\t}\n\n}", "title": "" }, { "docid": "cb056d3057b2967f6d1e1921c6fef249", "score": "0.4491592", "text": "func (m *FHDR) SetFromFlags(flags *pflag.FlagSet, prefix string) (paths []string, err error) {\n\tif val, changed, err := customflags.GetExactBytes(flags, flagsplugin.Prefix(\"dev_addr\", prefix)); err != nil {\n\t\treturn nil, err\n\t} else if changed {\n\t\tm.DevAddr = val\n\t\tpaths = append(paths, flagsplugin.Prefix(\"dev_addr\", prefix))\n\t}\n\t// FIXME: Skipping FCtrl because it does not seem to implement AddSetFlags.\n\tif val, changed, err := flagsplugin.GetUint32(flags, flagsplugin.Prefix(\"f_cnt\", prefix)); err != nil {\n\t\treturn nil, err\n\t} else if changed {\n\t\tm.FCnt = val\n\t\tpaths = append(paths, flagsplugin.Prefix(\"f_cnt\", prefix))\n\t}\n\tif val, changed, err := flagsplugin.GetBytes(flags, 
flagsplugin.Prefix(\"f_opts\", prefix)); err != nil {\n\t\treturn nil, err\n\t} else if changed {\n\t\tm.FOpts = val\n\t\tpaths = append(paths, flagsplugin.Prefix(\"f_opts\", prefix))\n\t}\n\treturn paths, nil\n}", "title": "" }, { "docid": "8e3916f9530085735e6fcf6d700da0fe", "score": "0.44877034", "text": "func (m Mitigate) SetFlags(f *flag.FlagSet) {\n\tf.BoolVar(&m.dryRun, \"dryrun\", false, \"run the command without changing system\")\n\tf.BoolVar(&m.reverse, \"reverse\", false, \"reverse mitigate by enabling all CPUs\")\n\tm.other.setFlags(f)\n\tm.path = cpuInfo\n\tif m.reverse {\n\t\tm.path = allPossibleCPUs\n\t}\n}", "title": "" }, { "docid": "86c0302a4c4d9ebdcb0b7076e8450e96", "score": "0.4479087", "text": "func (ac *DBAdminConnector) SetServiceFlags(flags evergreen.ServiceFlags, u *user.DBUser) error {\n\toldSettings, err := evergreen.GetConfig()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = evergreen.SetServiceFlags(flags)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn event.LogServiceChanged(oldSettings.ServiceFlags, flags, u)\n}", "title": "" }, { "docid": "315d8bbf71149cc4a18aa7f0c91c816b", "score": "0.44719076", "text": "func SetFlags(cmd *cobra.Command) {\n\tlist.SetFlags(cmd)\n\tsetFlags(cmd)\n\thideFlags(cmd)\n}", "title": "" }, { "docid": "f533a7c1e18e0c80c893629e19cd3d64", "score": "0.44673958", "text": "func configureFlags(api *operations.SawtoothRESTAPI) {\n\t// api.CommandLineOptionsGroups = []swag.CommandLineOptionsGroup{ ... }\n}", "title": "" }, { "docid": "7f276c9d870358ade33bdb1d7c9f9b7a", "score": "0.44609356", "text": "func (d *DialogFilterChatlist) SetFlags() {\n\tif !(d.HasMyInvites == false) {\n\t\td.Flags.Set(26)\n\t}\n\tif !(d.Emoticon == \"\") {\n\t\td.Flags.Set(25)\n\t}\n}", "title": "" }, { "docid": "69531410e4f54ca1348493ef76a4ad34", "score": "0.44595605", "text": "func (p *MessagesProlongWebViewRequest) SetFlags() {\n\tif !(p.Silent == false) {\n\t\tp.Flags.Set(5)\n\t}\n\tif !(p.ReplyTo == nil) {\n\t\tp.Flags.Set(0)\n\t}\n\tif !(p.SendAs == nil) {\n\t\tp.Flags.Set(13)\n\t}\n}", "title": "" }, { "docid": "ce725ba69d560c7b1afbb03ab63dbcd4", "score": "0.44553426", "text": "func defineServerFlags(f *flag.FlagSet) {\n\t// short function to force successful secret reads\n\t//readSecret := func(s string) string {\n\t//\tdata, err := ioutil.ReadFile(s)\n\t//\tif err != nil {\n\t//\t\tlog.Fatalln(\"error reading secret at path:\", s, \"\\nsuppressing error output for security reasons.\")\n\t//\t}\n\t//\treturn string(data)\n\t//}\n\n\tf.StringVar(&serverURL, \"url\", \"https://ofx.chase.com\", \"Financial institution's OFX Server URL (see ofxhome.com if you don't know it)\")\n\t//f.StringVar(&clientUID, \"clientuid\", readSecret(\"/secrets/auth/clientuid.txt\"), \"Client UID (only required by a few FIs, like Chase)\")\n\t//f.StringVar(&username, \"username\", readSecret(\"/secrets/auth/username.txt\"), \"Your username at financial institution\")\n\t//f.StringVar(&password, \"password\", readSecret(\"/secrets/auth/password.txt\"), \"Your password at financial institution\")\n\tf.StringVar(&clientUID, \"clientuid\", os.Getenv(\"CHASE_CLIENTUID\"), \"Client UID (only required by a few FIs, like Chase)\")\n\tf.StringVar(&username, \"username\", os.Getenv(\"CHASE_USERNAME\"), \"Your username at financial institution\")\n\tf.StringVar(&password, \"password\", os.Getenv(\"CHASE_PASSWORD\"), \"Your password at financial institution\")\n\tf.StringVar(&org, \"org\", \"B1\", \"'ORG' for your financial institution\")\n\tf.StringVar(&fid, \"fid\", \"10898\", 
\"'FID' for your financial institution\")\n\tf.StringVar(&appID, \"appid\", \"QWIN\", \"'APPID' to pretend to be\")\n\tf.StringVar(&appVer, \"appver\", \"2700\", \"'APPVER' to pretend to be\")\n\tf.StringVar(&ofxVersion, \"ofxversion\", \"220\", \"OFX version to use\")\n\tf.BoolVar(&noIndentRequests, \"noindent\", false, \"Don't indent OFX requests\")\n}", "title": "" }, { "docid": "b0a1711630622c2cf1f29a9b5522496f", "score": "0.44515654", "text": "func (m *MessageActionTopicCreate) SetFlags() {\n\tif !(m.IconEmojiID == 0) {\n\t\tm.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "258ea6b6eb6b84d10265b0caa6ed2ac5", "score": "0.4447263", "text": "func (environment *Environment) UpdateFlags(idEnv, flags string) error {\n\tif err := environment.DB.Model(&TLSEnvironment{}).Where(\"name = ? OR uuid = ?\", idEnv, idEnv).Update(\"flags\", flags).Error; err != nil {\n\t\treturn fmt.Errorf(\"Update flags %v\", err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "7375335cc52af97571bc2af41c25c30e", "score": "0.4442007", "text": "func (p *PhoneConnectionWebrtc) SetFlags() {\n\tif !(p.Turn == false) {\n\t\tp.Flags.Set(0)\n\t}\n\tif !(p.Stun == false) {\n\t\tp.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "0f61b99813a1fe4da9cac677c47885bc", "score": "0.44385228", "text": "func (stk *StkAPI) SetSSLFlag(fname string) {\n\tif len(stk.sslFlagsEnabled) == 0 {\n\t\tstk.sslFlagsEnabled = make(map[string]bool)\n\t}\n\tstk.sslFlagsEnabled[fname] = true\n}", "title": "" }, { "docid": "a924e7e9d46ddc4904675536a0b6d8d2", "score": "0.44311595", "text": "func (s *Store) SetControlFlags(root string, flags core.DiskControlFlags) (err core.Error) {\n\t// Linear search is acceptable since this is very rare.\n\ts.lock.Lock()\n\tfor _, di := range s.disks[:] {\n\t\tif di.root == root {\n\t\t\ts.lock.Unlock()\n\t\t\treturn di.d.SetControlFlags(flags)\n\t\t}\n\t}\n\ts.lock.Unlock()\n\treturn core.ErrFileNotFound\n}", "title": "" }, { "docid": "42793eb1d78c29180d6325a09a1ff34e", "score": "0.44291523", "text": "func (t *TestCmd) SetFlags(f *flag.FlagSet) {\n\tf.BoolVar(&t.foo, \"foo\", false, \"foo the bar in the biz baz for the buzz\")\n}", "title": "" }, { "docid": "f616693bc989bd1851ec0a2da7481315", "score": "0.4427735", "text": "func (n *Notepad) SetFlags(flags int) {\n\tn.flags = flags\n\tn.init()\n}", "title": "" }, { "docid": "f616693bc989bd1851ec0a2da7481315", "score": "0.4427735", "text": "func (n *Notepad) SetFlags(flags int) {\n\tn.flags = flags\n\tn.init()\n}", "title": "" }, { "docid": "65bb0f93114821d175a0e3d6c5e2b723", "score": "0.44243822", "text": "func SetDebugFlags(flags string) {\n setFlags(&debugFlags, flags)\n if debugLogger != nil {\n if debugFlags == -1 {\n debugLogger.SetFlags(defaultFlags)\n } else {\n debugLogger.SetFlags(debugFlags)\n }\n }\n}", "title": "" }, { "docid": "8e91eb0b8e3e22a0b6082db1dcedb88e", "score": "0.44236484", "text": "func (s *MessagesSearchRequest) SetFlags() {\n\tif !(s.FromID == nil) {\n\t\ts.Flags.Set(0)\n\t}\n\tif !(s.TopMsgID == 0) {\n\t\ts.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "6794af85134a1a63dc68d4e5b18370da", "score": "0.441919", "text": "func (r *MessagesReadMentionsRequest) SetFlags() {\n\tif !(r.TopMsgID == 0) {\n\t\tr.Flags.Set(0)\n\t}\n}", "title": "" }, { "docid": "c538334d1e7e4eaa5b69c974f0c7f2b7", "score": "0.44186226", "text": "func (u *UpdatePeerBlocked) SetFlags() {\n\tif !(u.Blocked == false) {\n\t\tu.Flags.Set(0)\n\t}\n\tif !(u.BlockedMyStoriesFrom == false) {\n\t\tu.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": 
"a5a462177cce10992119ed44991fd9ea", "score": "0.44144645", "text": "func (t *Command) SetFlags(f *flag.FlagSet) {\n\tf.StringVar(&t.configString, \"configs\", \"\", \"A comma separated list of built in configs to use\")\n\tf.StringVar(&t.customConfig, \"custom-config\", \"\", \"Custom config file for osde2e\")\n}", "title": "" }, { "docid": "e0454783eb7dc53c8b3e1927f99556d6", "score": "0.44086534", "text": "func (m *JoinRequestPayload) SetFromFlags(flags *pflag.FlagSet, prefix string) (paths []string, err error) {\n\tif val, changed, err := customflags.GetExactBytes(flags, flagsplugin.Prefix(\"join_eui\", prefix)); err != nil {\n\t\treturn nil, err\n\t} else if changed {\n\t\tm.JoinEui = val\n\t\tpaths = append(paths, flagsplugin.Prefix(\"join_eui\", prefix))\n\t}\n\tif val, changed, err := customflags.GetExactBytes(flags, flagsplugin.Prefix(\"dev_eui\", prefix)); err != nil {\n\t\treturn nil, err\n\t} else if changed {\n\t\tm.DevEui = val\n\t\tpaths = append(paths, flagsplugin.Prefix(\"dev_eui\", prefix))\n\t}\n\tif val, changed, err := customflags.GetExactBytes(flags, flagsplugin.Prefix(\"dev_nonce\", prefix)); err != nil {\n\t\treturn nil, err\n\t} else if changed {\n\t\tm.DevNonce = val\n\t\tpaths = append(paths, flagsplugin.Prefix(\"dev_nonce\", prefix))\n\t}\n\treturn paths, nil\n}", "title": "" }, { "docid": "e5b6a96215ec1dc1e9c857f5ad9f0459", "score": "0.44064412", "text": "func (m *MessageActionPhoneCall) SetFlags() {\n\tif !(m.Video == false) {\n\t\tm.Flags.Set(2)\n\t}\n\tif !(m.Reason == nil) {\n\t\tm.Flags.Set(0)\n\t}\n\tif !(m.Duration == 0) {\n\t\tm.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "cde24facb65cace6c860029a07edb65a", "score": "0.44061318", "text": "func SetSharedFlags(cmd *base.Command) {\n\tsetSharedFlags(cmd)\n}", "title": "" }, { "docid": "bff9e0d64a1217d83a82e3e3a3e48309", "score": "0.44042358", "text": "func (u *UpdateMoveStickerSetToTop) SetFlags() {\n\tif !(u.Masks == false) {\n\t\tu.Flags.Set(0)\n\t}\n\tif !(u.Emojis == false) {\n\t\tu.Flags.Set(1)\n\t}\n}", "title": "" }, { "docid": "4ea2786b4771717b4b5b6e20444e0d7c", "score": "0.43937862", "text": "func (setup *ViperSetup) SetupFlags(\n\tcmd *cobra.Command,\n\tflagsConfig *SetupFlagsConfig,\n) {\n\tfs := flag.NewFlagSet(\"log\", flag.ExitOnError)\n\tklog.InitFlags(fs)\n\n\tverboseFlag := fs.Lookup(\"v\")\n\tverboseFlag.Name = \"vll\" // Otherwise conflicts with completion.\n\n\tcmd.PersistentFlags().AddGoFlag(verboseFlag)\n\tcmd.PersistentFlags().AddGoFlag(fs.Lookup(\"logtostderr\"))\n\tcmd.PersistentFlags().AddGoFlag(fs.Lookup(\"log_file\"))\n\tcmd.PersistentFlags().AddGoFlag(fs.Lookup(\"log_dir\"))\n\tcmd.PersistentFlags().AddGoFlag(fs.Lookup(\"alsologtostderr\"))\n\tcmd.PersistentFlags().AddGoFlag(fs.Lookup(\"stderrthreshold\"))\n\n\tif !flagsConfig.DisableWriteConfigFlag {\n\t\tsetup.writeConfigFlag = cmd.Flags().BoolP(\n\t\t\t\"write-given-settings\", \"w\", false,\n\t\t\t\"writes all given persistent flags to the config file and exits the program\",\n\t\t)\n\t}\n}", "title": "" } ]
a51992f0673061ef1bcd23d23c0c8f29
IsDir always returns true
[ { "docid": "9143d54516c99e11a0291aa0f0fb9950", "score": "0.871398", "text": "func (f Folder) IsDir() bool { return true }", "title": "" } ]
[ { "docid": "cdd67b11ae7cda659a77eb46e990ef52", "score": "0.8780864", "text": "func (*Root) IsDir() bool { return true }", "title": "" }, { "docid": "f8d018f28d028bcfa935a050a628e86c", "score": "0.8659028", "text": "func (f File) IsDir() bool { return false }", "title": "" }, { "docid": "6946e43d9aa81f1f14edb28147153b69", "score": "0.8613337", "text": "func (fif *impl) IsDir() bool { return fif.isDir }", "title": "" }, { "docid": "217227ec85663d2595b26d7179116cc9", "score": "0.84352994", "text": "func (fi *fileInfo) IsDir() bool { return fi.Mode().IsDir() }", "title": "" }, { "docid": "b9a00e073627e51eb70ff63eb64fc21f", "score": "0.8419024", "text": "func (info Info) IsDir() bool { return len(info.Files) != 0 }", "title": "" }, { "docid": "01ac4068d4417bbcc7e7d53e683df7ca", "score": "0.8380797", "text": "func (o *Object) IsDir() bool {\n return false\n}", "title": "" }, { "docid": "3537ee0863072381efefb17d3315c053", "score": "0.8260801", "text": "func (a *Attr) IsDir() bool { return (uint32(a.Mode) & syscall.S_IFMT) == syscall.S_IFDIR }", "title": "" }, { "docid": "666f8c2dfc8b38ffa3dc313e175c24cb", "score": "0.82414806", "text": "func (me *Node) IsDir() bool { return me.Mode&ModeDir != 0 }", "title": "" }, { "docid": "ea2db84c1549efd1c8fd03f001de69c4", "score": "0.8144315", "text": "func (n Node) IsDir() bool { return len(n.Spec.Name) == 0 || n.Spec.Name[len(n.Spec.Name)-1] == '/' }", "title": "" }, { "docid": "8bf4bc9c13a734574fdbe7be08e790b3", "score": "0.8110513", "text": "func (m Rgetattr) IsDir() bool {\n\treturn m.Mode&unix.S_IFDIR != 0\n}", "title": "" }, { "docid": "9f1998a63b0fc2ebf1620091804b5505", "score": "0.80484843", "text": "func (l *localLister) IsDir() bool {\n\treturn l.info.IsDir()\n}", "title": "" }, { "docid": "ca5386f99c38cecf13c6d3b1fb17d4ce", "score": "0.80436647", "text": "func (m FileMode) IsDir() bool {\n\treturn m&ModeDir != 0\n}", "title": "" }, { "docid": "40c3030539afb6ff57a3c451a68fa951", "score": "0.8038688", "text": "func (m MockFileInfo) IsDir() bool { return m.isDir }", "title": "" }, { "docid": "739574fbe801364f14173cc71ed33f73", "score": "0.8003335", "text": "func (df DummyFile) IsDir() bool {\n\treturn df.IsDirectory\n}", "title": "" }, { "docid": "ce4aaf7ec740c6cb44741f08e2e07a9c", "score": "0.8003226", "text": "func isDir(name string) bool {\n\tf, err := os.Open(name)\n\tif err != nil {\n\t\treturn false\n\t}\n\tdefer f.Close()\n\n\td, err := f.Stat()\n\tif err != nil {\n\t\treturn false\n\t}\n\n\treturn d.IsDir()\n}", "title": "" }, { "docid": "629ccf82fd5f8cf4c8f177f534057182", "score": "0.7981487", "text": "func (a *archive) IsDir() bool {\n\treturn false\n}", "title": "" }, { "docid": "2ba134b046e06040139cad7019702d15", "score": "0.7968049", "text": "func (fi bindataFileInfo) IsDir() bool {\r\n\treturn fi.mode&os.ModeDir != 0\r\n}", "title": "" }, { "docid": "4167e24256852161003db437a65bffef", "score": "0.79341245", "text": "func (f Folder) IsDir() bool {\n\treturn true\n}", "title": "" }, { "docid": "b9821e5d8ea0a8fec60aa4478363df4f", "score": "0.7923141", "text": "func isDir(name string) bool {\n\tinfo, err := fileStat(name)\n\tif err != nil {\n\t\treturn false\n\t}\n\treturn info.IsDir()\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", 
"title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": 
"0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn 
fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": 
"8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" }, { "docid": "8976a7eae129cd2c534a4e730c9fa72b", "score": "0.78941107", "text": "func (fi bindataFileInfo) IsDir() bool {\n\treturn fi.mode&os.ModeDir != 0\n}", "title": "" } ]
329429c60e819c0cf59c6d9ec171fdc8
NewGetAppDetailsParams creates a new GetAppDetailsParams object with the default values initialized.
[ { "docid": "ccaede17be38fbbd1244012d5305ad0e", "score": "0.8701661", "text": "func NewGetAppDetailsParams() *GetAppDetailsParams {\n\tvar ()\n\treturn &GetAppDetailsParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" } ]
[ { "docid": "c43a1bf5e82f49fff8a0122414e1ac03", "score": "0.7124225", "text": "func NewGetAppDetailsParamsWithTimeout(timeout time.Duration) *GetAppDetailsParams {\n\tvar ()\n\treturn &GetAppDetailsParams{\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "c43a7a892686c4c8214a32045c5ee222", "score": "0.6280435", "text": "func NewGetAppMessagesParams() *GetAppMessagesParams {\n\tvar (\n\t\tlimitDefault = int64(100)\n\t)\n\treturn &GetAppMessagesParams{\n\t\tLimit: &limitDefault,\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "8209205470bfe9e8609da893591959e5", "score": "0.6220304", "text": "func NewGetAppsParams() *GetAppsParams {\n\tvar (\n\t\tlimitDefault = int64(20)\n\t\tsinceDefault = int64(0)\n\t)\n\treturn &GetAppsParams{\n\t\tLimit: &limitDefault,\n\t\tSince: &sinceDefault,\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "5fd5f00337576debe6455b7c276e6952", "score": "0.61818135", "text": "func NewAppDetailRequestWithAllParams(\n regionId string,\n clientId string,\n) *AppDetailRequest {\n\n return &AppDetailRequest{\n JDCloudRequest: core.JDCloudRequest{\n URL: \"/regions/{regionId}/operate_backend/app/{clientId}\",\n Method: \"GET\",\n Header: nil,\n Version: \"v1\",\n },\n RegionId: regionId,\n ClientId: clientId,\n }\n}", "title": "" }, { "docid": "c8086e0ec02d8d9eebbabc6ea474a64b", "score": "0.6169746", "text": "func NewAppDetailRequestWithoutParam() *AppDetailRequest {\n\n return &AppDetailRequest{\n JDCloudRequest: core.JDCloudRequest{\n URL: \"/regions/{regionId}/operate_backend/app/{clientId}\",\n Method: \"GET\",\n Header: nil,\n Version: \"v1\",\n },\n }\n}", "title": "" }, { "docid": "674f6914c102bc9a0cf7dc6b611a9f25", "score": "0.6077882", "text": "func NewAppDetailsRequest(server string, namespace string, app string) (*http.Request, error) {\n\tvar err error\n\n\tvar pathParam0 string\n\n\tpathParam0, err = runtime.StyleParam(\"simple\", false, \"namespace\", namespace)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar pathParam1 string\n\n\tpathParam1, err = runtime.StyleParam(\"simple\", false, \"app\", app)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tqueryUrl, err := url.Parse(server)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbasePath := fmt.Sprintf(\"/namespaces/%s/apps/%s\", pathParam0, pathParam1)\n\tif basePath[0] == '/' {\n\t\tbasePath = basePath[1:]\n\t}\n\n\tqueryUrl, err = queryUrl.Parse(basePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := http.NewRequest(\"GET\", queryUrl.String(), nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn req, nil\n}", "title": "" }, { "docid": "03013ce85f619fa31a5ba3d9f30235da", "score": "0.6039247", "text": "func NewGetAppLogsParams() *GetAppLogsParams {\n\tvar (\n\t\tfollowDefault = bool(false)\n\t\tlinesDefault = int64(10)\n\t)\n\treturn &GetAppLogsParams{\n\t\tFollow: &followDefault,\n\t\tLines: &linesDefault,\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "3c598994ad55de258b2ba9458a207a5c", "score": "0.5761609", "text": "func NewGetMessageParams() *GetMessageParams {\n\tvar (\n\t\taPIVersionDefault = string(\"1.0\")\n\t\tlimitDefault = int64(10)\n\t\toffsetDefault = int64(0)\n\t)\n\treturn &GetMessageParams{\n\t\tAPIVersion: &aPIVersionDefault,\n\t\tLimit: &limitDefault,\n\t\tOffset: &offsetDefault,\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "a0ddd8aec48aa619c22900680e4e149a", "score": "0.5723445", "text": "func newGetCookingParams() *spoonacular.GetCookingParams 
{\n\treturn &spoonacular.GetCookingParams{\n\t\tFoodName: \"\",\n\t\tDietTypes: \"\",\n\t\tOffset: \"0\",\n\t}\n}", "title": "" }, { "docid": "e63cea6e8784d145d60ceebcf34054dc", "score": "0.5645868", "text": "func NewGetApplicationsParams() *GetApplicationsParams {\n\tvar (\n\t\tcountDefault = int32(10)\n\t\tindexDefault = int32(0)\n\t)\n\treturn &GetApplicationsParams{\n\t\tCount: &countDefault,\n\t\tIndex: &indexDefault,\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "5b52846a2428bd393b5354ff093b00c8", "score": "0.564311", "text": "func NewGetPageParams() *GetPageParams {\n\tvar ()\n\treturn &GetPageParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "b1e466e5140338196e2f178d98271378", "score": "0.5642152", "text": "func NewGetParams() *GetParams {\n\tvar ()\n\treturn &GetParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "bdea6783e75aab7fae844c1a2700c725", "score": "0.5586222", "text": "func NewApplicationCollectionGetParams() *ApplicationCollectionGetParams {\n\treturn &ApplicationCollectionGetParams{\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "b19c1b6a5bd9d68237922d5e02ec914b", "score": "0.556808", "text": "func (o *GetAppDetailsParams) WithAppID(appID int64) *GetAppDetailsParams {\n\to.AppID = appID\n\treturn o\n}", "title": "" }, { "docid": "102349dc7813be339d9eee239227916f", "score": "0.550992", "text": "func NewCustomerGatewayGetApplicationParams() *CustomerGatewayGetApplicationParams {\n\tvar ()\n\treturn &CustomerGatewayGetApplicationParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "e142c1ef0024f5bc820433b3d4a5c29c", "score": "0.55043656", "text": "func NewGetAppsParamsWithTimeout(timeout time.Duration) *GetAppsParams {\n\tvar (\n\t\tlimitDefault int64 = int64(20)\n\t\tsinceDefault int64 = int64(0)\n\t)\n\treturn &GetAppsParams{\n\t\tLimit: &limitDefault,\n\t\tSince: &sinceDefault,\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "7ce0ca675b56d211d3a568f67c657695", "score": "0.5437053", "text": "func NewInspectParams() *InspectParams {\n\tvar ()\n\treturn &InspectParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "82ef17f41bb1d22ca62fa698263fb50e", "score": "0.5435679", "text": "func (*CodeEngineV2) NewGetAppOptions(projectID string, name string) *GetAppOptions {\n\treturn &GetAppOptions{\n\t\tProjectID: core.StringPtr(projectID),\n\t\tName: core.StringPtr(name),\n\t}\n}", "title": "" }, { "docid": "3fa793d6ec6fa8da343a66cd64541eac", "score": "0.53918564", "text": "func NewGetLatestParams() *GetLatestParams {\n\tvar ()\n\treturn &GetLatestParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "84abe5915200559a2ec340a441d80a9e", "score": "0.53847075", "text": "func NewParams(ctx *cli.Context) *Params {\n\tc := &Params{}\n\n\tc.Name = ctx.App.Name\n\tc.Copyright = ctx.App.Copyright\n\tc.Version = ctx.App.Version\n\tc.ConfigFile = fs.Abs(ctx.GlobalString(\"config-file\"))\n\n\tif err := c.Load(c.ConfigFile); err != nil {\n\t\tlog.Debug(err)\n\t}\n\n\tif err := c.SetContext(ctx); err != nil {\n\t\tlog.Error(err)\n\t}\n\n\treturn c\n}", "title": "" }, { "docid": "24722919cdcf9712c5dde1ec32cd4e0c", "score": "0.5364134", "text": "func NewGetAppMessagesParamsWithTimeout(timeout time.Duration) *GetAppMessagesParams {\n\tvar (\n\t\tlimitDefault = int64(100)\n\t)\n\treturn &GetAppMessagesParams{\n\t\tLimit: &limitDefault,\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": 
"a161353fef50dd958486276163366226", "score": "0.53544855", "text": "func NewGetPolicyParams() *GetPolicyParams {\n\tvar ()\n\treturn &GetPolicyParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "64c64f2fe52d0ec8a5b0e535949786ec", "score": "0.53244036", "text": "func NewAppDetailRequest(\n regionId string,\n clientId string,\n) *AppDetailRequest {\n\n\treturn &AppDetailRequest{\n JDCloudRequest: core.JDCloudRequest{\n\t\t\tURL: \"/regions/{regionId}/operate_backend/app/{clientId}\",\n\t\t\tMethod: \"GET\",\n\t\t\tHeader: nil,\n\t\t\tVersion: \"v1\",\n\t\t},\n RegionId: regionId,\n ClientId: clientId,\n\t}\n}", "title": "" }, { "docid": "4b23d95a8600a8969ff03507acdda767", "score": "0.524975", "text": "func NewCabwiseGetParams() *CabwiseGetParams {\n\tvar ()\n\treturn &CabwiseGetParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "40c5de1858762ebef8298ef0768ac36c", "score": "0.5243538", "text": "func NewGetApplicationKeyOptionalParameters() *GetApplicationKeyOptionalParameters {\n\tthis := GetApplicationKeyOptionalParameters{}\n\treturn &this\n}", "title": "" }, { "docid": "f21ae2627e138fbb2c80f7bc02552fe4", "score": "0.5236096", "text": "func NewGetAppTagsParams() *GetAppTagsParams {\n\tvar ()\n\treturn &GetAppTagsParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "3d9077595462aae88241efe12c058d17", "score": "0.5166416", "text": "func NewGetMessageParamsWithTimeout(timeout time.Duration) *GetMessageParams {\n\tvar (\n\t\taPIVersionDefault = string(\"1.0\")\n\t\tlimitDefault = int64(10)\n\t\toffsetDefault = int64(0)\n\t)\n\treturn &GetMessageParams{\n\t\tAPIVersion: &aPIVersionDefault,\n\t\tLimit: &limitDefault,\n\t\tOffset: &offsetDefault,\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "3789b7e1a952b8c57911d2fddc7b8137", "score": "0.5160901", "text": "func NewGetAppLogsParamsWithTimeout(timeout time.Duration) *GetAppLogsParams {\n\tvar (\n\t\tfollowDefault bool = bool(false)\n\t\tlinesDefault int64 = int64(10)\n\t)\n\treturn &GetAppLogsParams{\n\t\tFollow: &followDefault,\n\t\tLines: &linesDefault,\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "bfa3137f777efda00a091c99f01ed1d5", "score": "0.5138588", "text": "func NewGetApplicationsParamsWithTimeout(timeout time.Duration) *GetApplicationsParams {\n\tvar (\n\t\tcountDefault = int32(10)\n\t\tindexDefault = int32(0)\n\t)\n\treturn &GetApplicationsParams{\n\t\tCount: &countDefault,\n\t\tIndex: &indexDefault,\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "361106bc98a315d9cd9d230f36c5eafd", "score": "0.512505", "text": "func AppDetails(w http.ResponseWriter, r *http.Request) {\n\tparams := mux.Vars(r)\n\t// Get business layer\n\tbusiness, err := getBusiness(r)\n\tif err != nil {\n\t\tRespondWithError(w, http.StatusInternalServerError, \"Services initialization error: \"+err.Error())\n\t\treturn\n\t}\n\tnamespace := params[\"namespace\"]\n\tapp := params[\"app\"]\n\n\t// Fetch and build app\n\tappDetails, err := business.App.GetApp(namespace, app)\n\tif err != nil {\n\t\thandleErrorResponse(w, err)\n\t\treturn\n\t}\n\n\tRespondWithJSON(w, http.StatusOK, appDetails)\n}", "title": "" }, { "docid": "48c231a18044c3cf7cfb7efd623be06e", "score": "0.5100899", "text": "func NewGetMetricsParams() GetMetricsParams {\n\tvar (\n\t\tfollowDefault = bool(false)\n\t\tsinceTimeDefault = string(\"\")\n\t\tversionDefault = string(\"2017-06-07\")\n\t)\n\treturn GetMetricsParams{\n\t\tFollow: &followDefault,\n\n\t\tSinceTime: 
&sinceTimeDefault,\n\n\t\tVersion: versionDefault,\n\t}\n}", "title": "" }, { "docid": "7695b92a89d399f02c23a0e624f5ece9", "score": "0.5094305", "text": "func NewParams(c echo.Context) Params {\n\treturn NewEchoParams(c)\n}", "title": "" }, { "docid": "41aa95bb5b0588d2e5a847cfbbb94672", "score": "0.5065219", "text": "func NewDescribeConfigRequestWithoutParam() *DescribeConfigRequest {\n\n return &DescribeConfigRequest{\n JDCloudRequest: core.JDCloudRequest{\n URL: \"/regions/{regionId}/instances/{instanceId}/hardwareId/{hardwareId}/os/{osId}/edgeApp:describeConfig\",\n Method: \"POST\",\n Header: nil,\n Version: \"v2\",\n },\n }\n}", "title": "" }, { "docid": "47eab4517e2caa20b5df5c8b7bba2cec", "score": "0.5063559", "text": "func NewGetAppMessagesParamsWithHTTPClient(client *http.Client) *GetAppMessagesParams {\n\tvar (\n\t\tlimitDefault = int64(100)\n\t)\n\treturn &GetAppMessagesParams{\n\t\tLimit: &limitDefault,\n\t\tHTTPClient: client,\n\t}\n}", "title": "" }, { "docid": "cdedd99c0d2816c4e8838c22a7346d2f", "score": "0.5054137", "text": "func NewGetUserParams() *GetUserParams {\n\tvar ()\n\treturn &GetUserParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "3ca4840a40cede989e6a2ffcc27a419b", "score": "0.5048104", "text": "func NewGetMessagesParams() GetMessagesParams {\n\n\tvar (\n\t\t// initialize parameters with default values\n\n\t\tlimitDefault = int64(100)\n\t\toffsetDefault = int64(0)\n\t)\n\n\treturn GetMessagesParams{\n\t\tLimit: &limitDefault,\n\n\t\tOffset: &offsetDefault,\n\t}\n}", "title": "" }, { "docid": "13f56e2232112d03f472fc1433667853", "score": "0.5044294", "text": "func NewGetPackageParams() *GetPackageParams {\n\tvar ()\n\treturn &GetPackageParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "878b0105057220387e7a49e645f5bd2f", "score": "0.50333655", "text": "func NewParams(active bool, rewards Rewards) Params {\n\treturn Params{\n\t\tActive: active,\n\t\tRewards: rewards,\n\t}\n}", "title": "" }, { "docid": "061474920a28fe4ac46b3ca630d05c14", "score": "0.50296056", "text": "func NewCustomerGatewayGetApplicationParamsWithTimeout(timeout time.Duration) *CustomerGatewayGetApplicationParams {\n\tvar ()\n\treturn &CustomerGatewayGetApplicationParams{\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "25d65c97b20548590e3d50755f4dc143", "score": "0.50262547", "text": "func NewGetConfigParams() GetConfigParams {\n\n\treturn GetConfigParams{}\n}", "title": "" }, { "docid": "bb93b71194c3b73b3b6351936a180157", "score": "0.5009604", "text": "func NewUpdateAppParams() *UpdateAppParams {\n\tvar ()\n\treturn &UpdateAppParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "ec7d45686f259cf237fd20c5c44390eb", "score": "0.49958286", "text": "func NewGetCryptokeyParams() *GetCryptokeyParams {\n\tvar ()\n\treturn &GetCryptokeyParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "95c20fae814d8532ce1e635db8aa5845", "score": "0.49873564", "text": "func NewGetMemoryDumpParams() *GetMemoryDumpParams {\n\treturn &GetMemoryDumpParams{\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "e7c1d8baa0b8bc49d442dddade549d84", "score": "0.4983622", "text": "func NewGetProductModelsParams() *GetProductModelsParams {\n\tvar (\n\t\tlimitDefault = int64(10)\n\t\tpageDefault = int64(1)\n\t\tpaginationTypeDefault = string(\"page\")\n\t\tsearchAfterDefault = string(\"cursor to the first page\")\n\t\twithCountDefault = bool(false)\n\t)\n\treturn 
&GetProductModelsParams{\n\t\tLimit: &limitDefault,\n\t\tPage: &pageDefault,\n\t\tPaginationType: &paginationTypeDefault,\n\t\tSearchAfter: &searchAfterDefault,\n\t\tWithCount: &withCountDefault,\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "e636164ba8f003bfa2fcc6e3efeb33dc", "score": "0.49833566", "text": "func (s GatewayRouter_openApiSession_Params) NewParams() (apisession.ApiSession_Params, error) {\n\tss, err := apisession.NewApiSession_Params(capnp.Struct(s).Segment())\n\tif err != nil {\n\t\treturn apisession.ApiSession_Params{}, err\n\t}\n\terr = capnp.Struct(s).SetPtr(1, capnp.Struct(ss).ToPtr())\n\treturn ss, err\n}", "title": "" }, { "docid": "457bd81f607183e29bf4e4fa96757954", "score": "0.4966786", "text": "func (co *PlatformAppConfiguration) GetAppBasicDetails() (ApplicationDetail, error) {\n var (\n rawRequest *RawRequest\n response []byte\n err error\n getAppBasicDetailsResponse ApplicationDetail\n\t )\n\n \n\n \n\n \n \n \n \n \n //API call\n rawRequest = NewRequest(\n co.config,\n \"get\",\n fmt.Sprintf(\"/service/platform/configuration/v1.0/company/%s/application/%s/detail\",co.CompanyID, co.ApplicationID),\n nil,\n nil,\n nil)\n response, err = rawRequest.Execute()\n if err != nil {\n return ApplicationDetail{}, err\n\t }\n \n err = json.Unmarshal(response, &getAppBasicDetailsResponse)\n if err != nil {\n return ApplicationDetail{}, common.NewFDKError(err.Error())\n }\n return getAppBasicDetailsResponse, nil\n \n }", "title": "" }, { "docid": "f2f7daafc3ba0944fe6e257d3b578e59", "score": "0.49605104", "text": "func NewGetCpeParams() *GetCpeParams {\n\tvar ()\n\treturn &GetCpeParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "4092c47c3f429e6d6954d855d2f5d103", "score": "0.49526504", "text": "func NewGetSupplierParams() *GetSupplierParams {\n\tvar (\n\t\tcountDefault = int64(1000)\n\t\tfromDefault = int64(0)\n\t\tisInactiveDefault = bool(false)\n\t)\n\treturn &GetSupplierParams{\n\t\tCount: &countDefault,\n\t\tFrom: &fromDefault,\n\t\tIsInactive: &isInactiveDefault,\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "59d403b51466083c26b90a9b7b0b5282", "score": "0.49428478", "text": "func NewParams(client *maps.Client) Params {\n\treturn Params{client: client}\n}", "title": "" }, { "docid": "b79bdfbdf0f97d22482093096acdc2bd", "score": "0.49385786", "text": "func NewParams() Params {\n\treturn Params{}\n}", "title": "" }, { "docid": "b79bdfbdf0f97d22482093096acdc2bd", "score": "0.49385786", "text": "func NewParams() Params {\n\treturn Params{}\n}", "title": "" }, { "docid": "aa4641c555257027b4b7e268eb624848", "score": "0.49334395", "text": "func NewGetLatestParamsWithTimeout(timeout time.Duration) *GetLatestParams {\n\tvar ()\n\treturn &GetLatestParams{\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "59e94f532e48e7a85885c24aa3cbaf9e", "score": "0.49196893", "text": "func NewParams(ap AssetParams,\n) Params {\n\treturn Params{\n\t\tAssetParams: ap,\n\t}\n}", "title": "" }, { "docid": "cb84a178bd69faed7f59c26238a8f92b", "score": "0.49180022", "text": "func NewGetProgramsParams() *GetProgramsParams {\n\tvar ()\n\treturn &GetProgramsParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "ddfec2e3ec731bdb86c73c76d2895b74", "score": "0.4909843", "text": "func NewGetDbConfigParams() *GetDbConfigParams {\n\tvar ()\n\treturn &GetDbConfigParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "2e44f9f7afd22270932e2c0a4c767867", "score": "0.4908754", 
"text": "func NewGetPracticesParams() *GetPracticesParams {\n\treturn &GetPracticesParams{\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "7ff50133cc8bf0bba9a47dd7a893d43f", "score": "0.48946208", "text": "func NewGetCredentialsParams() *GetCredentialsParams {\n\tvar ()\n\treturn &GetCredentialsParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "49b0f9e11939aede684df916c092f5e3", "score": "0.4894544", "text": "func NewWeaviateThingsGetParams() *WeaviateThingsGetParams {\n\tvar ()\n\treturn &WeaviateThingsGetParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "e86e11704c977ada05ccf78c4622753b", "score": "0.4892984", "text": "func NewGetPageParamsWithTimeout(timeout time.Duration) *GetPageParams {\n\tvar ()\n\treturn &GetPageParams{\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "c7940340257ce823c42d4234bcc7e565", "score": "0.4884088", "text": "func (p CreateTradeParam) ExtraParams() map[string]string {\n\treturn map[string]string{\n\t\t\"app_auth_token\": p.AppAuthToken,\n\t\t\"notify_url\": p.NotifyURL,\n\t}\n}", "title": "" }, { "docid": "97889556aa3cd557b9f42275b24fae48", "score": "0.48832375", "text": "func (s WebSession_get_Params) NewContext() (WebSession_Context, error) {\n\tss, err := NewWebSession_Context(s.Struct.Segment())\n\tif err != nil {\n\t\treturn WebSession_Context{}, err\n\t}\n\terr = s.Struct.SetPtr(1, ss.Struct.ToPtr())\n\treturn ss, err\n}", "title": "" }, { "docid": "750e5a6c5d068d9fa39cf3c653c5e6e8", "score": "0.48814955", "text": "func NewGetIPIPTaskParams() *GetIPIPTaskParams {\n\tvar ()\n\treturn &GetIPIPTaskParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "7be2493fbe2bc12d7add56fc2d96a8f4", "score": "0.48783606", "text": "func NewGetFlowParams() *GetFlowParams {\n\treturn &GetFlowParams{\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "44ad1748e2cd211632e53ce904ff4514", "score": "0.48734224", "text": "func NewGetDevicesParams() *GetDevicesParams {\n\tvar (\n\t\tcapabilitiesModeDefault = string(\"and\")\n\t)\n\treturn &GetDevicesParams{\n\t\tCapabilitiesMode: &capabilitiesModeDefault,\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "5032ff69037ff0d97c7fbf09444ee476", "score": "0.48670328", "text": "func NewPostServiceDetailsParams() *PostServiceDetailsParams {\n\tvar ()\n\treturn &PostServiceDetailsParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "6d8a71e65b8aac0cd638a837f89fb2a4", "score": "0.48496664", "text": "func NewImageGetParams() *ImageGetParams {\n\tvar ()\n\treturn &ImageGetParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "09a180dad4d6cc1673d89cd41ca4e087", "score": "0.48463127", "text": "func (s GatewayRouter_openUiSession_Params) NewParams() (websession.WebSession_Params, error) {\n\tss, err := websession.NewWebSession_Params(capnp.Struct(s).Segment())\n\tif err != nil {\n\t\treturn websession.WebSession_Params{}, err\n\t}\n\terr = capnp.Struct(s).SetPtr(1, capnp.Struct(ss).ToPtr())\n\treturn ss, err\n}", "title": "" }, { "docid": "4e0759787f2ee0f9576348e183590d7a", "score": "0.48458937", "text": "func NewGetEventsParams() GetEventsParams {\n\n\treturn GetEventsParams{}\n}", "title": "" }, { "docid": "e3aeb25b8ad2a0fdb99d04f14d056f8f", "score": "0.48303872", "text": "func NewApplicationCollectionGetParamsWithTimeout(timeout time.Duration) *ApplicationCollectionGetParams {\n\treturn &ApplicationCollectionGetParams{\n\t\ttimeout: 
timeout,\n\t}\n}", "title": "" }, { "docid": "b2397f5f229ce6f7b12bbdbe249782e9", "score": "0.48289707", "text": "func (s GatewayRouter_openUiSession_Params) NewParams() (websession.WebSession_Params, error) {\n\tss, err := websession.NewWebSession_Params(s.Struct.Segment())\n\tif err != nil {\n\t\treturn websession.WebSession_Params{}, err\n\t}\n\terr = s.Struct.SetPtr(1, ss.Struct.ToPtr())\n\treturn ss, err\n}", "title": "" }, { "docid": "d93762d792f9a766a6c0b9a46f0e5efd", "score": "0.48258066", "text": "func NewParams() (Params, error) {\n\treturn Params{\n\t\tBaseParams: codec.BaseParams{\n\t\t\tKeyFrameInterval: 60,\n\n\t\t},\n\t\tH264Profile: VAProfileH264ConstrainedBaseline,\n\t\tRateControlMode: RateControlCBR,\n\t}, nil\n}", "title": "" }, { "docid": "13305403e83f7ef25c79e6a6bb2de643", "score": "0.48121804", "text": "func NewGetParamsWithTimeout(timeout time.Duration) *GetParams {\n\tvar ()\n\treturn &GetParams{\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "9e0858c551f507fcaa360b52006488cc", "score": "0.48117548", "text": "func NewCabwiseGetParamsWithTimeout(timeout time.Duration) *CabwiseGetParams {\n\tvar ()\n\treturn &CabwiseGetParams{\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "5550752915b27cfb6bd6bfb5b6be277e", "score": "0.4810251", "text": "func newBasicParams(user, pass string) *authParams {\n\tp := &authParams{\n\t\tGrantType: \"password\",\n\t\tUsername: user,\n\t\tPassword: pass,\n\t\tVersion: \"5.40\",\n\t}\n\treturn p\n}", "title": "" }, { "docid": "1295dbf8f3ce71cd126773e6d9acd140", "score": "0.4803974", "text": "func NewAppsRequestWithoutParam() *AppsRequest {\n\n return &AppsRequest{\n JDCloudRequest: core.JDCloudRequest{\n URL: \"/regions/{regionId}/operate_backend/apps\",\n Method: \"GET\",\n Header: nil,\n Version: \"v1\",\n },\n }\n}", "title": "" }, { "docid": "e5fa1162fde7619d0577facc22f8c3df", "score": "0.47982973", "text": "func NewGetMeParams() GetMeParams {\n\n\treturn GetMeParams{}\n}", "title": "" }, { "docid": "283c7b0f97b45eddb02af176c615b474", "score": "0.4779955", "text": "func NewGetLocationParams() *GetLocationParams {\n\tvar ()\n\treturn &GetLocationParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "8e638a875a8e42bc60a3af4ed67d446d", "score": "0.47785404", "text": "func GetPagingDetailsFromContext(c echo.Context) *PageDetails {\n\tpageRequest := new(PageDetails)\n\tpSize, err := strconv.Atoi(c.QueryParam(\"pageSize\"))\n\tif err != nil {\n\t\tpSize = 0\n\t}\n\tpNum, err := strconv.Atoi(c.QueryParam(\"pageNumber\"))\n\tif err != nil {\n\t\tpNum = 0\n\t}\n\tpageRequest.PageSize = pSize\n\tpageRequest.PageNumber = pNum\n\treturn pageRequest\n}", "title": "" }, { "docid": "c50da8da6ea870da455085de9a23d1df", "score": "0.47730651", "text": "func NewGetTestRunParams() *GetTestRunParams {\n\tvar ()\n\treturn &GetTestRunParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "114d0216a25822bf48ba8cf045da1478", "score": "0.4763247", "text": "func NewGetAppsIDParams() *GetAppsIDParams {\n\tvar ()\n\treturn &GetAppsIDParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "a7a2ee4a27dfe06af106e2c02db56da2", "score": "0.47609028", "text": "func NewEchoParams(c echo.Context) Params {\n\treturn &echoParams{c: c}\n}", "title": "" }, { "docid": "788da135959cd127d782de33a5001f20", "score": "0.47553673", "text": "func NewGetEventTypeParams() *GetEventTypeParams {\n\tvar ()\n\treturn &GetEventTypeParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" 
}, { "docid": "43c3a7fd75df3d689b61af3f46c157f1", "score": "0.4748075", "text": "func NewGetDevicesParams() *GetDevicesParams {\n\treturn &GetDevicesParams{\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "0f820c90b56568aab9281cdabebb8496", "score": "0.47469732", "text": "func NewApiApplication()(*ApiApplication) {\n m := &ApiApplication{\n }\n m.backingStore = ie8677ce2c7e1b4c22e9c3827ecd078d41185424dd9eeb92b7d971ed2d49a392e.BackingStoreFactoryInstance();\n m.SetAdditionalData(make(map[string]any))\n return m\n}", "title": "" }, { "docid": "8f965e89a482a786759d3fcb85511fee", "score": "0.4743712", "text": "func NewParams(evmDenom string) Params {\n\treturn Params{\n\t\tEvmDenom: evmDenom,\n\t}\n}", "title": "" }, { "docid": "44a557e20e4354180935aea020901f2b", "score": "0.473786", "text": "func NewGetLabelParams() *GetLabelParams {\n\treturn &GetLabelParams{\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "fae0e0aedf3e7c2e08ce0ecd470e71ed", "score": "0.47298685", "text": "func NewGetIcecreamParams() *GetIcecreamParams {\n\tvar ()\n\treturn &GetIcecreamParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "5c3783244204ff8b24c935dbda27fc07", "score": "0.47278893", "text": "func NewGetParams() *StreamGetParams {\n\treturn &StreamGetParams{}\n}", "title": "" }, { "docid": "4b72ad105ad1bf14cb190c0fc70314a3", "score": "0.4723892", "text": "func NewGetApplicationsParamsWithHTTPClient(client *http.Client) *GetApplicationsParams {\n\tvar (\n\t\tcountDefault = int32(10)\n\t\tindexDefault = int32(0)\n\t)\n\treturn &GetApplicationsParams{\n\t\tCount: &countDefault,\n\t\tIndex: &indexDefault,\n\t\tHTTPClient: client,\n\t}\n}", "title": "" }, { "docid": "f1c960529dfbf4977fe37b387f9d1d0a", "score": "0.47226414", "text": "func NewHostRecordGetParams() *HostRecordGetParams {\n\treturn &HostRecordGetParams{\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "c545cc03d76ad4452c2df288a021db3d", "score": "0.47226298", "text": "func NewGetProfileParams() *GetProfileParams {\n\tvar ()\n\treturn &GetProfileParams{}\n}", "title": "" }, { "docid": "2b8ba4ecd13613b53a1a754b72b6f5af", "score": "0.4708028", "text": "func (o *ApplicationCollectionGetParams) WithFields(fields []string) *ApplicationCollectionGetParams {\n\to.SetFields(fields)\n\treturn o\n}", "title": "" }, { "docid": "d36e708379a449cb010da33f86de83a7", "score": "0.47045842", "text": "func NewAppsRequestWithAllParams(\n regionId string,\n pin string,\n appName string,\n clientId string,\n multiTenant bool,\n state string,\n scope string,\n startTime int,\n endTime int,\n accountType string,\n pageIndex int,\n pageSize int,\n offset int,\n) *AppsRequest {\n\n return &AppsRequest{\n JDCloudRequest: core.JDCloudRequest{\n URL: \"/regions/{regionId}/operate_backend/apps\",\n Method: \"GET\",\n Header: nil,\n Version: \"v1\",\n },\n RegionId: regionId,\n Pin: pin,\n AppName: appName,\n ClientId: clientId,\n MultiTenant: multiTenant,\n State: state,\n Scope: scope,\n StartTime: startTime,\n EndTime: endTime,\n AccountType: accountType,\n PageIndex: pageIndex,\n PageSize: pageSize,\n Offset: offset,\n }\n}", "title": "" }, { "docid": "a88ae515d739920877bbec9504d7d51d", "score": "0.47034886", "text": "func (a *PlaidApiService) ApplicationGet(ctx _context.Context) ApiApplicationGetRequest {\n\treturn ApiApplicationGetRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "title": "" }, { "docid": "7b9b0ae57c9fd1b7cf14a19b30c4456a", "score": "0.47030568", "text": "func 
NewSchemaGetParams() *SchemaGetParams {\n\tvar ()\n\treturn &SchemaGetParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "e7ddb2950f85bf23d53f1647ac261956", "score": "0.46926934", "text": "func NewGetAppsAppCallsRouteParams() *GetAppsAppCallsRouteParams {\n\tvar ()\n\treturn &GetAppsAppCallsRouteParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "3e27873310d882f3e3321c9d3512a1db", "score": "0.46886083", "text": "func NewGetDashboardParams() *GetDashboardParams {\n\n\treturn &GetDashboardParams{\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "8589659234d3479188474ff2d528af3a", "score": "0.46743852", "text": "func NewGetSupplierParamsWithTimeout(timeout time.Duration) *GetSupplierParams {\n\tvar (\n\t\tcountDefault = int64(1000)\n\t\tfromDefault = int64(0)\n\t\tisInactiveDefault = bool(false)\n\t)\n\treturn &GetSupplierParams{\n\t\tCount: &countDefault,\n\t\tFrom: &fromDefault,\n\t\tIsInactive: &isInactiveDefault,\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "14bcfbd53127bead60463a92df6f3c19", "score": "0.46712178", "text": "func NewAppManagementConfiguration()(*AppManagementConfiguration) {\n m := &AppManagementConfiguration{\n }\n m.backingStore = ie8677ce2c7e1b4c22e9c3827ecd078d41185424dd9eeb92b7d971ed2d49a392e.BackingStoreFactoryInstance();\n m.SetAdditionalData(make(map[string]any))\n return m\n}", "title": "" }, { "docid": "a264aac997b9c301d0d3faebebee8396", "score": "0.46647358", "text": "func NewLicenseGetParams() *LicenseGetParams {\n\treturn &LicenseGetParams{\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" } ]
20c90145a962f382c234f4eba58693fd
Non-mutating call, not a transaction submission.
[ { "docid": "eb785c0ee943d28fa6f89002293383e4", "score": "0.0", "text": "func (esp *EcdsaSortitionPool) CallWithdrawRewards(\n\targ_operator common.Address,\n\targ_beneficiary common.Address,\n\tblockNumber *big.Int,\n) (*big.Int, error) {\n\tvar result *big.Int\n\n\terr := chainutil.CallAtBlock(\n\t\tesp.transactorOptions.From,\n\t\tblockNumber, nil,\n\t\tesp.contractABI,\n\t\tesp.caller,\n\t\tesp.errorResolver,\n\t\tesp.contractAddress,\n\t\t\"withdrawRewards\",\n\t\t&result,\n\t\targ_operator,\n\t\targ_beneficiary,\n\t)\n\n\treturn result, err\n}", "title": "" } ]
[ { "docid": "d45aa7cdf4fec8339236134d7a532a21", "score": "0.5661579", "text": "func (d *Devops) Mutate(ctx context.Context, mutantSpec *pb.MutantSpec) (*pb.Response, error) {\n\tmutantTx := &pb.MutantTransaction{\n\t\tTxSetID: mutantSpec.TxSetID,\n\t\tTxSetIndex: mutantSpec.Index,\n\t}\n\n\tmutBytes, err := proto.Marshal(mutantTx)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Unable to marshal the mutant transaction (%s)\", err)\n\t}\n\tmarshaledTimestamp, err := proto.Marshal(util.CreateUtcTimestamp())\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Unable to marshal current timestamp. Err: %s\", err)\n\t}\n\tmutBytes = append(mutBytes, marshaledTimestamp...)\n\n\n\tinBlockTx := &pb.InBlockTransaction{\n\t\tTransaction: &pb.InBlockTransaction_MutantTransaction{MutantTransaction: mutantTx},\n\t\tTxid: hex.EncodeToString(util.ComputeCryptoHash(mutBytes)),\n\t\tTimestamp: util.CreateUtcTimestamp(),\n\t}\n\tresp := d.coord.ExecuteTransaction(inBlockTx)\n\tif resp.Status == pb.Response_FAILURE {\n\t\terr = fmt.Errorf(string(resp.Msg))\n\t}\n\treturn resp, err\n}", "title": "" }, { "docid": "3a12975b9fbd94ec02c5b71f3f305a27", "score": "0.54735315", "text": "func (e *Egress) TransactionMessageRecord(){\n\n}", "title": "" }, { "docid": "896c64fcc160bd2d0fba8106fe58e186", "score": "0.53692675", "text": "func (op *triageOp) transaction(c context.Context, job *Job) error {\n\t// Reset state collected in the transaction in case this is a retry.\n\top.garbage = nil\n\n\t// Tidy ActiveInvocations list by moving all recently finished invocations to\n\t// FinishedInvocations list.\n\ttidyOp, err := op.tidyActiveInvocations(c, job)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Process pending triggers set by emitting new invocations. Note that this\n\t// modifies ActiveInvocations list when emitting invocations.\n\ttriggersOp, err := op.processTriggers(c, job)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// If nothing is running anymore, make sure the cron is ticking again. This is\n\t// useful for schedules like \"with 10min interval\" that initiate an invocation\n\t// after some time after the previous one finishes. This call submits at most\n\t// one task to TQ. Note that there's no harm in calling this multiple times,\n\t// only the first call will actually do something.\n\tif len(job.ActiveInvocations) == 0 {\n\t\terr := pokeCron(c, job, op.dispatcher, func(m *cron.Machine) error {\n\t\t\tm.RewindIfNecessary()\n\t\t\treturn nil\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Submit set modifications. This may produce more garbage that we need to\n\t// cleanup later (outside the transaction).\n\tpopOps := []*dsset.PopOp{}\n\tif tidyOp != nil {\n\t\tpopOps = append(popOps, tidyOp)\n\t}\n\tif triggersOp != nil {\n\t\tpopOps = append(popOps, triggersOp)\n\t}\n\tif op.garbage, err = dsset.FinishPop(c, popOps...); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "ec8d552c85fb96223044b0df284a5c80", "score": "0.53456247", "text": "func (s *Sender) SendTransaction(tx *Transaction) {\n\n}", "title": "" }, { "docid": "1e2cb03a3941ed735e2224c529b544ac", "score": "0.52556664", "text": "func updateTransaction_blockedCalled(stub shim.ChaincodeStubInterface, args []string) error {\n var objectType, transactionID, clientID, groupbuyID, currency string\n var amount float64\n var err error\n if len(args) != 6 {\n return fmt.Errorf(\"Incorrect number of arguments. 
Expecting 6\")\n }\n objectType = \"transaction_blocked\"\n transactionID = args[0]\n clientID = args[1]\n groupbuyID = args[2]\n currency = args[3]\n amount, err = strconv.ParseFloat(args[4], 64)\n status := args[5]\n if err != nil {\n return fmt.Errorf(\"4th argument must be a numeric string\")\n }\n\n\n // Create composite key\n transaction_blockedKeyString := objectType + \"_\" + transactionID + \"_\" + clientID + \"_\" + groupbuyID + \"_\" + currency + \"_\" + status\n transaction_blockedKey, err := stub.CreateCompositeKey(transaction_blockedKeyString, []string{objectType, transactionID, clientID, groupbuyID, currency, status})\n if err != nil {\n fmt.Errorf(err.Error())\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- start updateTransaction_blockedCalled \" + transaction_blockedKeyString)\n\n // Get state with composite key\n transaction_blockedTemp := &Transaction_blocked{}\n transaction_blockedAsBytes, err := stub.GetState(transaction_blockedKey)\n if err != nil {\n return fmt.Errorf(\"Failed to get state for :\" + transaction_blockedKeyString)\n } else if transaction_blockedAsBytes == nil {\n // We don't need to check if client has joined groupbuy already.\n // But if client doesn't have existing groupbuy, create one\n transaction_blockedTemp = &Transaction_blocked{\n ObjectType : objectType,\n Timestamp : time.Now().String(),\n TransactionID : transactionID,\n ClientID : clientID,\n GroupbuyID : groupbuyID,\n Currency : currency,\n Amount : amount,\n Status : status,\n }\n fmt.Println(\"Transaction_blocked created: \" + transaction_blockedKeyString)\n } else {\n // Unmarshal client and update value\n err = json.Unmarshal(transaction_blockedAsBytes, &transaction_blockedTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n }\n\n transaction_blockedJSONasBytes, err := json.Marshal(transaction_blockedTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n\n // Save asset to state\n err = stub.PutState(transaction_blockedKey, transaction_blockedJSONasBytes)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- end Called (success)\")\n fmt.Println(\"==============================\")\n return nil\n\n\n}", "title": "" }, { "docid": "6c04182ee1f6d9c0587d037baea3eeb7", "score": "0.52552307", "text": "func updateTransaction_blocked_recordCalled(stub shim.ChaincodeStubInterface, args []string) error {\n var objectType, transactionID, clientID, groupbuyID, currency string\n var amount float64\n var err error\n if len(args) != 6 {\n return fmt.Errorf(\"Incorrect number of arguments. 
Expecting 6\")\n }\n objectType = \"transaction_blocked_record\"\n transactionID = args[0]\n clientID = args[1]\n groupbuyID = args[2]\n currency = args[3]\n amount, err = strconv.ParseFloat(args[4], 64)\n status := args[5]\n if err != nil {\n return fmt.Errorf(\"4th argument must be a numeric string\")\n }\n\n\n // Create composite key\n transaction_blocked_recordKeyString := objectType + \"_\" + transactionID + \"_\" + clientID + \"_\" + groupbuyID + \"_\" + currency + \"_\" + status\n transaction_blocked_recordKey, err := stub.CreateCompositeKey(transaction_blocked_recordKeyString, []string{objectType, transactionID, clientID, groupbuyID, currency, status})\n if err != nil {\n fmt.Errorf(err.Error())\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- start updateTransaction_blocked_recordCalled \" + transaction_blocked_recordKeyString)\n\n // Get state with composite key\n transaction_blocked_recordTemp := &Transaction_blocked_record{}\n transaction_blocked_recordAsBytes, err := stub.GetState(transaction_blocked_recordKey)\n if err != nil {\n return fmt.Errorf(\"Failed to get state for :\" + transaction_blocked_recordKeyString)\n } else if transaction_blocked_recordAsBytes == nil {\n // We don't need to check if client has joined groupbuy already.\n // But if client doesn't have existing groupbuy, create one\n transaction_blocked_recordTemp = &Transaction_blocked_record{\n ObjectType : objectType,\n Timestamp : time.Now().String(),\n TransactionID : transactionID,\n ClientID : clientID,\n GroupbuyID : groupbuyID,\n Currency : currency,\n Amount : amount,\n Status : status,\n }\n fmt.Println(\"Transaction_blocked_record created: \" + transaction_blocked_recordKeyString)\n } else {\n // Unmarshal client and update value\n err = json.Unmarshal(transaction_blocked_recordAsBytes, &transaction_blocked_recordTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n }\n\n transaction_blocked_recordJSONasBytes, err := json.Marshal(transaction_blocked_recordTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n\n // Save asset to state\n err = stub.PutState(transaction_blocked_recordKey, transaction_blocked_recordJSONasBytes)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- end Called (success)\")\n fmt.Println(\"==============================\")\n return nil\n\n\n}", "title": "" }, { "docid": "9c148dca026cfabb549a91b9c5850650", "score": "0.5249667", "text": "func (t *SimpleChaincode) submitTx(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\n\tfmt.Println(\"Running submitTx\")\n\t\n\t\n\tvar tx Transaction\n\ttx.RefNumber \t= args[0]\n\ttx.Date \t\t= args[1]\n\ttx.Description \t= args[2]\n\ttx.Type \t = args[3]\n\ttx.To \t\t\t= args[5]\n\ttx.From \t\t= args[6]\n\ttx.Contract \t= args[7]\n\ttx.StatusCode \t= 1\n\ttx.StatusMsg \t= \"Transaction Completed\"\n\t\n\t\n\tamountValue, err := strconv.ParseFloat(args[4], 64)\n\tif err != nil {\n\t\ttx.StatusCode = 0\n\t\ttx.StatusMsg = \"Invalid Amount\"\n\t}else{\n\t\ttx.Amount = amountValue\n\t}\n\t\n\t\n\t//***************************************************************\n\t// Get Receiver account from BC\n\trfidBytes, err := stub.GetState(tx.To)\n\tif err != nil {\n\t\treturn nil, errors.New(\"SubmitTx Failed to get User from BC\")\n\t}\n\tvar receiver User\n\tfmt.Println(\"SubmitTx Unmarshalling User Struct\");\n\terr = json.Unmarshal(rfidBytes, &receiver)\n\treceiver.Balance = receiver.Balance + tx.Amount\n\t\n\t\n\t//Commit Receiver to ledger\n\tfmt.Println(\"SubmitTx Commit Updated Sender To 
Ledger\");\n\ttxsAsBytes, _ := json.Marshal(receiver)\n\terr = stub.PutState(tx.To, txsAsBytes)\t\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t\n\t// Get Sender account from BC\n\trfidBytes, err = stub.GetState(tx.From)\n\tif err != nil {\n\t\treturn nil, errors.New(\"SubmitTx Failed to get Financial Institution\")\n\t}\n\tvar sender FinancialInst\n\tfmt.Println(\"SubmitTx Unmarshalling Financial Institution\");\n\terr = json.Unmarshal(rfidBytes, &sender)\n\tsender.Accounts[0].CashBalance = sender.Accounts[0].CashBalance - tx.Amount\n\t\n\t//Commit Sender to ledger\n\tfmt.Println(\"SubmitTx Commit Updated Sender To Ledger\");\n\ttxsAsBytes, _ = json.Marshal(sender)\n\terr = stub.PutState(tx.From, txsAsBytes)\t\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t\n\t\n\treturn nil, nil\n\t//***********************************************************************\n}", "title": "" }, { "docid": "0dc2c336be513e55e42e9115523eeb57", "score": "0.52492887", "text": "func updateTransaction_actionCalled(stub shim.ChaincodeStubInterface, args []string) error {\n var objectType, transactionID, clientID, groupbuyID, status string\n var err error\n if len(args) != 4 {\n return fmt.Errorf(\"Incorrect number of arguments. Expecting 4\")\n }\n objectType = \"transaction_action\"\n transactionID = args[0]\n clientID = args[1]\n groupbuyID = args[2]\n\tstatus = args[3]\n // Create composite key\n transaction_actionKeyString := objectType + \"_\" + transactionID + \"_\" + clientID + \"_\" + groupbuyID + \"_\" + status\n transaction_actionKey, err := stub.CreateCompositeKey(transaction_actionKeyString, []string{objectType, transactionID, clientID, groupbuyID, status})\n if err != nil {\n fmt.Errorf(err.Error())\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- start updateTransaction_actionCalled \" + transaction_actionKeyString)\n\n // Get state with composite key\n transaction_actionTemp := &Transaction_action{}\n transaction_actionAsBytes, err := stub.GetState(transaction_actionKey)\n if err != nil {\n return fmt.Errorf(\"Failed to get state for :\" + transaction_actionKeyString)\n } else if transaction_actionAsBytes == nil {\n // We don't need to check if client has joined groupbuy already.\n // But if client doesn't have existing groupbuy, create one\n transaction_actionTemp = &Transaction_action{\n ObjectType : objectType,\n Timestamp : time.Now().String(),\n TransactionID : transactionID,\n ClientID : clientID,\n GroupbuyID : groupbuyID,\n\t\t\tStatus : status,\n }\n fmt.Println(\"Transaction_action created: \" + transaction_actionKeyString)\n } else {\n // Unmarshal client and update value\n err = json.Unmarshal(transaction_actionAsBytes, &transaction_actionTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n }\n\n transaction_actionJSONasBytes, err := json.Marshal(transaction_actionTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n\n // Save asset to state\n err = stub.PutState(transaction_actionKey, transaction_actionJSONasBytes)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- end Called (success)\")\n fmt.Println(\"==============================\")\n return nil\n\n\n}", "title": "" }, { "docid": "b7b194981b783b5a7ac90ccac111e8eb", "score": "0.5237299", "text": "func (txn *Txn) send(ba roachpb.BatchRequest) (*roachpb.BatchResponse, *roachpb.Error) {\n\n\tif txn.Proto.Status != roachpb.PENDING || txn.IsFinalized() {\n\t\treturn nil, roachpb.NewErrorf(\n\t\t\t\"attempting to use transaction with wrong status or finalized: %s\", 
txn.Proto.Status)\n\t}\n\n\t// It doesn't make sense to use inconsistent reads in a transaction. However,\n\t// we still need to accept it as a parameter for this to compile.\n\tif ba.ReadConsistency != roachpb.CONSISTENT {\n\t\treturn nil, roachpb.NewErrorf(\"cannot use %s ReadConsistency in txn\",\n\t\t\tba.ReadConsistency)\n\t}\n\n\tlastIndex := len(ba.Requests) - 1\n\tif lastIndex < 0 {\n\t\treturn nil, nil\n\t}\n\n\t// firstWriteIndex is set to the index of the first command which is\n\t// a transactional write. If != -1, this indicates an intention to\n\t// write. This is in contrast to txn.Proto.Writing, which is set by\n\t// the coordinator when the first intent has been created, and which\n\t// lives for the life of the transaction.\n\tfirstWriteIndex := -1\n\tvar firstWriteKey roachpb.Key\n\n\tfor i, ru := range ba.Requests {\n\t\targs := ru.GetInner()\n\t\tif i < lastIndex {\n\t\t\tif _, ok := args.(*roachpb.EndTransactionRequest); ok {\n\t\t\t\treturn nil, roachpb.NewErrorf(\"%s sent as non-terminal call\", args.Method())\n\t\t\t}\n\t\t}\n\t\tif roachpb.IsTransactionWrite(args) && firstWriteIndex == -1 {\n\t\t\tfirstWriteKey = args.Header().Key\n\t\t\tfirstWriteIndex = i\n\t\t}\n\t}\n\n\thaveTxnWrite := firstWriteIndex != -1\n\tendTxnRequest, haveEndTxn := ba.Requests[lastIndex].GetInner().(*roachpb.EndTransactionRequest)\n\tneedBeginTxn := !txn.Proto.Writing && haveTxnWrite\n\tneedEndTxn := txn.Proto.Writing || haveTxnWrite\n\telideEndTxn := haveEndTxn && !needEndTxn\n\n\t// If we're not yet writing in this txn, but intend to, insert a\n\t// begin transaction request before the first write command.\n\tif needBeginTxn {\n\t\t// If the transaction already has a key (we're in a restart), make\n\t\t// sure we set the key in the begin transaction request to the original.\n\t\tbt := &roachpb.BeginTransactionRequest{\n\t\t\tSpan: roachpb.Span{\n\t\t\t\tKey: firstWriteKey,\n\t\t\t},\n\t\t}\n\t\tif txn.Proto.Key != nil {\n\t\t\tbt.Key = txn.Proto.Key\n\t\t}\n\t\t// Inject the new request before position firstWriteIndex, taking\n\t\t// care to avoid unnecessary allocations.\n\t\toldRequests := ba.Requests\n\t\tba.Requests = make([]roachpb.RequestUnion, len(ba.Requests)+1)\n\t\tcopy(ba.Requests, oldRequests[:firstWriteIndex])\n\t\tba.Requests[firstWriteIndex].MustSetInner(bt)\n\t\tcopy(ba.Requests[firstWriteIndex+1:], oldRequests[firstWriteIndex:])\n\t}\n\n\tif elideEndTxn {\n\t\tba.Requests = ba.Requests[:lastIndex]\n\t}\n\n\tbr, pErr := txn.db.send(ba)\n\tif elideEndTxn && pErr == nil {\n\t\t// Check that read only transactions do not violate their deadline. This can NOT\n\t\t// happen since the txn deadline is normally updated when it is about to expire\n\t\t// or expired. We will just keep the code for safety (see TestReacquireLeaseOnRestart).\n\t\tif endTxnRequest.Deadline != nil {\n\t\t\tif endTxnRequest.Deadline.Less(txn.Proto.Timestamp) {\n\t\t\t\treturn nil, roachpb.NewErrorWithTxn(roachpb.NewTransactionAbortedError(), &txn.Proto)\n\t\t\t}\n\t\t}\n\t\t// This normally happens on the server and sent back in response\n\t\t// headers, but this transaction was optimized away. 
The caller may\n\t\t// still inspect the transaction struct, so we manually update it\n\t\t// here to emulate a true transaction.\n\t\tif endTxnRequest.Commit {\n\t\t\ttxn.Proto.Status = roachpb.COMMITTED\n\t\t} else {\n\t\t\ttxn.Proto.Status = roachpb.ABORTED\n\t\t}\n\t\ttxn.finalized = true\n\t}\n\n\t// If we inserted a begin transaction request, remove it here.\n\tif needBeginTxn {\n\t\tif br != nil && br.Responses != nil {\n\t\t\tbr.Responses = append(br.Responses[:firstWriteIndex], br.Responses[firstWriteIndex+1:]...)\n\t\t}\n\t\t// Handle case where inserted begin txn confused an indexed error.\n\t\tif pErr != nil && pErr.Index != nil {\n\t\t\tidx := pErr.Index.Index\n\t\t\tif idx == int32(firstWriteIndex) {\n\t\t\t\t// An error was encountered on begin txn; disallow the indexing.\n\t\t\t\tpErr.Index = nil\n\t\t\t} else if idx > int32(firstWriteIndex) {\n\t\t\t\t// An error was encountered after begin txn; decrement index.\n\t\t\t\tpErr.SetErrorIndex(idx - 1)\n\t\t\t}\n\t\t}\n\t}\n\treturn br, pErr\n}", "title": "" }, { "docid": "5c6a7b006c815a21a2ef0046b5bef42f", "score": "0.5227334", "text": "func Submit(p PlasmaTransaction) (*TransactionSubmitResponse, error) {\n\treturn p.Submit()\n}", "title": "" }, { "docid": "8088171831b439b177e7e17a5580eedd", "score": "0.51783556", "text": "func (_Worker *WorkerRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Worker.Contract.WorkerTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "2a7c40f45652823ed8e72e5fa22ac184", "score": "0.5143667", "text": "func updateTransaction_contract_recordCalled(stub shim.ChaincodeStubInterface, args []string) error {\n var objectType, transactionID, clientID, groupbuyID, currency string\n var amount float64\n var err error\n if len(args) != 6 {\n return fmt.Errorf(\"Incorrect number of arguments. 
Expecting 6\")\n }\n objectType = \"transaction_contract_record\"\n transactionID = args[0]\n clientID = args[1]\n groupbuyID = args[2]\n currency = args[3]\n amount, err = strconv.ParseFloat(args[4], 64)\n status := args[5]\n if err != nil {\n return fmt.Errorf(\"4th argument must be a numeric string\")\n }\n\n\n // Create composite key\n transaction_contract_recordKeyString := objectType + \"_\" + transactionID + \"_\" + clientID + \"_\" + groupbuyID + \"_\" + currency + \"_\" + status\n transaction_contract_recordKey, err := stub.CreateCompositeKey(transaction_contract_recordKeyString, []string{objectType, transactionID, clientID, groupbuyID, currency, status})\n if err != nil {\n fmt.Errorf(err.Error())\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- start updateTransaction_contract_recordCalled \" + transaction_contract_recordKeyString)\n\n // Get state with composite key\n transaction_contract_recordTemp := &Transaction_contract_record{}\n transaction_contract_recordAsBytes, err := stub.GetState(transaction_contract_recordKey)\n if err != nil {\n return fmt.Errorf(\"Failed to get state for :\" + transaction_contract_recordKeyString)\n } else if transaction_contract_recordAsBytes == nil {\n // We don't need to check if client has joined groupbuy already.\n // But if client doesn't have existing groupbuy, create one\n transaction_contract_recordTemp = &Transaction_contract_record{\n ObjectType : objectType,\n Timestamp : time.Now().String(),\n TransactionID : transactionID,\n ClientID : clientID,\n GroupbuyID : groupbuyID,\n Currency : currency,\n Amount : amount,\n Status : status,\n }\n fmt.Println(\"Transaction_contract_record created: \" + transaction_contract_recordKeyString)\n } else {\n // Unmarshal client and update value\n err = json.Unmarshal(transaction_contract_recordAsBytes, &transaction_contract_recordTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n }\n\n transaction_contract_recordJSONasBytes, err := json.Marshal(transaction_contract_recordTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n\n // Save asset to state\n err = stub.PutState(transaction_contract_recordKey, transaction_contract_recordJSONasBytes)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- end Called (success)\")\n fmt.Println(\"==============================\")\n return nil\n\n\n}", "title": "" }, { "docid": "3fca29dc167854365a15198898eca97b", "score": "0.5106886", "text": "func (t *Client) submitTransactionInternal(ctx context.Context, transactionActions []TransactionAction, batchUuid uuid.UUID, changesetUuid uuid.UUID, tableSubmitTransactionOptions *SubmitTransactionOptions) (TransactionResponse, error) {\n\tif len(transactionActions) == 0 {\n\t\treturn TransactionResponse{}, errEmptyTransaction\n\t}\n\tchangesetBoundary := fmt.Sprintf(\"changeset_%s\", changesetUuid.String())\n\tchangeSetBody, err := t.generateChangesetBody(changesetBoundary, transactionActions)\n\tif err != nil {\n\t\treturn TransactionResponse{}, err\n\t}\n\treq, err := runtime.NewRequest(ctx, http.MethodPost, runtime.JoinPaths(t.client.Endpoint(), \"$batch\"))\n\tif err != nil {\n\t\treturn TransactionResponse{}, err\n\t}\n\treq.Raw().Header.Set(\"x-ms-version\", \"2019-02-02\")\n\treq.Raw().Header.Set(\"DataServiceVersion\", \"3.0\")\n\treq.Raw().Header.Set(\"Accept\", string(generated.ODataMetadataFormatApplicationJSONODataMinimalmetadata))\n\n\tboundary := fmt.Sprintf(\"batch_%s\", batchUuid.String())\n\tbody := new(bytes.Buffer)\n\twriter := 
multipart.NewWriter(body)\n\terr = writer.SetBoundary(boundary)\n\tif err != nil {\n\t\treturn TransactionResponse{}, err\n\t}\n\th := make(textproto.MIMEHeader)\n\th.Set(headerContentType, fmt.Sprintf(\"multipart/mixed; boundary=%s\", changesetBoundary))\n\tbatchWriter, err := writer.CreatePart(h)\n\tif err != nil {\n\t\treturn TransactionResponse{}, err\n\t}\n\t_, err = batchWriter.Write(changeSetBody.Bytes())\n\tif err != nil {\n\t\treturn TransactionResponse{}, err\n\t}\n\twriter.Close()\n\n\terr = req.SetBody(streaming.NopCloser(bytes.NewReader(body.Bytes())), fmt.Sprintf(\"multipart/mixed; boundary=%s\", boundary))\n\tif err != nil {\n\t\treturn TransactionResponse{}, err\n\t}\n\n\tresp, err := t.client.Pipeline().Do(req)\n\tif err != nil {\n\t\treturn TransactionResponse{}, err\n\t}\n\n\tif !runtime.HasStatusCode(resp, http.StatusAccepted, http.StatusNoContent) {\n\t\treturn TransactionResponse{}, runtime.NewResponseError(resp)\n\t}\n\n\treturn buildTransactionResponse(req, resp, len(transactionActions))\n}", "title": "" }, { "docid": "ee9a08d44102630f53a40d110acf7d6f", "score": "0.5102352", "text": "func updateTransaction_action_recordCalled(stub shim.ChaincodeStubInterface, args []string) error {\n var objectType, transactionID, clientID, groupbuyID, status string\n var err error\n if len(args) != 4 {\n return fmt.Errorf(\"Incorrect number of arguments. Expecting 4\")\n }\n objectType = \"transaction_action_record\"\n transactionID = args[0]\n clientID = args[1]\n groupbuyID = args[2]\n status = args[3]\n // Create composite key\n transaction_action_recordKeyString := objectType + \"_\" + transactionID + \"_\" + clientID + \"_\" + groupbuyID + \"_\" + status\n transaction_action_recordKey, err := stub.CreateCompositeKey(transaction_action_recordKeyString, []string{objectType, transactionID, clientID, groupbuyID, status})\n if err != nil {\n fmt.Errorf(err.Error())\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- start updateTransaction_action_recordCalled \" + transaction_action_recordKeyString)\n\n // Get state with composite key\n transaction_action_recordTemp := &Transaction_action_record{}\n transaction_action_recordAsBytes, err := stub.GetState(transaction_action_recordKey)\n if err != nil {\n return fmt.Errorf(\"Failed to get state for :\" + transaction_action_recordKeyString)\n } else if transaction_action_recordAsBytes == nil {\n // We don't need to check if client has joined groupbuy already.\n // But if client doesn't have existing groupbuy, create one\n transaction_action_recordTemp = &Transaction_action_record{\n ObjectType : objectType,\n Timestamp : time.Now().String(),\n TransactionID : transactionID,\n ClientID : clientID,\n GroupbuyID : groupbuyID,\n Status : status,\n }\n fmt.Println(\"Transaction_action_record created: \" + transaction_action_recordKeyString)\n } else {\n // Unmarshal client and update value\n err = json.Unmarshal(transaction_action_recordAsBytes, &transaction_action_recordTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n }\n\n transaction_action_recordJSONasBytes, err := json.Marshal(transaction_action_recordTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n\n // Save asset to state\n err = stub.PutState(transaction_action_recordKey, transaction_action_recordJSONasBytes)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- end Called (success)\")\n fmt.Println(\"==============================\")\n return nil\n\n\n}", "title": "" }, { "docid": "9d40aa43e5bf24669f4b4f17c4cce252", "score": 
"0.50841784", "text": "func (t *TsohueChainCode) liquidated(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n var err error\n\n if len(args) != 5 {\n return shim.Error(\"Incorrect number of arguments. Expecting 5\")\n }\n transactionID := args[0]\n clientID := args[1]\n groupbuyID := args[2]\n currency := args[3]\n amount := args[4]\n status := \"liquidate\"\n\n\n\n // Add to Transaction_contract\n transaction_contractArgs := []string{transactionID, clientID, groupbuyID, currency, amount, status}\n err = updateTransaction_contractCalled(stub, transaction_contractArgs)\n if err != nil {\n return shim.Error(err.Error())\n }\n\n\n\n // Add to Transaction_contract_record\n transaction_contract_recordArgs := []string{transactionID, clientID, groupbuyID, currency, amount, status}\n err = updateTransaction_contract_recordCalled(stub, transaction_contract_recordArgs)\n if err != nil {\n return shim.Error(err.Error())\n }\n\n\n\n fmt.Println(\"Groupbuy joined success!\")\n return shim.Success(nil)\n\n}", "title": "" }, { "docid": "c7f06592cc69399646ddfd1377c7dc8d", "score": "0.5075758", "text": "func (s *GrpcServer) SubmitTransaction(ctx context.Context, req *pb.SubmitTransactionRequest) (*pb.SubmitTransactionResponse, error) {\n\n\tmsgTx := &wire.MsgTx{}\n\tif err := msgTx.BchDecode(bytes.NewReader(req.Transaction), wire.ProtocolVersion, wire.BaseEncoding); err != nil {\n\t\treturn nil, status.Error(codes.InvalidArgument, \"unable to deserialize transaction\")\n\t}\n\n\tif s.slpIndex != nil && !req.GetSkipSlpValidityCheck() {\n\t\t_, err := s.checkTransactionSlpValidity(msgTx, req.RequiredSlpBurns, false, true)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// Use 0 for the tag to represent local node.\n\ttx := bchutil.NewTx(msgTx)\n\tacceptedTxs, err := s.txMemPool.ProcessTransaction(tx, false, false, 0)\n\tif err != nil {\n\t\t// When the error is a rule error, it means the transaction was\n\t\t// simply rejected as opposed to something actually going wrong,\n\t\t// so log it as such. Otherwise, something really did go wrong,\n\t\t// so log it as an actual error. In both cases, a JSON-RPC\n\t\t// error is returned to the client with the deserialization\n\t\t// error code (to match bitcoind behavior).\n\t\tif _, ok := err.(mempool.RuleError); ok {\n\t\t\tlog.Debugf(\"Rejected transaction %v: %v\", tx.Hash(),\n\t\t\t\terr)\n\t\t} else {\n\t\t\tlog.Errorf(\"Failed to process transaction %v: %v\",\n\t\t\t\ttx.Hash(), err)\n\t\t}\n\t\treturn nil, status.Errorf(codes.InvalidArgument, \"tx rejected: %v\", err)\n\t}\n\n\t// When the transaction was accepted it should be the first item in the\n\t// returned array of accepted transactions. 
The only way this will not\n\t// be true is if the API for ProcessTransaction changes and this code is\n\t// not properly updated, but ensure the condition holds as a safeguard.\n\t//\n\t// Also, since an error is being returned to the caller, ensure the\n\t// transaction is removed from the memory pool.\n\tif len(acceptedTxs) == 0 || !acceptedTxs[0].Tx.Hash().IsEqual(tx.Hash()) {\n\t\ts.txMemPool.RemoveTransaction(tx, true)\n\n\t\treturn nil, status.Errorf(codes.Internal, \"transaction %v is not in accepted list\", tx.Hash())\n\t}\n\n\t// Generate and relay inventory vectors for all newly accepted\n\t// transactions into the memory pool due to the original being\n\t// accepted.\n\ts.netMgr.AnnounceNewTransactions(acceptedTxs)\n\n\t// Keep track of all the sendrawtransaction request txns so that they\n\t// can be rebroadcast if they don't make their way into a block.\n\ttxD := acceptedTxs[0]\n\tiv := wire.NewInvVect(wire.InvTypeTx, txD.Tx.Hash())\n\ts.netMgr.AddRebroadcastInventory(iv, txD)\n\n\tresp := &pb.SubmitTransactionResponse{\n\t\tHash: tx.Hash().CloneBytes(),\n\t}\n\treturn resp, nil\n}", "title": "" }, { "docid": "8487fbecb9b9ce861fa63f05f196657d", "score": "0.5054036", "text": "func (_MultiSigWalletContract *MultiSigWalletContractTransactor) SubmitTransaction(opts *bind.TransactOpts, destination common.Address, value *big.Int, data []byte) (*types.Transaction, error) {\n\treturn _MultiSigWalletContract.contract.Transact(opts, \"submitTransaction\", destination, value, data)\n}", "title": "" }, { "docid": "f8a34c86590a20af6beb2348715ed4b7", "score": "0.5050577", "text": "func (_Worker *WorkerTransactorRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Worker.Contract.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "38696aed8ad4d874ad3efad7fba8f7fb", "score": "0.5048081", "text": "func sendTransaction(gethClientWrapper strpkg.GethClientWrapper, a models.EthTxAttempt) *sendError {\n\tsignedTx, err := a.GetSignedTx()\n\tif err != nil {\n\t\treturn FatalSendError(err)\n\t}\n\terr = gethClientWrapper.GethClient(func(gethClient eth.GethClient) error {\n\t\tctx, cancel := context.WithTimeout(context.Background(), maxEthNodeRequestTime)\n\t\tdefer cancel()\n\t\treturn errors.WithStack(gethClient.SendTransaction(ctx, signedTx))\n\t})\n\n\tlogger.Debugw(\"BulletproofTxManager: Broadcasting transaction\", \"ethTxAttemptID\", a.ID, \"txHash\", signedTx.Hash(), \"gasPriceWei\", a.GasPrice.ToInt().Int64())\n\tsendErr := SendError(err)\n\tif sendErr.IsTransactionAlreadyInMempool() {\n\t\tlogger.Debugw(\"transaction already in mempool\", \"txHash\", signedTx.Hash(), \"nodeErr\", sendErr.Error())\n\t\treturn nil\n\t}\n\treturn SendError(err)\n}", "title": "" }, { "docid": "c401e5909dc1871f88b818e0c0cf9ef4", "score": "0.50207716", "text": "func (_EIP2537Caller *EIP2537CallerSession) CallWithMutation(_address common.Address, _data []byte) (*types.Transaction, error) {\n\treturn _EIP2537Caller.Contract.CallWithMutation(&_EIP2537Caller.TransactOpts, _address, _data)\n}", "title": "" }, { "docid": "7ad5fb08a62fd392b2b06de324fd1b32", "score": "0.5016966", "text": "func (_MultiSigWalletContract *MultiSigWalletContractTransactorSession) SubmitTransaction(destination common.Address, value *big.Int, data []byte) (*types.Transaction, error) {\n\treturn _MultiSigWalletContract.Contract.SubmitTransaction(&_MultiSigWalletContract.TransactOpts, destination, value, data)\n}", "title": "" }, { "docid": 
"81468e8310088a44fb013a93940ddd3f", "score": "0.50143623", "text": "func updateTransaction_takeover_recordCalled(stub shim.ChaincodeStubInterface, args []string) error {\n var objectType, transactionID, groupbuyID, clientID_sell, clientID_buy, status string\n var err error\n if len(args) != 5 {\n return fmt.Errorf(\"Incorrect number of arguments. Expecting 5\")\n }\n objectType = \"transaction_takeover_record\"\n transactionID = args[0]\n groupbuyID = args[1]\n clientID_sell = args[2]\n clientID_buy = args[3]\n status = args[4]\n // Create composite key\n transaction_takeover_recordKeyString := objectType + \"_\" + transactionID + \"_\" + groupbuyID + \"_\" + clientID_sell + \"_\" + clientID_buy + \"_\" + status\n transaction_takeover_recordKey, err := stub.CreateCompositeKey(transaction_takeover_recordKeyString, []string{objectType, transactionID, groupbuyID, clientID_sell, clientID_buy, status})\n if err != nil {\n fmt.Errorf(err.Error())\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- start updateTransaction_takeover_recordCalled \" + transaction_takeover_recordKeyString)\n\n // Get state with composite key\n transaction_takeover_recordTemp := &Transaction_takeover_record{}\n transaction_takeover_recordAsBytes, err := stub.GetState(transaction_takeover_recordKey)\n if err != nil {\n return fmt.Errorf(\"Failed to get state for :\" + transaction_takeover_recordKeyString)\n } else if transaction_takeover_recordAsBytes == nil {\n // We don't need to check if client has joined groupbuy already.\n // But if client doesn't have existing groupbuy, create one\n transaction_takeover_recordTemp = &Transaction_takeover_record{\n ObjectType : objectType,\n Timestamp : time.Now().String(),\n TransactionID : transactionID,\n GroupbuyID : groupbuyID,\n ClientID_sell : clientID_sell,\n ClientID_buy : clientID_buy,\n Status : status,\n }\n fmt.Println(\"Transaction_takeover_record created: \" + transaction_takeover_recordKeyString)\n } else {\n // Unmarshal client and update value\n err = json.Unmarshal(transaction_takeover_recordAsBytes, &transaction_takeover_recordTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n }\n\n transaction_takeover_recordJSONasBytes, err := json.Marshal(transaction_takeover_recordTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n\n // Save asset to state\n err = stub.PutState(transaction_takeover_recordKey, transaction_takeover_recordJSONasBytes)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- end Called (success)\")\n fmt.Println(\"==============================\")\n return nil\n\n\n}", "title": "" }, { "docid": "b8b74179ae101333e26ba65f7a57ef46", "score": "0.5013617", "text": "func (_MultiSig *MultiSigTransactor) SubmitTransaction(opts *bind.TransactOpts, destination common.Address, value *big.Int, data []byte) (*types.Transaction, error) {\n\treturn _MultiSig.contract.Transact(opts, \"submitTransaction\", destination, value, data)\n}", "title": "" }, { "docid": "2e6c50452e0f1d90fb5a277e2d3368f7", "score": "0.4999212", "text": "func updateTransaction_contractCalled(stub shim.ChaincodeStubInterface, args []string) error {\n var objectType, transactionID, clientID, groupbuyID, currency string\n\tvar amount float64\n var err error\n if len(args) != 6 {\n return fmt.Errorf(\"Incorrect number of arguments. 
Expecting 6\")\n }\n objectType = \"transaction_contract\"\n transactionID = args[0]\n clientID = args[1]\n groupbuyID = args[2]\n currency = args[3]\n amount, err = strconv.ParseFloat(args[4], 64)\n\tstatus := args[5]\n if err != nil {\n return fmt.Errorf(\"4th argument must be a numeric string\")\n }\n\n\n // Create composite key\n transaction_contractKeyString := objectType + \"_\" + transactionID + \"_\" + clientID + \"_\" + groupbuyID + \"_\" + currency + \"_\" + status\n transaction_contractKey, err := stub.CreateCompositeKey(transaction_contractKeyString, []string{objectType, transactionID, clientID, groupbuyID, currency, status})\n if err != nil {\n fmt.Errorf(err.Error())\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- start updateTransaction_contractCalled \" + transaction_contractKeyString)\n\n // Get state with composite key\n transaction_contractTemp := &Transaction_contract{}\n transaction_contractAsBytes, err := stub.GetState(transaction_contractKey)\n if err != nil {\n return fmt.Errorf(\"Failed to get state for :\" + transaction_contractKeyString)\n } else if transaction_contractAsBytes == nil {\n // We don't need to check if client has joined groupbuy already.\n // But if client doesn't have existing groupbuy, create one\n transaction_contractTemp = &Transaction_contract{\n ObjectType : objectType,\n Timestamp : time.Now().String(),\n TransactionID : transactionID,\n ClientID : clientID,\n GroupbuyID : groupbuyID,\n Currency : currency,\n Amount : amount,\n\t\t\tStatus : status,\n }\n fmt.Println(\"Transaction_contract created: \" + transaction_contractKeyString)\n } else {\n // Unmarshal client and update value\n err = json.Unmarshal(transaction_contractAsBytes, &transaction_contractTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n }\n\n transaction_contractJSONasBytes, err := json.Marshal(transaction_contractTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n\n // Save asset to state\n err = stub.PutState(transaction_contractKey, transaction_contractJSONasBytes)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- end Called (success)\")\n fmt.Println(\"==============================\")\n return nil\n\n\n}", "title": "" }, { "docid": "1a33271f7995a0dc474bff32acc2091d", "score": "0.49876657", "text": "func (_MultiSigWalletContract *MultiSigWalletContractSession) SubmitTransaction(destination common.Address, value *big.Int, data []byte) (*types.Transaction, error) {\n\treturn _MultiSigWalletContract.Contract.SubmitTransaction(&_MultiSigWalletContract.TransactOpts, destination, value, data)\n}", "title": "" }, { "docid": "9f60f5b9baf8efdda257260c520af5cf", "score": "0.49812508", "text": "func (_EIP2537Caller *EIP2537CallerTransactorSession) CallWithMutation(_address common.Address, _data []byte) (*types.Transaction, error) {\n\treturn _EIP2537Caller.Contract.CallWithMutation(&_EIP2537Caller.TransactOpts, _address, _data)\n}", "title": "" }, { "docid": "997119a02eaea63513964a7089e9883c", "score": "0.49715623", "text": "func (tx *Txn) sendUnsignedTxn(ctx context.Context, rpc RPCClient) (string, error) {\n\tdata := hexutil.Bytes(tx.EthTX.Data())\n\tvar nonce *hexutil.Uint64\n\tif !tx.NodeAssignNonce {\n\t\thexNonce := hexutil.Uint64(tx.EthTX.Nonce())\n\t\tnonce = &hexNonce\n\t}\n\targs := sendTxArgs{\n\t\tNonce: nonce,\n\t\tFrom: tx.From.Hex(),\n\t\tGas: hexutil.Uint64(tx.EthTX.Gas()),\n\t\tGasPrice: hexutil.Big(*tx.EthTX.GasPrice()),\n\t\tValue: hexutil.Big(*tx.EthTX.Value()),\n\t\tData: &data,\n\t}\n\t// if tx.PrivateFrom 
!= \"\" {\n\t// \targs.PrivateFrom = tx.PrivateFrom\n\t// \targs.PrivateFor = tx.PrivateFor\n\t// }\n\tvar to = tx.EthTX.To()\n\tif to != nil {\n\t\targs.To = to.Hex()\n\t}\n\tvar txHash string\n\terr := rpc.CallContext(ctx, &txHash, \"eth_sendTransaction\", args)\n\treturn txHash, err\n}", "title": "" }, { "docid": "76aae14388d1266eba7f12fa32712bbe", "score": "0.4943268", "text": "func mutate(ctx context.Context, m *Mutations, left *Mutations) error {\n\t// Running the set instructions first.\n\tif err := runMutations(ctx, m.Set, posting.Set, left); err != nil {\n\t\treturn err\n\t}\n\tif err := runMutations(ctx, m.Del, posting.Del, left); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "3442ead914d3b8440c0dfaa9171dd199", "score": "0.49426854", "text": "func (s *Stmgr) Call(ctx context.Context, msg *types.Message, ts *types.TipSet) (*types.InvocResult, error) {\n\t// Copy the message as we modify it below.\n\tmsgCopy := *msg\n\tmsg = &msgCopy\n\n\tif msg.GasLimit == 0 {\n\t\tmsg.GasLimit = constants.BlockGasLimit\n\t}\n\tif msg.GasFeeCap == types.EmptyInt {\n\t\tmsg.GasFeeCap = types.NewInt(0)\n\t}\n\tif msg.GasPremium == types.EmptyInt {\n\t\tmsg.GasPremium = types.NewInt(0)\n\t}\n\tif msg.Value == types.EmptyInt {\n\t\tmsg.Value = types.NewInt(0)\n\t}\n\n\treturn s.callInternal(ctx, msg, nil, ts, cid.Undef, s.GetNetworkVersion, false, false)\n}", "title": "" }, { "docid": "9b477d5d0aa5d3a6188a3b0c576410ba", "score": "0.49389318", "text": "func (_EIP2537Caller *EIP2537CallerTransactor) CallWithMutation(opts *bind.TransactOpts, _address common.Address, _data []byte) (*types.Transaction, error) {\n\treturn _EIP2537Caller.contract.Transact(opts, \"call_with_mutation\", _address, _data)\n}", "title": "" }, { "docid": "643344cff0ec0b78fa60701c722791a5", "score": "0.4937138", "text": "func (r *Request) Punish() {\n\tr.p.Metrics.InvalidTransactions.Inc()\n}", "title": "" }, { "docid": "d718420724f375c508e16b8ffe51ef49", "score": "0.4922734", "text": "func (_MultiSig *MultiSigTransactorSession) SubmitTransaction(destination common.Address, value *big.Int, data []byte) (*types.Transaction, error) {\n\treturn _MultiSig.Contract.SubmitTransaction(&_MultiSig.TransactOpts, destination, value, data)\n}", "title": "" }, { "docid": "08b07e2a107731797c26ac5ef2fb04b1", "score": "0.49113292", "text": "func (view *UtxoViewpoint) commit() {\n\tfor txHash, entry := range view.entries {\n\t\tif entry == nil || (entry.modified && entry.IsFullySpent()) {\n\t\t\tdelete(view.entries, txHash)\n\t\t\tcontinue\n\t\t}\n\n\t\tentry.modified = false\n\t}\n}", "title": "" }, { "docid": "594f13bd0cfdfb53b825939cc9b2e024", "score": "0.49112737", "text": "func updateGroupbuy_recordCalled(stub shim.ChaincodeStubInterface, args []string) error {\n var objectType, groupbuyID, transactionID, currency, status string\n\tvar share float64\n\tvar target_amount float64\n var err error\n\n if len(args) != 6 {\n return fmt.Errorf(\"Incorrect number of arguments. 
Expecting 6\")\n }\n objectType = \"groupbuy_record\"\n transactionID = args[0]\n\tgroupbuyID = args[1]\n currency = args[2]\n\ttarget_amount, err = strconv.ParseFloat(args[3], 64)\n\tif err != nil {\n return fmt.Errorf(\"3th argument must be a numeric string\")\n }\n share, err = strconv.ParseFloat(args[4], 64)\n if err != nil {\n return fmt.Errorf(\"4th argument must be a numeric string\")\n }\n status = args[5]\n\n // Create composite key\n groupbuy_recordKeyString := objectType + \"_\" + groupbuyID + \"_\" + transactionID\n groupbuy_recordKey, err := stub.CreateCompositeKey(groupbuy_recordKeyString, []string{objectType, groupbuyID, transactionID})\n if err != nil {\n fmt.Errorf(err.Error())\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- start updateGroupbuyCalled \" + groupbuy_recordKeyString)\n\n // Get state with composite key\n groupbuy_recordTemp := &Groupbuy_record{}\n groupbuy_recordAsBytes, err := stub.GetState(groupbuy_recordKey)\n if err != nil {\n return fmt.Errorf(\"Failed to get state for :\" + groupbuy_recordKeyString)\n } else if groupbuy_recordAsBytes == nil {\n // We don't need to check if client has joined groupbuy already.\n // But if client doesn't have existing groupbuy, create one\n groupbuy_recordTemp = &Groupbuy_record{\n ObjectType : objectType,\n Timestamp : time.Now().String(),\n\t\t TransactionID :transactionID,\n\t\t\tGroupbuyID : groupbuyID,\n Currency : currency,\n\t\t\tTarget_amount : target_amount,\n Share : 0,\n Status : status,\n }\n fmt.Println(\"Groupbuy_record created: \" + groupbuy_recordKeyString)\n } else {\n // Unmarshal client and update value\n err = json.Unmarshal(groupbuy_recordAsBytes, &groupbuy_recordTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n }\n\n // Add the share. (quantity can be a negative number), update timestamp and status\n if groupbuy_recordTemp.Share + share >= 0 {\n groupbuy_recordTemp.Share += share\n groupbuy_recordTemp.Timestamp = time.Now().String()\n groupbuy_recordTemp.Status = status\n } else {\n return fmt.Errorf(\"Not sufficient shares for \" + groupbuyID)\n }\n\n groupbuy_recordJSONasBytes, err := json.Marshal(groupbuy_recordTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n // Save asset to state\n err = stub.PutState(groupbuy_recordKey, groupbuy_recordJSONasBytes)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- end updateGroupbuy_recordCalled (success)\")\n fmt.Println(\"==============================\")\n return nil\n}", "title": "" }, { "docid": "aee78ceb56073d86dc23256da2ff0368", "score": "0.49108943", "text": "func (_AssetProxyOwner *AssetProxyOwnerTransactor) SubmitTransaction(opts *bind.TransactOpts, destination common.Address, value *big.Int, data []byte) (*types.Transaction, error) {\n\treturn _AssetProxyOwner.contract.Transact(opts, \"submitTransaction\", destination, value, data)\n}", "title": "" }, { "docid": "2072639bb490da1d2d3544614e7737ea", "score": "0.49032068", "text": "func (t *transaction) Reset() {}", "title": "" }, { "docid": "864900930daeacabf72868742ba10cd4", "score": "0.4898768", "text": "func (_AssetProxyOwner *AssetProxyOwnerTransactorSession) SubmitTransaction(destination common.Address, value *big.Int, data []byte) (*types.Transaction, error) {\n\treturn _AssetProxyOwner.Contract.SubmitTransaction(&_AssetProxyOwner.TransactOpts, destination, value, data)\n}", "title": "" }, { "docid": "fb713e56b8ec39a5da0a3ce7511589a7", "score": "0.4894662", "text": "func internalRunInTransaction(ctx context.Context, tx PgxTxLoader, fn 
func(ctx context.Context, tx PgxTxLoader) error) error {\n\tdefer func() {\n\t\tif err := recover(); err != nil {\n\t\t\t_ = tx.Rollback(ctx)\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tif err := fn(ctx, tx); err != nil {\n\t\t_ = tx.Rollback(ctx)\n\t\treturn err\n\t}\n\n\treturn tx.Commit(ctx)\n}", "title": "" }, { "docid": "f31c72fe17f9ca8c11e05aa7a1e3fa73", "score": "0.4886533", "text": "func (ts *txnSender) Send(ctx context.Context, ba roachpb.BatchRequest) (*roachpb.BatchResponse, *roachpb.Error) {\n\t// Send call through wrapped sender.\n\tba.Txn = &ts.Proto\n\t// For testing purposes, ts.UserPriority can be a negative value (see\n\t// MakePriority).\n\tif ts.UserPriority != 0 {\n\t\tba.UserPriority = ts.UserPriority\n\t}\n\n\tbr, pErr := ts.wrapped.Send(ts.Context, ba)\n\tif br != nil && br.Error != nil {\n\t\tpanic(roachpb.ErrorUnexpectedlySet(ts.wrapped, br))\n\t}\n\n\tif br != nil {\n\t\tfor _, encSp := range br.CollectedSpans {\n\t\t\tvar newSp basictracer.RawSpan\n\t\t\tif err := tracing.DecodeRawSpan(encSp, &newSp); err != nil {\n\t\t\t\treturn nil, roachpb.NewError(err)\n\t\t\t}\n\t\t\tts.CollectedSpans = append(ts.CollectedSpans, newSp)\n\t\t}\n\t}\n\t// Only successful requests can carry an updated Txn in their response\n\t// header. Any error (e.g. a restart) can have a Txn attached to them as\n\t// well; those update our local state in the same way for the next attempt.\n\t// The exception is if our transaction was aborted and needs to restart\n\t// from scratch, in which case we do just that.\n\tif pErr == nil {\n\t\tts.Proto.Update(br.Txn)\n\t\treturn br, nil\n\t} else if _, ok := pErr.GetDetail().(*roachpb.TransactionAbortedError); ok {\n\t\t// On Abort, reset the transaction so we start anew on restart.\n\t\tts.Proto = roachpb.Transaction{\n\t\t\tTxnMeta: enginepb.TxnMeta{\n\t\t\t\tIsolation: ts.Proto.Isolation,\n\t\t\t},\n\t\t\tName: ts.Proto.Name,\n\t\t}\n\t\t// Acts as a minimum priority on restart.\n\t\tif pErr.GetTxn() != nil {\n\t\t\tts.Proto.Priority = pErr.GetTxn().Priority\n\t\t}\n\t} else if pErr.TransactionRestart != roachpb.TransactionRestart_NONE {\n\t\tts.Proto.Update(pErr.GetTxn())\n\t}\n\treturn nil, pErr\n}", "title": "" }, { "docid": "fe12d4208f36dce3a2087acc533e1bfa", "score": "0.48845807", "text": "func (_MultiSig *MultiSigSession) SubmitTransaction(destination common.Address, value *big.Int, data []byte) (*types.Transaction, error) {\n\treturn _MultiSig.Contract.SubmitTransaction(&_MultiSig.TransactOpts, destination, value, data)\n}", "title": "" }, { "docid": "4518796d714f9d3879d416483fd7d0a7", "score": "0.4881047", "text": "func (_AggregateUpdater *AggregateUpdaterTransactorRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _AggregateUpdater.Contract.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "0c413872fecf264e485e10f997f1f836", "score": "0.48810223", "text": "func (m *Mutations) Send(ctx context.Context, domainID string, update *pb.EntryUpdate) error {\n\tglog.Infof(\"mutationstorage: Send(%v, <mutation>)\", domainID)\n\tmData, err := proto.Marshal(update)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// TODO(gbelvin): Implement retry with backoff for retryable errors if\n\t// we get timestamp contention.\n\treturn m.send(ctx, domainID, mData, time.Now())\n}", "title": "" }, { "docid": "e691739b501f02d6cde7dac35634b54f", "score": "0.48774093", "text": "func verifyTransaction(inc *Transaction) {\n\tif verifySignature(inc) {\n\t\t\tledger.NewBlock.Transactions = 
append(ledger.NewBlock.Transactions, inc)\n\t\t\tledger.Transactions = append(ledger.Transactions, inc)\n\t\t\t//executeTransaction(inc)\n\t} else {\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "c520e218254f9d7bc0c6cdf5dc06955f", "score": "0.48714334", "text": "func (b *buffer) push(tx Transaction) {\n\tb.lock.Lock()\n\tdefer b.lock.Unlock()\n\tb.data = append(b.data, tx)\n}", "title": "" }, { "docid": "e0ab0bb300632efcab5bec82f4f77887", "score": "0.48704672", "text": "func (_AssetProxyOwner *AssetProxyOwnerSession) SubmitTransaction(destination common.Address, value *big.Int, data []byte) (*types.Transaction, error) {\n\treturn _AssetProxyOwner.Contract.SubmitTransaction(&_AssetProxyOwner.TransactOpts, destination, value, data)\n}", "title": "" }, { "docid": "6603a093bce46318c10d7f6b64da77ad", "score": "0.4869155", "text": "func (_Committee *CommitteeRaw) Call(opts *bind.CallOpts, result interface{}, method string, params ...interface{}) error {\n\treturn _Committee.Contract.CommitteeCaller.contract.Call(opts, result, method, params...)\n}", "title": "" }, { "docid": "4ce854e5f9fa6c6d831781e88377562a", "score": "0.48681042", "text": "func (_AggregateUpdater *AggregateUpdaterRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _AggregateUpdater.Contract.AggregateUpdaterTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "6d9c2b2175160647db6752aadf9efcde", "score": "0.48558423", "text": "func (_Token *TokenTransactor) Permit(opts *bind.TransactOpts, owner common.Address, spender common.Address, value *big.Int, deadline *big.Int, v uint8, r [32]byte, s [32]byte) (*types.Transaction, error) {\n\treturn _Token.contract.Transact(opts, \"permit\", owner, spender, value, deadline, v, r, s)\n}", "title": "" }, { "docid": "b22cfe4617380c9939d746846bd90452", "score": "0.48553672", "text": "func updateGroupbuyCalled(stub shim.ChaincodeStubInterface, args []string) error {\n var objectType, groupbuyID, productID, currency, status, dividend, expiry_date, capital string\n var share float64\n\tvar target_amount float64\n var err error\n\n if len(args) != 9 {\n return fmt.Errorf(\"Incorrect number of arguments. 
Expecting 9\")\n }\n objectType = \"groupbuy\"\n\tgroupbuyID = args[0]\n productID = args[1]\n currency = args[2]\n\ttarget_amount, err = strconv.ParseFloat(args[3], 64)\n share, err = strconv.ParseFloat(args[4], 64)\n if err != nil {\n return fmt.Errorf(\"4th argument must be a numeric string\")\n }\n status = args[5]\n\tdividend = args[6]\n\texpiry_date = args[7]\n\tcapital = args[8]\n\n // Create composite key\n groupbuyKeyString := objectType + \"_\" + productID \n groupbuyKey, err := stub.CreateCompositeKey(groupbuyKeyString, []string{objectType, groupbuyID})\n if err != nil {\n fmt.Errorf(err.Error())\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- start updateGroupbuyCalled \" + groupbuyKeyString)\n\n // Get state with composite key\n groupbuyTemp := &Groupbuy{}\n groupbuyAsBytes, err := stub.GetState(groupbuyKey)\n if err != nil {\n return fmt.Errorf(\"Failed to get state for :\" + groupbuyKeyString)\n } else if groupbuyAsBytes == nil {\n // We don't need to check if client has joined groupbuy already.\n // But if client doesn't have existing groupbuy, create one\n groupbuyTemp = &Groupbuy{\n ObjectType : objectType,\n Timestamp : time.Now().String(),\n\t\t\tGroupbuyID : groupbuyID,\n ProductID : productID,\n Currency : currency,\n\t\t\tTarget_amount : 0, \n Share : 0,\n Status : status,\n\t\t\tDividend : dividend,\n\t\t\tExpiry_date : expiry_date,\n\t\t\tCapital : capital,\n }\n fmt.Println(\"Groupbuy created: \" + groupbuyKeyString)\n } else {\n // Unmarshal client and update value\n err = json.Unmarshal(groupbuyAsBytes, &groupbuyTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n }\n\n // Add the share. (quantity can be a negative number), update timestamp and status\n if groupbuyTemp.Share + share >= 0 {\n groupbuyTemp.Share += share\n groupbuyTemp.Timestamp = time.Now().String()\n groupbuyTemp.Status = status\n } else {\n return fmt.Errorf(\"Not sufficient shares for \" + productID)\n }\n\n // Add the target_amount. 
(quantity can be a negative number), update timestamp and status\n if groupbuyTemp.Target_amount + target_amount >= 0 {\n groupbuyTemp.Target_amount += target_amount\n groupbuyTemp.Timestamp = time.Now().String()\n groupbuyTemp.Status = status\n } else {\n return fmt.Errorf(\"Not sufficient shares for \" + productID)\n }\n\n\n groupbuyJSONasBytes, err := json.Marshal(groupbuyTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n // Save asset to state\n err = stub.PutState(groupbuyKey, groupbuyJSONasBytes)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- end updateGroupbuyCalled (success)\")\n fmt.Println(\"==============================\")\n return nil\n}", "title": "" }, { "docid": "539d66e85d83f9e80897343a7c3a7961", "score": "0.48486948", "text": "func (view *UtxoViewpoint) commit() {\n\tfor outpoint, entry := range view.entries {\n\t\tif entry == nil || (entry.isModified() && entry.IsSpent()) {\n\t\t\tdelete(view.entries, outpoint)\n\t\t\tcontinue\n\t\t}\n\n\t\tentry.packedFlags ^= tfModified\n\t}\n}", "title": "" }, { "docid": "e01843bb332f19b7d98f5ef7d3bacbf4", "score": "0.48477894", "text": "func InsertTransaction(transaction Models.Transaction)(bool, Models.Transaction, error){\n\tvar storedTransaction Models.Transaction\n\ttransaction.Type = \"TRANSACTION\"\n\ttransaction.DType = []string{\"Transaction\"}\n\tctx := context.TODO()\n\tdGraph, cancel := Database.GetDgraphClient()\n\tdefer cancel()\n\top := &api.Operation{}\n\top.Schema = `\n\t\tid: string @index(exact) .\n\t\tbuyerId: string @index(exact) .\n\t\tip: string @index(exact) .\n\t\tdevice: string .\n\t\tproductIds: [string] .\n\t\ttype: string @index(exact) .\n\n\t\ttype Transaction {\n\t\t\tid: string\n\t\t\tbuyerId: string\n\t\t\tip: string\n\t\t\tdevice: string\n\t\t\tproductIds: [string]\n\t\t\ttype: string\n\t\t}\n\t`\n\tif err := dGraph.Alter(ctx, op); err != nil {\n\t\tlog.Println(\"Error alter DGraph, Error: \", err)\n\t\treturn false, storedTransaction, err\n\t}\n\tmu := &api.Mutation{\n\t\tCommitNow: true,\n\t}\n\n\ttb, err := json.Marshal(transaction)\n\tif err != nil {\n\t\tlog.Println(\"failed to marshal\", err)\n\t\treturn false, storedTransaction, err\n\t}\n\tmu.SetJson = tb\n\t_, err = dGraph.NewTxn().Mutate(ctx, mu)\n\tif err != nil {\n\t\tlog.Println(\"failed to marshal\", err)\n\t\treturn false, storedTransaction, err\n\t}\n\tvariables := map[string]string{\"$id\": transaction.Id}\n\tq := `query Transaction($id: string){\n\t\ttransaction(func: eq(id, $id)) {\n\t\t\tid\n\t\t\tbuyerId\n\t\t\tip\n\t\t\tdevice\n\t\t\tproductIds\n\t\t}\n\t}`\n\tresp, err := dGraph.NewTxn().QueryWithVars(ctx, q, variables)\n\tif err != nil {\n\t\tlog.Println(\"Error getting the transaction, error: \", err)\n\t\treturn false, storedTransaction, err\n\t}\n\ttype Root struct {\n\t\tTransaction []Models.Transaction `json:\"transaction\"`\n\t}\n\tvar r Root\n\terr = json.Unmarshal(resp.Json, &r)\n\tif err != nil {\n\t\tlog.Println(\"Error unmarshall error: \", err)\n\t\treturn false, storedTransaction, err\n\t}\n\tstoredTransaction = r.Transaction[0]\n\treturn true, storedTransaction, nil\n}", "title": "" }, { "docid": "fbb0f786670537289ac7bf82c5229796", "score": "0.4837239", "text": "func (bc *Blockchain) Commit(db *db.TinyDB) error {\n\n}", "title": "" }, { "docid": "d32cf0ac2fb8d3c1f6ce6eaf99216505", "score": "0.4837131", "text": "func updateTransaction_takeoverCalled(stub shim.ChaincodeStubInterface, args []string) error {\n var objectType, transactionID, groupbuyID, clientID_sell, clientID_buy, 
status string\n\tvar err error\n if len(args) != 5 {\n return fmt.Errorf(\"Incorrect number of arguments. Expecting 5\")\n }\n objectType = \"transaction_takeover\"\n\ttransactionID = args[0]\n\tgroupbuyID = args[1]\n\tclientID_sell = args[2]\n\tclientID_buy = args[3]\n\tstatus = args[4] \n // Create composite key\n transaction_takeoverKeyString := objectType + \"_\" + transactionID + \"_\" + groupbuyID + \"_\" + clientID_sell + \"_\" + clientID_buy + \"_\" + status\n transaction_takeoverKey, err := stub.CreateCompositeKey(transaction_takeoverKeyString, []string{objectType, transactionID, groupbuyID, clientID_sell, clientID_buy, status})\n if err != nil {\n fmt.Errorf(err.Error())\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- start updateTransaction_takeoverCalled \" + transaction_takeoverKeyString)\n\n // Get state with composite key\n transaction_takeoverTemp := &Transaction_takeover{}\n transaction_takeoverAsBytes, err := stub.GetState(transaction_takeoverKey)\n if err != nil {\n return fmt.Errorf(\"Failed to get state for :\" + transaction_takeoverKeyString)\n } else if transaction_takeoverAsBytes == nil {\n // We don't need to check if client has joined groupbuy already.\n // But if client doesn't have existing groupbuy, create one\n transaction_takeoverTemp = &Transaction_takeover{\n ObjectType : objectType,\n Timestamp : time.Now().String(),\n TransactionID : transactionID,\n GroupbuyID : groupbuyID,\n ClientID_sell : clientID_sell,\n ClientID_buy : clientID_buy,\n\t\t\tStatus : status,\n }\n fmt.Println(\"Transaction_takeover created: \" + transaction_takeoverKeyString)\n } else {\n // Unmarshal client and update value\n err = json.Unmarshal(transaction_takeoverAsBytes, &transaction_takeoverTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n }\n\n transaction_takeoverJSONasBytes, err := json.Marshal(transaction_takeoverTemp)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n\n // Save asset to state\n err = stub.PutState(transaction_takeoverKey, transaction_takeoverJSONasBytes)\n if err != nil {\n return fmt.Errorf(err.Error())\n }\n fmt.Println(\"- end Called (success)\")\n fmt.Println(\"==============================\")\n return nil\n\n\n}", "title": "" }, { "docid": "1b0ac2ff9796dc29008c468d6b291805", "score": "0.4835314", "text": "func (agmr *AppGroupMongoRepository) CommitTransaction() {}", "title": "" }, { "docid": "a0b01bd2bfb87e4049d9705ac7807c07", "score": "0.48266464", "text": "func (t * TsohueChainCode) groupbuyUnblocked(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n var err error\n if len(args) != 1 {\n return shim.Error(\"Incorrect number of arguments. 
Expecting 1\")\n }\n\ttransactionID := args[0]\n\tstatus_contract := \"join_amount\"\n\n\n\n\t//Update Transaction_blocked\n //query Transaction_blocked\n queryString := fmt.Sprintf(\"{\\\"selector\\\":{\\\"ObjectType\\\":\\\"transaction_blocked\\\",\\\"transactionID\\\":\\\"%s\\\"}}\", transactionID)\n // Get Groupbuy results from queryString\n resultsIterator, err := stub.GetQueryResult(queryString)\n if err != nil {\n return shim.Error(err.Error())\n }\n defer resultsIterator.Close()\n // Loop through all records of Groupbuy,\n for resultsIterator.HasNext() {\n responseRange, err := resultsIterator.Next()\n if err != nil {\n return shim.Error(err.Error())\n }\n transaction_blockedTemp := &Transaction_blocked{}\n transaction_blockedKey := responseRange.Key\n\n // Unmarshal responseRange.Value to JSON\n err = json.Unmarshal(responseRange.Value, &transaction_blockedTemp)\n if err != nil {\n return shim.Error(err.Error())\n }\n\n // Update groupbuyTemup\n transaction_blockedTemp.Status = \"Unblocked\"\n transaction_blockedTemp.Timestamp = time.Now().String()\n\n // Marshal and Put groupbuy to state\n transaction_blockedJSONasBytes, _ := json.Marshal(transaction_blockedTemp)\n err = stub.PutState(transaction_blockedKey, transaction_blockedJSONasBytes)\n if err != nil {\n return shim.Error(err.Error())\n\n }\n // Add to Transaction_contract\n transaction_contractArgs := []string{transactionID, transaction_blockedTemp.ClientID, transaction_blockedTemp.GroupbuyID, transaction_blockedTemp.Currency, fmt.Sprintf(\"%f\", transaction_blockedTemp.Amount), status_contract}\n err = updateTransaction_contractCalled(stub, transaction_contractArgs)\n if err != nil {\n return shim.Error(err.Error())\n }\n\t\t// Add to Transaction_contract_record\n transaction_contract_recordArgs := []string{transactionID, transaction_blockedTemp.ClientID, transaction_blockedTemp.GroupbuyID, transaction_blockedTemp.Currency, fmt.Sprintf(\"%f\", transaction_blockedTemp.Amount), status_contract}\n err = updateTransaction_contract_recordCalled(stub, transaction_contract_recordArgs)\n if err != nil {\n return shim.Error(err.Error())\n }\n\t\t// Add to Transaction_blocked_record\n transaction_blocked_recordArgs := []string{transactionID, transaction_blockedTemp.ClientID, transaction_blockedTemp.GroupbuyID, transaction_blockedTemp.Currency, fmt.Sprintf(\"%f\",transaction_blockedTemp.Amount), transaction_blockedTemp.Status}\n err = updateTransaction_blocked_recordCalled(stub, transaction_blocked_recordArgs)\n if err != nil {\n return shim.Error(err.Error())\n }\n\n }\n\n\n\n fmt.Println(\"Groupbuy leaved success!\")\n return shim.Success(nil)\n}", "title": "" }, { "docid": "41d00df1c72ade90c4eec3c2aeb80fe0", "score": "0.48173", "text": "func (op *obcClassic) execute(txRaw []byte) {\n\tif err := op.verify(txRaw); err != nil {\n\t\tlogger.Error(\"Request in transaction did not verify: %s\", err)\n\t\treturn\n\t}\n\n\ttx := &pb.Transaction{}\n\terr := proto.Unmarshal(txRaw, tx)\n\tif err != nil {\n\t\tlogger.Error(\"Unable to unmarshal transaction: %v\", err)\n\t\treturn\n\t}\n\n\ttxs := []*pb.Transaction{tx}\n\ttxBatchID := base64.StdEncoding.EncodeToString(util.ComputeCryptoHash(txRaw))\n\n\tif err := op.cpi.BeginTxBatch(txBatchID); err != nil {\n\t\tlogger.Error(\"Failed to begin transaction %s: %v\", txBatchID, err)\n\t\treturn\n\t}\n\n\t_, errs := op.cpi.ExecTXs(txs)\n\tif errs[len(txs)] != nil {\n\t\tlogger.Error(\"Fail to execute transaction %s: %v\", txBatchID, errs)\n\t\tif err = op.cpi.RollbackTxBatch(txBatchID); err != nil 
{\n\t\t\tpanic(fmt.Errorf(\"Unable to rollback transaction %s: %v\", txBatchID, err))\n\t\t}\n\t\treturn\n\t}\n\n\tif err = op.cpi.CommitTxBatch(txBatchID, txs, nil); err != nil {\n\t\tlogger.Error(\"Failed to commit transaction %s to the ledger: %v\", txBatchID, err)\n\t\tif err = op.cpi.RollbackTxBatch(txBatchID); err != nil {\n\t\t\tpanic(fmt.Errorf(\"Unable to rollback transaction %s: %v\", txBatchID, err))\n\t\t}\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "538dadd62ca693be6fa8b793d5f3d0fa", "score": "0.48150888", "text": "func (method *TxCommitOk) Sync() bool {\n\treturn true\n}", "title": "" }, { "docid": "11a12ba54abf09817f5085a2f890f125", "score": "0.48143932", "text": "func (_SushiStaking *SushiStakingTransactor) Set(opts *bind.TransactOpts, _pid *big.Int, _allocPoint *big.Int, _withUpdate bool) (*types.Transaction, error) {\n\treturn _SushiStaking.contract.Transact(opts, \"set\", _pid, _allocPoint, _withUpdate)\n}", "title": "" }, { "docid": "acaa42f7732e2dc878ad89ca91f1b2cb", "score": "0.48002973", "text": "func (_Multicall *MulticallRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Multicall.Contract.MulticallTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "149ba7fd84e3cd6c9bc23274646007d8", "score": "0.47995144", "text": "func (_OffchainAggregator *OffchainAggregatorTransactor) Transmit(opts *bind.TransactOpts, _report []byte, _rs [][32]byte, _ss [][32]byte, _rawVs [32]byte) (*types.Transaction, error) {\n\treturn _OffchainAggregator.contract.Transact(opts, \"transmit\", _report, _rs, _ss, _rawVs)\n}", "title": "" }, { "docid": "6eccf9ea9d983715d35253427216b71e", "score": "0.47983593", "text": "func submit(loop *Loop, work *Work) {\n\twork.id = uuid()\n\twork.loop = loop\n\tloop.wq = append(loop.wq, work)\n\n\tgo func() {\n\t\twork.results = work.work.Call(work.args)\n\t\t// pop up work into done queue\n\t\tloop.mutex.Lock()\n\t\twork.loop.wq = remove(work.loop.wq, work)\n\t\twork.loop.dq = append(work.loop.dq, work)\n\t\tloop.mutex.Unlock()\n\t}()\n\n}", "title": "" }, { "docid": "97f59041ff45430d34a73c81ff1632d3", "score": "0.47929335", "text": "func (*RecipientOperator) Recipient() {}", "title": "" }, { "docid": "f6f1d34bcf3649a29bec7c48784c5f4d", "score": "0.4787619", "text": "func (_Owned *OwnedRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Owned.Contract.OwnedTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "e8af043d68913ea797b84945b08fd3e6", "score": "0.47794572", "text": "func (_Ballot *BallotRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Ballot.Contract.BallotTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "265bd7a44877e22c620d37a9d4bf382a", "score": "0.4778611", "text": "func (t *TxnTracker) commit(txn *Txn, applyTxnOpsFunc func(values map[string]proto.Message), values map[string]proto.Message) error {\n\tvar err error\n\tt.lock.Lock()\n\tdefer t.lock.Unlock()\n\tif t.onCommit != nil {\n\t\terr = t.onCommit(txn, t.LatestRevisions)\n\t}\n\tapplyTxnOpsFunc(values)\n\tdelete(t.PendingTxns, txn)\n\tt.CommittedTxns = append(t.CommittedTxns, txn)\n\treturn err\n}", "title": "" }, { "docid": "6b327e9681a0b70dba1d7c2c40a7def4", "score": "0.47716928", "text": "func (h *harness) sendTransaction(clientIdx int, senderPublicKey []byte, senderPrivateKey []byte, 
contractName string, methodName string, args ...interface{}) (response *codec.SendTransactionResponse, txId string, err error) {\n\tclient := h.clients[clientIdx]\n\tpayload, txId, err := client.CreateTransaction(senderPublicKey, senderPrivateKey, contractName, methodName, args...)\n\tif err != nil {\n\t\treturn nil, txId, err\n\t}\n\n\tvar sendTransaction func(rawTransaction []byte) (response *codec.SendTransactionResponse, err error)\n\tif h.config.isAsync {\n\t\tsendTransaction = client.SendTransactionAsync\n\t} else {\n\t\tsendTransaction = client.SendTransaction\n\t}\n\tresponse, err = sendTransaction(payload)\n\treturn response, txId, err\n}", "title": "" }, { "docid": "a191ef79aa0650c1ce580d6a5d1a8f3f", "score": "0.47691578", "text": "func (_Committee *CommitteeCallerRaw) Call(opts *bind.CallOpts, result interface{}, method string, params ...interface{}) error {\n\treturn _Committee.Contract.contract.Call(opts, result, method, params...)\n}", "title": "" }, { "docid": "418f11d74ce9a30766c4d8626f023338", "score": "0.476201", "text": "func executeTransaction(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n fmt.Println(\"************************************************\")\n fmt.Println(\"---------- IN EXECUTE TRANSACTION BANK----------\")\n\n // RETURN ERROR IF ARGS IS NOT 4 IN NUMBER\n if len(args) != 4 {\n fmt.Println(\"**************************\")\n fmt.Println(\"Too few argments... Need 4\")\n fmt.Println(\"**************************\")\n return shim.Error(\"Invalid argument count. Expecting 4.\")\n }\n\n // SET ARGUMENTS INTO LOCAL VARIABLES\n _bankAC := args[0]\n _userName := args[1]\n _transactionType := args[2]\n _amount, _ := strconv.ParseFloat(args[3], 64)\n\n // PREPARE THE KEY TO GET INVESTOR BANK MASTER\n _bankKey, err := stub.CreateCompositeKey(prefixBank, []string{_userName})\n // CHECK FOR ERROR IN CREATING COMPOSITE KEY\n if err != nil {\n return shim.Error(err.Error())\n }\n fmt.Println(\"executeTransaction: set arguments and prepare key completed\")\n\n // USE THE KEY TO RETRIEVE BANK MASTER\n _bankMasterAsBytesRead, err := stub.GetState(_bankKey)\n if err != nil {\n return shim.Error(err.Error())\n }\n _bankMaster := bankMaster{}\n\t err = json.Unmarshal(_bankMasterAsBytesRead, &_bankMaster)\n\t if err != nil {\n\t\t return shim.Error(err.Error())\n }\n fmt.Println(\"executeTransaction: retrieve bank master completed\")\n\n // READY TO EXECUTE TRANSACTION\n _balance := _bankMaster.balance\n if _transactionType == \"DEBIT\" {\n if (_balance < _amount) {\n fmt.Println(\"Not enought balance\")\n return shim.Error(err.Error())\n }\n _balance = _balance - _amount\n fmt.Println(\"executeTransaction: debit completed\")\n } else if _transactionType == \"CREDIT\" {\n _balance = _balance + _amount\n fmt.Println(\"executeTransaction: credit completed\")\n }\n\n // NOW UPDATE BANK MASTER RECORD\n _bankMasterUpdate := bankMaster {\n userName: _userName,\n bankAC: _bankAC,\n balance: _balance,\n }\n\n // MARSHAL THE BANK MASTER RECORD\n _bankMasterAsBytesWrite, err := json.Marshal(_bankMasterUpdate)\n // CHECK FOR ERROR IN MARSHALING\n if err != nil {\n return shim.Error(err.Error())\n }\n\n // NOW WRITE THE BANK MASTER RECORD\n err = stub.PutState(_bankKey, _bankMasterAsBytesWrite)\n // CHECK FOR ERROR\n if err != nil {\n return shim.Error(err.Error())\n }\n fmt.Println(\"executeTransaction: update bank master completed\")\n\n // NOW PREPARE BANK TRANSACTION RECORD TO WRITE\n _currentTime := time.Now()\n _currentTimeStr := _currentTime.String()\n 
_bankTransaction := bankTransaction {\n transUUID: _currentTimeStr,\n userName: _userName,\n bankAC: _bankAC,\n transDate: _currentTime,\n transAmount: _amount,\n balance: _bankMaster.balance,\n }\n // PREPARE THE KEY TO WRITE BANK TRANSACTION\n _bankTransactionKey, err := stub.CreateCompositeKey(prefixBank, []string{_currentTimeStr})\n // CHECK FOR ERROR IN CREATING COMPOSITE KEY\n if err != nil {\n return shim.Error(err.Error())\n }\n fmt.Println(\"executeTransaction: prepare transaction key completed\")\n\n // MARSHAL THE BANK TRANSACTION RECORD\n _bankTransactionAsBytes, err := json.Marshal(_bankTransaction)\n // CHECK FOR ERROR IN MARSHALING\n if err != nil {\n return shim.Error(err.Error())\n }\n\n // NOW WRITE THE BANK TRANSACTION RECORD\n err = stub.PutState(_bankTransactionKey, _bankTransactionAsBytes)\n // CHECK FOR ERROR\n if err != nil {\n return shim.Error(err.Error())\n }\n fmt.Println(\"executeTransaction: writing bank transaction completed\")\n\n fmt.Println(\"---------- OUT EXECUTE TRANSACTION BANK----------\")\n fmt.Println(\"*************************************************\")\n\n // RETURN SUCCESS\n return shim.Success(_bankTransactionAsBytes)\n}", "title": "" }, { "docid": "97524f81cde6da0a370115515b71f711", "score": "0.4760668", "text": "func (c *Coordinator) Transaction(cmds *raftpb.RaftCommand) (*raftpb.RaftCommand, error) {\n\n\tc.log.Infof(\"Processing Transaction\")\n\ttxid := xid.New().String()\n\tgt := c.newGlobalTransaction(txid, cmds)\n\tgt.StartTime = time.Now().UnixNano()\n\treadOnly := isReadOnly(gt.Cmds.Commands)\n\tnumShards := len(gt.ShardToCommands)\n\tresultCmds := &raftpb.RaftCommand{}\n\n\tc.log.Infof(\"Starting prepare phase for txid: [%s]\", txid)\n\t// Prepare Phase\n\t// Send prepare messages to all the shards involved in transaction.\n\t// This is a synchronous operation atm. 
It can be asynchronous\n\t// TODO: go routine SendMessageToShard\n\tvar prepareResponses int\n\tvar readOnlyErr error\n\tif readOnly {\n\t\tc.log.Infof(\"[txid %s] is read-only\", txid)\n\t}\n\tfor _, shardops := range gt.ShardToCommands {\n\t\tshardops.ReadOnly = readOnly\n\t\tcmds, err := c.SendMessageToShard(shardops)\n\t\tif err == nil {\n\t\t\tprepareResponses++\n\t\t} else {\n\t\t\tc.log.Infof(\"[txid %s] failed at %v with %s\", txid, shardops, err.Error())\n\t\t}\n\t\tif readOnly {\n\t\t\tresultCmds.Commands = append(resultCmds.Commands, cmds...)\n\t\t\tif err != nil {\n\t\t\t\treadOnlyErr = err\n\t\t\t}\n\t\t}\n\t}\n\tif readOnly {\n\t\tc.log.Infof(\"[txid: %s] read-only transaction, returning after prepare phase\", txid)\n\t\treturn resultCmds, readOnlyErr\n\t}\n\n\tc.log.Infof(\"[txid %s] Prepared sent\", txid)\n\n\tif prepareResponses != numShards {\n\t\t// send abort and report error.\n\t\t// abort will help release the locks\n\t\t// on the shards.\n\t\tc.log.Infof(\"[txid %s] Aborting\\n\", txid)\n\t\tgt.Phase = common.Abort\n\t\t// replicate via raft\n\t\tif err := c.Replicate(txid, common.SET, gt); err != nil {\n\t\t\tc.log.Infof(\"[txid: %s] failed to set Abort state: %s\", txid, err)\n\t\t}\n\n\t\tvar err error\n\t\tvar abortMessages int\n\t\tfor id, shardops := range gt.ShardToCommands {\n\t\t\tgt.ShardToCommands[id].Phase = common.Abort\n\t\t\tshardops.Phase = common.Abort\n\t\t\t// best effort\n\t\t\t_, err = c.SendMessageToShard(shardops)\n\t\t\tif err != nil {\n\t\t\t\tc.log.Infof(\"[txid %s] failed at %v with %s\", txid, shardops, err.Error())\n\t\t\t} else {\n\t\t\t\tabortMessages++\n\t\t\t}\n\t\t}\n\n\t\tif abortMessages == numShards {\n\t\t\tgt.Phase = common.Aborted\n\t\t\t// replicate via raft\n\t\t\tif err := c.Replicate(txid, common.SET, gt); err != nil {\n\t\t\t\tc.log.Infof(\"[txid: %s] failed to set Aborted state: %s\", txid, err)\n\t\t\t}\n\t\t\tc.log.Infof(\"[txid: %s] Aborted Successfully\", txid)\n\t\t}\n\t\treturn nil, err\n\t}\n\n\tc.log.Infof(\"[txid: %s] Prepared recieved: %d Prepared Expected: %d\", txid, prepareResponses, numShards)\n\n\t// c.log the prepared phase and replicate it\n\tgt.Phase = common.Prepared\n\tfor id := range gt.ShardToCommands {\n\t\tgt.ShardToCommands[id].Phase = common.Prepared\n\t}\n\tif err := c.Replicate(txid, common.SET, gt); err != nil {\n\t\tc.log.Errorf(\"[txid: %s] failed to set Prepared state: %s\", txid, err)\n\t\treturn nil, fmt.Errorf(\"unable to complete transaction: %s\", err)\n\t}\n\n\tif c.failmode == FailCommit {\n\t\tc.log.Infof(\"Simulating node failure after prepared phase, kill the node\")\n\t\ttime.Sleep(5 * time.Minute)\n\t\treturn nil, fmt.Errorf(\"transaction unsuccesfull\")\n\t}\n\t//\n\t// Comment: In general, there is nothing much that can be\n\t// done on raft failures. A transaction recovery go-routine\n\t// that runs periodically and does the following\n\t// 1. if transaction is not in commit state (meaning commit message not sent\n\t// to shards), after t seconds (some constant), send an abort and release locks\n\t// 2. 
if transaction is in commit state, keep trying to send commit and complete\n\t// the transaction.\n\n\t// Commit\n\tvar commitResponses int\n\tfor id, shardOps := range gt.ShardToCommands {\n\t\t// Replicate via Raft\n\n\t\tgt.ShardToCommands[id].Phase = common.Commit\n\t\tshardOps.Phase = common.Commit\n\t\tgt.Phase = common.Commit\n\t\tif err := c.Replicate(txid, common.SET, gt); err != nil {\n\t\t\tc.log.Errorf(\"[txid: %s] failed to set commit state: %s\", txid, err)\n\t\t\treturn nil, fmt.Errorf(\"failed to replicate state: %s\", err)\n\t\t}\n\n\t\tif _, err := c.SendMessageToShard(shardOps); err == nil {\n\t\t\tcommitResponses++\n\t\t}\n\t}\n\n\t// If all commits are not received, it will tried again\n\t// in recovery routine\n\tif commitResponses == numShards {\n\t\tgt.Phase = common.Committed\n\t\tif err := c.Replicate(txid, common.SET, gt); err != nil {\n\t\t\tc.log.Infof(\"[txid: %s] failed to set commited state: %s\", txid, err)\n\t\t\t// Note: there is no need to return with an error here, At this point, the cohorts have\n\t\t\t// already got the commit message. From the client perspective, this transaction\n\t\t\t// is successfull.\n\t\t}\n\t}\n\n\t// wait for all acks since client should complete replication as\n\t// well. not required.\n\n\tc.log.Infof(\"[txid :%s] Commit Ack recieved: %d Ack Expected: %d\", txid, commitResponses, numShards)\n\n\treturn resultCmds, nil\n}", "title": "" }, { "docid": "e6747f90c592f9180d2b63d2d3e429e9", "score": "0.47587365", "text": "func (_Gauge *GaugeTransactorRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Gauge.Contract.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "5ef6689bd2a42a86a2d527f43d7dca7f", "score": "0.47570568", "text": "func (t * TsohueChainCode) groupbuyUnblocked_failed(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n var err error\n if len(args) != 1 {\n return shim.Error(\"Incorrect number of arguments. 
Expecting 1\")\n }\n transactionID := args[0]\n\n\n\n // Update to Transaction_blocked\n //query Transaction_blocked\n queryString := fmt.Sprintf(\"{\\\"selector\\\":{\\\"ObjectType\\\":\\\"transaction_blocked\\\",\\\"TransactionID\\\":\\\"%s\\\"}}\", transactionID)\n // Get Groupbuy results from queryString\n resultsIterator, err := stub.GetQueryResult(queryString)\n if err != nil {\n return shim.Error(err.Error())\n }\n defer resultsIterator.Close()\n // Loop through all records of Groupbuy,\n for resultsIterator.HasNext() {\n responseRange, err := resultsIterator.Next()\n if err != nil {\n return shim.Error(err.Error())\n }\n transaction_blockedTemp := &Transaction_blocked{}\n transaction_blockedKey := responseRange.Key\n\n // Unmarshal responseRange.Value to JSON\n err = json.Unmarshal(responseRange.Value, &transaction_blockedTemp)\n if err != nil {\n return shim.Error(err.Error())\n }\n\n // Update groupbuyTemup\n transaction_blockedTemp.Status = \"blocked\"\n transaction_blockedTemp.Timestamp = time.Now().String()\n\n // Marshal and Put groupbuy to state\n transaction_blockedJSONasBytes, _ := json.Marshal(transaction_blockedTemp)\n err = stub.PutState(transaction_blockedKey, transaction_blockedJSONasBytes)\n if err != nil {\n return shim.Error(err.Error())\n\n }\n\t\t// Add to Transaction_blocked_record\n transaction_blocked_recordArgs := []string{transactionID, transaction_blockedTemp.ClientID, transaction_blockedTemp.GroupbuyID, transaction_blockedTemp.Currency, fmt.Sprintf(\"%f\",transaction_blockedTemp.Amount), transaction_blockedTemp.Status}\n err = updateTransaction_blocked_recordCalled(stub, transaction_blocked_recordArgs)\n if err != nil {\n return shim.Error(err.Error())\n }\n\n }\n\n\n\n // Update to Transaction_contract\n //query Transaction_contract\n queryString = fmt.Sprintf(\"{\\\"selector\\\":{\\\"ObjectType\\\":\\\"transaction_contract\\\",\\\"TransactionID\\\":\\\"%s\\\"}}\", transactionID)\n // Get Groupbuy results from queryString\n resultsIterator, err = stub.GetQueryResult(queryString)\n if err != nil {\n return shim.Error(err.Error())\n }\n defer resultsIterator.Close()\n // Loop through all records of Groupbuy,\n for resultsIterator.HasNext() {\n responseRange, err := resultsIterator.Next()\n if err != nil {\n return shim.Error(err.Error())\n }\n transaction_contractTemp := &Transaction_contract{}\n transaction_contractKey := responseRange.Key\n\n // Unmarshal responseRange.Value to JSON\n err = json.Unmarshal(responseRange.Value, &transaction_contractTemp)\n if err != nil {\n return shim.Error(err.Error())\n }\n\n // Update groupbuyTemup\n transaction_contractTemp.Status = \"fail\"\n transaction_contractTemp.Timestamp = time.Now().String()\n\n // Marshal and Put groupbuy to state\n transaction_contractJSONasBytes, _ := json.Marshal(transaction_contractTemp)\n err = stub.PutState(transaction_contractKey, transaction_contractJSONasBytes)\n if err != nil {\n return shim.Error(err.Error())\n\n }\n // Add to Transaction_contract_record\n transaction_contract_recordArgs := []string{transactionID, transaction_contractTemp.ClientID, transaction_contractTemp.GroupbuyID, transaction_contractTemp.Currency, fmt.Sprintf(\"%f\", transaction_contractTemp.Amount), transaction_contractTemp.Status}\n err = updateTransaction_contract_recordCalled(stub, transaction_contract_recordArgs)\n if err != nil {\n return shim.Error(err.Error())\n }\n\n\n\n }\n\n\n\n fmt.Println(\"Groupbuy leaved success!\")\n return shim.Success(nil)\n}", "title": "" }, { "docid": 
"4ff3f4e2be4576bd1ff29ca0e5b0ba1c", "score": "0.475615", "text": "func (w *Worker) sendTransactions() error {\n\tok, err := w.db.IsEmpty()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif ok {\n\t\treturn nil\n\t}\n\n\tb, err := w.db.GetPendingBatch()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif b.CheckAvailability() {\n\t\t// TODO send transaction\n\n\t\tb = model.NewBatch(b.ID, w.minValue, w.duration)\n\t\treturn w.db.SaveBatch(b)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "eff89445dc06a7be959022fcb2bee70c", "score": "0.47548416", "text": "func (s *client) submit(metricType string, stat interface{}, value int64, sign bool, sval string, rate float32) error {\n\tif rate < 1 && rand.Float32() > rate {\n\t\treturn nil\n\t}\n\n\tvar buf *bytes.Buffer\n\tswitch m := stat.(type) {\n\tcase string:\n\t\tbuf = &bytes.Buffer{}\n\t\tif s.prefix != \"\" {\n\t\t\tbuf.WriteString(s.prefix)\n\t\t\tbuf.WriteString(\".\")\n\t\t}\n\t\tbuf.WriteString(escape(m))\n\tcase *metric:\n\t\tbuf = m.b\n\tdefault:\n\t\treturn fmt.Errorf(\"Unexpected argument type: %T\", stat)\n\t}\n\n\tbuf.WriteByte(':')\n\tif sval != \"\" {\n\t\tbuf.WriteString(escape(sval))\n\t} else {\n\t\tif sign {\n\t\t\tif value >= 0 {\n\t\t\t\tbuf.WriteByte('+')\n\t\t\t}\n\t\t}\n\t\tbuf.WriteString(strconv.FormatInt(value, 10))\n\t}\n\n\tbuf.WriteByte('|')\n\tbuf.WriteString(metricType)\n\n\tif rate < 1 {\n\t\tbuf.WriteString(\"|@\")\n\t\tbuf.WriteString(strconv.FormatFloat(float64(rate), 'f', -1, 32))\n\t}\n\n\t_, err := buf.WriteTo(s.s)\n\treturn err\n}", "title": "" }, { "docid": "579498abc1bb5c8c62d25b1feeeb65fa", "score": "0.47525504", "text": "func (tb *TokenBucket) Call(\n\tctx context.Context,\n\tprefix string,\n\tcapacity int64,\n\treplenishRate float64,\n\tnumTokens int,\n) (bool, int64, error) {\n\tclient := withClientContext(ctx, tb.c)\n\n\tvar (\n\t\tnow int64\n\t\tnowUs int64\n\t)\n\tif tb.testing {\n\t\tnow, nowUs = timeToRedisPair(tb.timeSource.Now())\n\t}\n\n\targs := []interface{}{\n\t\treplenishRate,\n\t\tcapacity,\n\t\tnumTokens,\n\n\t\t// The script allows us to inject the current time for testing,\n\t\t// but it's superseded by Redis's time in production to protect\n\t\t// against clock drift.\n\t\tnow,\n\t\tnowUs,\n\t\ttb.testing,\n\t}\n\n\tresp := updateTokenBucketScript.Run(\n\t\tclient,\n\t\ttokenBucketKeys(prefix),\n\t\targs...,\n\t)\n\tresult, err := resp.Result()\n\tif err != nil {\n\t\treturn false, 0, err\n\t}\n\n\treturnVals, ok := result.([]interface{})\n\tif !ok {\n\t\treturn false, 0, fmt.Errorf(\"redistb: invalid return type %T (expected []interface{})\", result)\n\t}\n\n\t// The script returns:\n\t// allowed Whether the operation was allowed\n\t// remaining The remaining tokens in the bucket\n\t// now_new The script's view of the current time\n\t// now_new_us The script's view of the current time (microseconds)\n\t//\n\t// We don't use the last two arguments here.\n\n\t// Deserializing turns Lua 'true' into '1', and 'false' into 'nil'\n\tvar allowed bool\n\tif returnVals[0] == nil {\n\t\tallowed = false\n\t} else if i, ok := returnVals[0].(int64); ok {\n\t\tallowed = i == 1\n\t} else {\n\t\treturn false, 0, fmt.Errorf(\"redistb: invalid 'allowed' type %T\", returnVals[0])\n\t}\n\n\tremaining := returnVals[1].(int64)\n\treturn allowed, remaining, nil\n}", "title": "" }, { "docid": "038e1c762a5c964bdbfdd7b4b21de37a", "score": "0.4750508", "text": "func GetNodeTransactions(){\r\n\t\r\n}", "title": "" }, { "docid": "ae2f8ee02a08e6d3da1a9c66aae0f7f4", "score": "0.4749152", "text": "func (e *EVM) 
SubmitTransaction(ctx context.Context, tx *ethtypes.EthTxArgs) ethtypes.EthHash {\n\tsigned, err := tx.ToRlpSignedMsg()\n\trequire.NoError(e.t, err)\n\n\thash, err := e.EthSendRawTransaction(ctx, signed)\n\trequire.NoError(e.t, err)\n\n\treturn hash\n}", "title": "" }, { "docid": "c874db4218ffd905603a758d6e6de351", "score": "0.4744464", "text": "func (_ReentrancyGuard *ReentrancyGuardRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _ReentrancyGuard.Contract.ReentrancyGuardTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "2ed0da3fa1edb511bbc46692efdae594", "score": "0.47433978", "text": "func (method *TxCommit) Sync() bool {\n\treturn true\n}", "title": "" }, { "docid": "df81cc5f28555efd773ca942b95965aa", "score": "0.47432682", "text": "func (s *transactionSyncer) commitTransaction(err error, networkEndpointMap map[negtypes.NetworkEndpoint]*composite.NetworkEndpoint) {\n\ts.syncLock.Lock()\n\tdefer s.syncLock.Unlock()\n\n\t// If error is not nil, trigger backoff retry\n\t// If any transaction needs reconciliation, trigger resync.\n\t// needRetry indicates if the transaction needs to backoff and retry\n\tneedRetry := false\n\n\tif err != nil {\n\t\t// Trigger NEG initialization if error occurs\n\t\t// This is to prevent if the NEG object is deleted or misconfigured by user\n\t\ts.needInit = true\n\t\tneedRetry = true\n\t\tmetrics.PublishNegControllerErrorCountMetrics(err, false)\n\t}\n\n\tfor networkEndpoint := range networkEndpointMap {\n\t\t_, ok := s.transactions.Get(networkEndpoint)\n\t\t// clear transaction\n\t\tif !ok {\n\t\t\ts.logger.Error(nil, \"Endpoint was not found in the transaction table.\", \"endpoint\", networkEndpoint)\n\t\t\tcontinue\n\t\t}\n\t\ts.transactions.Delete(networkEndpoint)\n\t}\n\n\tif needRetry {\n\t\tif negtypes.IsStrategyQuotaError(err) {\n\t\t\ts.syncer.Sync()\n\t\t} else {\n\t\t\tif retryErr := s.retry.Retry(); retryErr != nil {\n\t\t\t\ts.recordEvent(apiv1.EventTypeWarning, \"RetryFailed\", fmt.Sprintf(\"Failed to retry NEG sync for %q: %v\", s.NegSyncerKey.String(), retryErr))\n\t\t\t\tmetrics.PublishNegControllerErrorCountMetrics(retryErr, false)\n\t\t\t}\n\t\t}\n\t\treturn\n\t}\n\ts.retry.Reset()\n\t// always trigger Sync to commit pods\n\ts.syncer.Sync()\n}", "title": "" }, { "docid": "4016bdbca985aaf0c958d0b99c0b23b3", "score": "0.47415718", "text": "func (sc *SettlementCreate) Mutation() *SettlementMutation {\n\treturn sc.mutation\n}", "title": "" }, { "docid": "4117c8c3464126005217acf5baa74d67", "score": "0.47289437", "text": "func myPut(a api, key string, value json.RawMessage, rebaseOpts *rebaseOpts) commitTransactionResponse {\n\tputArgs := myPutArgs{Key: key, Value: value}\n\totr := a.openTransaction(\"myPut\", a.marshal(putArgs), rebaseOpts)\n\tputReq := putRequest{transactionRequest: transactionRequest{TransactionID: otr.TransactionID}, Key: key, Value: value}\n\t_, err := Dispatch(a.dbName, \"put\", a.marshal(putReq))\n\ta.assert.NoError(err)\n\tctr := a.commitTransaction(otr.TransactionID)\n\treturn ctr\n}", "title": "" }, { "docid": "033e8c09304402d8c4692bd600fbf5e4", "score": "0.47274312", "text": "func atomicFunctions() {\n\n}", "title": "" }, { "docid": "0a324ec1605da2b790f5c4bc80303008", "score": "0.47221854", "text": "func (c *Coordinator) commit(m *Message) error {\n\tpayload, err := json.Marshal(m)\n\tif err != nil {\n\t\tl.Printf(\"[ERROR] Unable to marshall message %+v: %s\", m, err.Error())\n\t}\n\n\tfor _, counter := range 
c.Counters {\n\t\tif counter.IsDead {\n\t\t\tcontinue\n\t\t}\n\n\t\turl := fmt.Sprintf(\"http://%s/commit\", counter.Addr)\n\t\tresp, err := c.Do(http.MethodPost, url, bytes.NewBuffer(payload))\n\t\tif err != nil {\n\t\t\tl.Printf(\"[ERROR] Unable to commit %s: %s\", counter.Addr, err.Error())\n\t\t\treturn err\n\t\t}\n\n\t\tif resp.StatusCode != http.StatusOK {\n\t\t\tl.Printf(\"[ERROR] Unable to commit %s: %d\", counter.Addr, resp.StatusCode)\n\t\t\treturn errors.New(\"unable to commit\")\n\t\t}\n\n\t\tcounter.HasItems = true\n\t\tif resp != nil {\n\t\t\tresp.Body.Close()\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "5d083117eeac79f3e235da1d4bc78aaf", "score": "0.47187695", "text": "func (_Gauge *GaugeRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Gauge.Contract.GaugeTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "3d15c5b9ded16433079182facf4a02cc", "score": "0.47146326", "text": "func (_Holder *HolderRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Holder.Contract.HolderTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "cf2912e7afe33853240f9cb29cc23bc1", "score": "0.47135586", "text": "func (txn *Txn) Send(\n\tctx context.Context, ba roachpb.BatchRequest,\n) (*roachpb.BatchResponse, *roachpb.Error) {\n\t// It doesn't make sense to use inconsistent reads in a transaction. However,\n\t// we still need to accept it as a parameter for this to compile.\n\tif ba.ReadConsistency != roachpb.CONSISTENT {\n\t\treturn nil, roachpb.NewErrorf(\"cannot use %s ReadConsistency in txn\",\n\t\t\tba.ReadConsistency)\n\t}\n\n\t// Fill in the GatewayNodeID on the batch if the txn knows it.\n\t// NOTE(andrei): It seems a bit ugly that we're filling in the batches here as\n\t// opposed to the point where the requests are being created, but\n\t// unfortunately requests are being created in many ways and this was the best\n\t// place I found to set this field.\n\tif txn.gatewayNodeID != 0 {\n\t\tba.Header.GatewayNodeID = txn.gatewayNodeID\n\t}\n\n\tlastIndex := len(ba.Requests) - 1\n\tif lastIndex < 0 {\n\t\treturn nil, nil\n\t}\n\n\tfirstWriteIdx, pErr := firstWriteIndex(ba)\n\tif pErr != nil {\n\t\treturn nil, pErr\n\t}\n\n\thaveTxnWrite := firstWriteIdx != -1\n\tendTxnRequest, haveEndTxn := ba.Requests[lastIndex].GetInner().(*roachpb.EndTransactionRequest)\n\n\tvar needBeginTxn, elideEndTxn bool\n\tvar sender TxnSender\n\tlockedPrelude := func() *roachpb.Error {\n\t\ttxn.mu.Lock()\n\t\tdefer txn.mu.Unlock()\n\n\t\tsender = txn.mu.sender\n\t\tif txn.mu.Proto.Status != roachpb.PENDING || txn.mu.finalized {\n\t\t\treturn roachpb.NewErrorf(\n\t\t\t\t\"attempting to use transaction with wrong status or finalized: %s %v\",\n\t\t\t\ttxn.mu.Proto.Status, txn.mu.finalized)\n\t\t}\n\n\t\t// For testing purposes, txn.UserPriority can be a negative value (see\n\t\t// roachpb.MakePriority).\n\t\tif txn.mu.UserPriority != 0 {\n\t\t\tba.UserPriority = txn.mu.UserPriority\n\t\t}\n\n\t\tif !txn.mu.active {\n\t\t\tuser := roachpb.MakePriority(ba.UserPriority)\n\t\t\tif txn.mu.Proto.Priority < user {\n\t\t\t\ttxn.mu.Proto.Priority = user\n\t\t\t}\n\t\t\ttxn.mu.active = true\n\t\t}\n\n\t\tneedBeginTxn = !(txn.mu.Proto.Writing || txn.mu.writingTxnRecord) && haveTxnWrite\n\t\tneedEndTxn := txn.mu.Proto.Writing || txn.mu.writingTxnRecord || haveTxnWrite\n\t\telideEndTxn = haveEndTxn && !needEndTxn\n\n\t\t// If 
we're not yet writing in this txn, but intend to, insert a\n\t\t// begin transaction request before the first write command and update\n\t\t// transaction state accordingly.\n\t\tif needBeginTxn {\n\t\t\t// Set txn key based on the key of the first transactional write if\n\t\t\t// not already set. If the transaction already has a key (we're in a\n\t\t\t// restart), make sure we keep the anchor key the same.\n\t\t\tif len(txn.mu.Proto.Key) == 0 {\n\t\t\t\ttxnAnchorKey := txn.mu.txnAnchorKey\n\t\t\t\tif len(txnAnchorKey) == 0 {\n\t\t\t\t\ttxnAnchorKey = ba.Requests[0].GetInner().Header().Key\n\t\t\t\t}\n\t\t\t\ttxn.mu.Proto.Key = txnAnchorKey\n\t\t\t}\n\t\t\t// Set the key in the begin transaction request to the txn's anchor key.\n\t\t\tbt := &roachpb.BeginTransactionRequest{\n\t\t\t\tSpan: roachpb.Span{\n\t\t\t\t\tKey: txn.mu.Proto.Key,\n\t\t\t\t},\n\t\t\t}\n\t\t\t// Inject the new request before position firstWriteIdx, taking\n\t\t\t// care to avoid unnecessary allocations.\n\t\t\toldRequests := ba.Requests\n\t\t\tba.Requests = make([]roachpb.RequestUnion, len(ba.Requests)+1)\n\t\t\tcopy(ba.Requests, oldRequests[:firstWriteIdx])\n\t\t\tba.Requests[firstWriteIdx].MustSetInner(bt)\n\t\t\tcopy(ba.Requests[firstWriteIdx+1:], oldRequests[firstWriteIdx:])\n\t\t\t// We're going to be writing the transaction record by sending the\n\t\t\t// begin transaction request.\n\t\t\ttxn.mu.writingTxnRecord = true\n\t\t}\n\n\t\tif elideEndTxn {\n\t\t\tba.Requests = ba.Requests[:lastIndex]\n\t\t}\n\n\t\t// Clone the Txn's Proto so that future modifications can be made without\n\t\t// worrying about synchronization.\n\t\tnewTxn := txn.mu.Proto.Clone()\n\t\tba.Txn = &newTxn\n\t\treturn nil\n\t}\n\tif pErr := lockedPrelude(); pErr != nil {\n\t\treturn nil, pErr\n\t}\n\n\t// Send call through the DB.\n\trequestTxnID := ba.Txn.ID\n\tbr, pErr := txn.db.sendUsingSender(ctx, ba, sender)\n\n\t// Lock for the entire response postlude.\n\ttxn.mu.Lock()\n\tdefer txn.mu.Unlock()\n\n\t// If we inserted a begin transaction request, remove it here. 
We also\n\t// unset the flag writingTxnRecord flag in case another ever needs to\n\t// be sent again (for instance, if we're aborted and need to restart).\n\tif needBeginTxn {\n\t\tif br != nil && br.Responses != nil {\n\t\t\tbr.Responses = append(br.Responses[:firstWriteIdx], br.Responses[firstWriteIdx+1:]...)\n\t\t}\n\t\t// Handle case where inserted begin txn confused an indexed error.\n\t\tif pErr != nil && pErr.Index != nil {\n\t\t\tidx := pErr.Index.Index\n\t\t\tif idx == int32(firstWriteIdx) {\n\t\t\t\t// An error was encountered on begin txn; disallow the indexing.\n\t\t\t\tpErr.Index = nil\n\t\t\t} else if idx > int32(firstWriteIdx) {\n\t\t\t\t// An error was encountered after begin txn; decrement index.\n\t\t\t\tpErr.SetErrorIndex(idx - 1)\n\t\t\t}\n\t\t}\n\n\t\ttxn.mu.writingTxnRecord = false\n\t}\n\tif haveEndTxn {\n\t\tif pErr == nil || !endTxnRequest.Commit {\n\t\t\t// Finalize the transaction if either we sent a successful commit\n\t\t\t// EndTxnRequest, or sent a rollback EndTxnRequest (regardless of\n\t\t\t// if it succeeded).\n\t\t\ttxn.mu.finalized = true\n\t\t}\n\t}\n\n\tif pErr != nil {\n\t\tif log.V(1) {\n\t\t\tlog.Infof(ctx, \"failed batch: %s\", pErr)\n\t\t}\n\t\tswitch t := pErr.GetDetail().(type) {\n\t\tcase *roachpb.HandledRetryableTxnError:\n\t\t\tretryErr := t\n\t\t\tif requestTxnID != retryErr.TxnID {\n\t\t\t\t// KV should not return errors for transactions other than the one that sent\n\t\t\t\t// the request.\n\t\t\t\tlog.Fatalf(ctx, \"retryable error for the wrong txn. \"+\n\t\t\t\t\t\"requestTxnID: %s, retryErr.TxnID: %s. retryErr: %s\",\n\t\t\t\t\trequestTxnID, retryErr.TxnID, retryErr)\n\t\t\t} else if requestTxnID == txn.mu.Proto.ID {\n\t\t\t\t// Our requestTxnID still matches the proto, so update the state.\n\t\t\t\t// If it doesn't match here, it means a concurrent request through\n\t\t\t\t// this Txn object has already aborted and restarted the txn.\n\t\t\t\ttxn.updateStateOnRetryableErrLocked(ctx, retryErr)\n\t\t\t}\n\t\t}\n\t\t// Note that unhandled retryable txn errors are allowed from leaf\n\t\t// transactions. We pass them up through distributed SQL flows to\n\t\t// the root transactions, at the receiver.\n\t\tif txn.typ == RootTxn && pErr.TransactionRestart != roachpb.TransactionRestart_NONE {\n\t\t\tlog.Fatalf(ctx,\n\t\t\t\t\"unexpected retryable error at the client.Txn level: (%T) %s\",\n\t\t\t\tpErr.GetDetail(), pErr)\n\t\t}\n\t\treturn nil, pErr\n\t}\n\n\tif br != nil {\n\t\tif br.Error != nil {\n\t\t\tpanic(roachpb.ErrorUnexpectedlySet(txn.mu.sender, br))\n\t\t}\n\t\tif br.Txn != nil && br.Txn.ID != txn.mu.Proto.ID {\n\t\t\treturn nil, roachpb.NewError(&roachpb.TxnPrevAttemptError{})\n\t\t}\n\n\t\t// Only successful requests can carry an updated Txn in their response\n\t\t// header. Some errors (e.g. a restart) have a Txn attached to them as\n\t\t// well; these errors have been handled above.\n\t\ttxn.mu.Proto.Update(br.Txn)\n\t}\n\n\tif elideEndTxn {\n\t\t// Check that read only transactions do not violate their deadline. This can NOT\n\t\t// happen since the txn deadline is normally updated when it is about to expire\n\t\t// or expired. 
We will just keep the code for safety (see TestReacquireLeaseOnRestart).\n\t\tif endTxnRequest.Deadline != nil {\n\t\t\tif endTxnRequest.Deadline.Less(txn.mu.Proto.Timestamp) {\n\t\t\t\t// NB: The returned error contains a pointer to txn.mu.Proto, but\n\t\t\t\t// that's ok because we can't have concurrent operations going on while\n\t\t\t\t// committing/aborting.\n\t\t\t\treturn nil, roachpb.NewErrorWithTxn(roachpb.NewTransactionStatusError(\n\t\t\t\t\t\"deadline exceeded before transaction finalization\"), &txn.mu.Proto)\n\t\t\t}\n\t\t}\n\t\t// This normally happens on the server and sent back in response\n\t\t// headers, but this transaction was optimized away. The caller may\n\t\t// still inspect the transaction struct, so we manually update it\n\t\t// here to emulate a true transaction.\n\t\tif endTxnRequest.Commit {\n\t\t\ttxn.mu.Proto.Status = roachpb.COMMITTED\n\t\t} else {\n\t\t\ttxn.mu.Proto.Status = roachpb.ABORTED\n\t\t}\n\t}\n\treturn br, nil\n}", "title": "" }, { "docid": "042f19023183effdf21ede053144294d", "score": "0.47095752", "text": "func (wp *WorkerPool) transact(e outboundEnvelope) {\n\tdefer e.cancel()\n\n\t// bail out early if the request has been on the queue too long\n\tif err := e.request.Context().Err(); err != nil {\n\t\twp.logger.Error(\"Outbound message expired while on queue\", zap.Error(err))\n\t\treturn\n\t}\n\n\tresponse, err := wp.transactor(e.request)\n\tif err != nil {\n\t\twp.logger.Error(\"HTTP transaction error\", zap.Error(err))\n\t\treturn\n\t}\n\n\tif response.StatusCode < 400 {\n\t\twp.logger.Debug(\"HTTP response\", zap.String(\"status\", response.Status), zap.Any(\"url\", e.request.URL))\n\t} else {\n\t\twp.logger.Error(\"HTTP response\", zap.String(\"status\", response.Status), zap.Any(\"url\", e.request.URL))\n\t}\n\n\tio.Copy(io.Discard, response.Body)\n\tresponse.Body.Close()\n}", "title": "" }, { "docid": "465b76a3129713fc44406bfe135759a4", "score": "0.47091", "text": "func (vs *Visor) broadcastTransaction(t coin.Transaction, pool *Pool) error {\n\tif vs.Config.DisableNetworking {\n\t\treturn nil\n\t}\n\n\tm := NewGiveTxnsMessage(coin.Transactions{t})\n\tl, err := pool.Pool.Size()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlogger.Debugf(\"Broadcasting GiveTxnsMessage to %d conns\", l)\n\n\terr = pool.Pool.BroadcastMessage(m)\n\tif err != nil {\n\t\tlogger.Errorf(\"Broadcast GivenTxnsMessage failed: %v\", err)\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "dd0d47eb8df0f028075f4afaaf045d03", "score": "0.47077465", "text": "func (_Ownable *OwnableRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Ownable.Contract.OwnableTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "dd0d47eb8df0f028075f4afaaf045d03", "score": "0.47077465", "text": "func (_Ownable *OwnableRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Ownable.Contract.OwnableTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "dd0d47eb8df0f028075f4afaaf045d03", "score": "0.47077465", "text": "func (_Ownable *OwnableRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Ownable.Contract.OwnableTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "dd0d47eb8df0f028075f4afaaf045d03", "score": "0.47077465", "text": "func (_Ownable *OwnableRaw) Transact(opts *bind.TransactOpts, method string, 
params ...interface{}) (*types.Transaction, error) {\n\treturn _Ownable.Contract.OwnableTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "dd0d47eb8df0f028075f4afaaf045d03", "score": "0.470689", "text": "func (_Ownable *OwnableRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Ownable.Contract.OwnableTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "773061e5882008c1cf2ad4c30b13cd4a", "score": "0.47066224", "text": "func (_CallBLS *CallBLSSession) CallPrec(addr *big.Int, input []byte) (*types.Transaction, error) {\n\treturn _CallBLS.Contract.CallPrec(&_CallBLS.TransactOpts, addr, input)\n}", "title": "" }, { "docid": "e1f8ba4f63d592970319a80f37545826", "score": "0.47003064", "text": "func (_Token *TokenTransactorSession) Permit(owner common.Address, spender common.Address, value *big.Int, deadline *big.Int, v uint8, r [32]byte, s [32]byte) (*types.Transaction, error) {\n\treturn _Token.Contract.Permit(&_Token.TransactOpts, owner, spender, value, deadline, v, r, s)\n}", "title": "" }, { "docid": "a26534de54b10c86466ddcbbb9b9b4f0", "score": "0.46972197", "text": "func incrementApproved(){\r\nApprovedCount++;\r\n}", "title": "" }, { "docid": "fe1cea9f6bc6434039657b2932e8c4a3", "score": "0.46966073", "text": "func makeDeduction(ctx context.Context, req MakeDeductionRequest,\n\tnow time.Time, client *firestore.Client) (*Transaction, error) {\n\n\taccount, err := getAccountByNumber(ctx, req.AccountNumber, client)\n\tif err != nil {\n\t\treturn nil, errors.New(\"invalid account number\")\n\t}\n\n\tif account.Balance < req.Amount {\n\t\treturn nil, errors.New(\"insufficient fund\")\n\t}\n\n\treceiptNo, err := generateReceiptNumber(ctx, client)\n\n\tm := Transaction{\n\t\tAccountNumber: account.Number,\n\t\tType: TransactionType_Withdrawal,\n\t\tAmount: req.Amount,\n\t\tNarration: req.Narration,\n\t\tSalesRepID: req.SalesRepID,\n\t\tSalesRep: req.SalesRep,\n\t\tCustomerID: account.CustomerID,\n\t\tCustomerName: account.Customer,\n\t\tReceiptNo: receiptNo,\n\t\tCreatedAt: now.Unix(),\n\t\tUpdatedAt: now.Unix(),\n\t}\n\n\tbatch := client.Batch()\n\n\tbatch = batch.Create(client.Doc(\"transaction/\"+receiptNo), m)\n\taccount.Balance -= req.Amount\n\tbatch = batch.Update(client.Doc(\"account/\"+req.AccountNumber), []firestore.Update{\n\t\t{Path: \"Balance\", Value: account.Balance},\n\t})\n\n\tif _, err := batch.Commit(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\n\tcustomer, err := getCustomerByID(ctx, account.CustomerID, client)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err = notify.Send(ctx, customer.PhoneNumber, \"sms/payment_withdrawn\",\n\t\tmap[string]interface{}{\n\t\t\t\"Name\": customer.Name,\n\t\t\t\"Amount\": req.Amount,\n\t\t\t\"Balance\": account.Balance,\n\t\t}); err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\treturn &m, nil\n}", "title": "" }, { "docid": "7700c21375b7c32ca2717784390ed6ef", "score": "0.46912932", "text": "func (_Pausable *PausableRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Pausable.Contract.PausableTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "49cc5fac453ac80e9b5bea258015b1e0", "score": "0.46910912", "text": "func (_ReentrancyGuard *ReentrancyGuardTransactorRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn 
_ReentrancyGuard.Contract.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "8fad45d73793bfd6bdce1aa09b7c8259", "score": "0.46905956", "text": "func SubmitTransaction(ctx context.Context, b Backend, tx *types.Transaction) (common.Hash, error) {\n\tif err := b.SendTx(ctx, tx); err != nil {\n\t\treturn common.Hash{}, err\n\t}\n\tif tx.To() == nil {\n\t\tsigner := types.MakeSigner(b.ChainConfig(), b.CurrentBlock().Epoch())\n\t\tfrom, err := types.Sender(signer, tx)\n\t\tif err != nil {\n\t\t\treturn common.Hash{}, err\n\t\t}\n\t\taddr := crypto.CreateAddress(from, tx.Nonce())\n\t\tutils.Logger().Info().\n\t\t\tStr(\"fullhash\", tx.Hash().Hex()).\n\t\t\tStr(\"contract\", common2.MustAddressToBech32(addr)).\n\t\t\tMsg(\"Submitted contract creation\")\n\t} else {\n\t\tutils.Logger().Info().\n\t\t\tStr(\"fullhash\", tx.Hash().Hex()).\n\t\t\tStr(\"recipient\", tx.To().Hex()).\n\t\t\tMsg(\"Submitted transaction\")\n\t}\n\treturn tx.Hash(), nil\n}", "title": "" }, { "docid": "7fc04ec9b9e0101156b3f8acf34a9767", "score": "0.4690398", "text": "func (t *Task) Submitted() uint64 {\n\tsubmitted := atomic.LoadUint64(&t.submitted)\n\n\treturn submitted\n}", "title": "" } ]
b4ff2517d0e732eb72fd8d9c99914b02
CreateOrUpdatePreparer prepares the CreateOrUpdate request.
[ { "docid": "3cda761870afea722ccb5439de350782", "score": "0.73248667", "text": "func (client ApplyUpdatesClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, providerName string, resourceType string, resourceName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"providerName\": autorest.Encode(\"path\", providerName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"resourceName\": autorest.Encode(\"path\", resourceName),\n\t\t\"resourceType\": autorest.Encode(\"path\", resourceType),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-06-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{providerName}/{resourceType}/{resourceName}/providers/Microsoft.Maintenance/applyUpdates/default\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" } ]
[ { "docid": "6f14c61386cf2323ff6f66d6b94bb95b", "score": "0.74447745", "text": "func (client TasksClient) CreateOrUpdatePreparer(ctx context.Context, parameters ProjectTask, groupName string, serviceName string, projectName string, taskName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"groupName\": autorest.Encode(\"path\", groupName),\n\t\t\"projectName\": autorest.Encode(\"path\", projectName),\n\t\t\"serviceName\": autorest.Encode(\"path\", serviceName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"taskName\": autorest.Encode(\"path\", taskName),\n\t}\n\n\tconst APIVersion = \"2018-04-19\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "68053ebaf8cf92aa8fca95e62f0e6751", "score": "0.71134585", "text": "func (client WebServicesClient) CreateOrUpdatePreparer(createOrUpdatePayload WebService, resourceGroupName string, webServiceName string, cancel <-chan struct{}) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"webServiceName\": autorest.Encode(\"path\", webServiceName),\n\t}\n\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": client.APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearning/webServices/{webServiceName}\", pathParameters),\n\t\tautorest.WithJSON(createOrUpdatePayload),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{Cancel: cancel})\n}", "title": "" }, { "docid": "de1aea9a7220b350d10f3751191e97d0", "score": "0.70880973", "text": "func (client AccessPoliciesClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, accountName string, accessPolicyName string, parameters AccessPolicyEntity) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"accessPolicyName\": autorest.Encode(\"path\", accessPolicyName),\n\t\t\"accountName\": autorest.Encode(\"path\", accountName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2021-05-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tparameters.SystemData = nil\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; 
charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/videoAnalyzers/{accountName}/accessPolicies/{accessPolicyName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "174493814335351eb47b8b3583c1baf8", "score": "0.7066222", "text": "func (client VaultsClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, vaultName string, parameters VaultCreateOrUpdateParameters) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"vaultName\": autorest.Encode(\"path\", vaultName),\n\t}\n\n\tconst APIVersion = \"2016-10-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "ebbd5b951a215819410cfd812bb996e5", "score": "0.6979492", "text": "func (client AppsClient) CreateOrUpdatePreparer(resourceGroupName string, name string, siteEnvelope Site, skipDNSRegistration *bool, skipCustomDomainVerification *bool, forceDNSRegistration *bool, TTLInSeconds string, cancel <-chan struct{}) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2016-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif skipDNSRegistration != nil {\n\t\tqueryParameters[\"skipDnsRegistration\"] = autorest.Encode(\"query\", *skipDNSRegistration)\n\t}\n\tif skipCustomDomainVerification != nil {\n\t\tqueryParameters[\"skipCustomDomainVerification\"] = autorest.Encode(\"query\", *skipCustomDomainVerification)\n\t}\n\tif forceDNSRegistration != nil {\n\t\tqueryParameters[\"forceDnsRegistration\"] = autorest.Encode(\"query\", *forceDNSRegistration)\n\t}\n\tif len(TTLInSeconds) > 0 {\n\t\tqueryParameters[\"ttlInSeconds\"] = autorest.Encode(\"query\", TTLInSeconds)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}\", pathParameters),\n\t\tautorest.WithJSON(siteEnvelope),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{Cancel: cancel})\n}", "title": "" }, { "docid": "2b71bb933a0ece32bb4f2084e71e131f", "score": "0.6899931", "text": "func (client LabsClient) CreateOrUpdatePreparer(ctx context.Context, body Lab, resourceGroupName string, labName string) (*http.Request, error) {\n\tpathParameters 
:= map[string]interface{}{\n\t\t\"labName\": autorest.Encode(\"path\", labName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2021-11-15-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tbody.SystemData = nil\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}\", pathParameters),\n\t\tautorest.WithJSON(body),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "9caa4f2dd61dd74fdc213a11a4d8b1b8", "score": "0.6844018", "text": "func (client ServiceTasksClient) CreateOrUpdatePreparer(ctx context.Context, parameters ProjectTask, groupName string, serviceName string, taskName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"groupName\": autorest.Encode(\"path\", groupName),\n\t\t\"serviceName\": autorest.Encode(\"path\", serviceName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"taskName\": autorest.Encode(\"path\", taskName),\n\t}\n\n\tconst APIVersion = \"2018-07-15-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "d6191968f39422ee5ac37d4fdf7b2972", "score": "0.6834481", "text": "func (client NamespacesClient) CreateOrUpdatePreparer(resourceGroupName string, namespaceName string, parameters EHNamespace, cancel <-chan struct{}) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"namespaceName\": autorest.Encode(\"path\", namespaceName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2017-04-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{Cancel: cancel})\n}", "title": "" }, { "docid": "5c448e1ab2657351a7593d841b7f1c1c", "score": "0.68136483", "text": "func (client ApplyUpdatesClient) CreateOrUpdateParentPreparer(ctx context.Context, resourceGroupName string, providerName string, resourceParentType string, resourceParentName string, resourceType string, resourceName string) 
(*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"providerName\": autorest.Encode(\"path\", providerName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"resourceName\": autorest.Encode(\"path\", resourceName),\n\t\t\"resourceParentName\": autorest.Encode(\"path\", resourceParentName),\n\t\t\"resourceParentType\": autorest.Encode(\"path\", resourceParentType),\n\t\t\"resourceType\": autorest.Encode(\"path\", resourceType),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-06-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{providerName}/{resourceParentType}/{resourceParentName}/{resourceType}/{resourceName}/providers/Microsoft.Maintenance/applyUpdates/default\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "cc19286842bf4741bdb394981f4d0a58", "score": "0.68071043", "text": "func (client VirtualMachineClient) CreateOrUpdateResourcePreparer(ctx context.Context, resourceGroupName string, labName string, name string, labVirtualMachine LabVirtualMachine) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"labName\": autorest.Encode(\"path\", labName),\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2015-05-21-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevTestLab/labs/{labName}/virtualmachines/{name}\", pathParameters),\n\t\tautorest.WithJSON(labVirtualMachine),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "e7ddcaab4b3be9c194c7d272369c4ede", "score": "0.6787748", "text": "func (client PipelineClient) CreateOrUpdatePipelinePreparer(ctx context.Context, pipelineName string, pipeline PipelineResource, ifMatch string) (*http.Request, error) {\n\turlParameters := map[string]interface{}{\n\t\t\"endpoint\": client.Endpoint,\n\t}\n\n\tpathParameters := map[string]interface{}{\n\t\t\"pipelineName\": autorest.Encode(\"path\", pipelineName),\n\t}\n\n\tconst APIVersion = \"2021-06-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithCustomBaseURL(\"{endpoint}\", urlParameters),\n\t\tautorest.WithPathParameters(\"/pipelines/{pipelineName}\", pathParameters),\n\t\tautorest.WithJSON(pipeline),\n\t\tautorest.WithQueryParameters(queryParameters))\n\tif len(ifMatch) > 0 {\n\t\tpreparer = autorest.DecoratePreparer(preparer,\n\t\t\tautorest.WithHeader(\"If-Match\", 
autorest.String(ifMatch)))\n\t}\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "e50e14d188ee8b12c8d289ebec1f2f9b", "score": "0.6784101", "text": "func (client ManagedVirtualNetworksClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, factoryName string, managedVirtualNetworkName string, managedVirtualNetwork ManagedVirtualNetworkResource, ifMatch string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"factoryName\": autorest.Encode(\"path\", factoryName),\n\t\t\"managedVirtualNetworkName\": autorest.Encode(\"path\", managedVirtualNetworkName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}\", pathParameters),\n\t\tautorest.WithJSON(managedVirtualNetwork),\n\t\tautorest.WithQueryParameters(queryParameters))\n\tif len(ifMatch) > 0 {\n\t\tpreparer = autorest.DecoratePreparer(preparer,\n\t\t\tautorest.WithHeader(\"If-Match\", autorest.String(ifMatch)))\n\t}\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "fbba9182d341f86fb34f14d566e7e028", "score": "0.6768744", "text": "func (client ManagementClient) CreateKeyPreparer(vaultBaseURL string, keyName string, parameters KeyCreateParameters) (*http.Request, error) {\n\turlParameters := map[string]interface{}{\n\t\t\"vaultBaseUrl\": vaultBaseURL,\n\t}\n\n\tpathParameters := map[string]interface{}{\n\t\t\"key-name\": autorest.Encode(\"path\", keyName),\n\t}\n\n\tconst APIVersion = \"2015-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPost(),\n\t\tautorest.WithCustomBaseURL(\"{vaultBaseUrl}\", urlParameters),\n\t\tautorest.WithPathParameters(\"/keys/{key-name}/create\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "d5dc8c1f625d2c8fd4c8e57b8bc172dc", "score": "0.675607", "text": "func (client FileServersClient) CreateOrUpdatePreparer(ctx context.Context, deviceName string, fileServerName string, fileServer FileServer, resourceGroupName string, managerName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"deviceName\": autorest.Encode(\"path\", deviceName),\n\t\t\"fileServerName\": autorest.Encode(\"path\", fileServerName),\n\t\t\"managerName\": autorest.Encode(\"path\", managerName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2016-10-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; 
charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/devices/{deviceName}/fileservers/{fileServerName}\", pathParameters),\n\t\tautorest.WithJSON(fileServer),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "1c6ee6c63d25875ecf3014eb0442f19d", "score": "0.6731528", "text": "func (client ExpressRouteCircuitsClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, circuitName string, parameters ExpressRouteCircuit) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"circuitName\": autorest.Encode(\"path\", circuitName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tparameters.Etag = nil\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "bf321b7ff298239c8eaa0fc5321a3d4a", "score": "0.67010856", "text": "func (client GroupsClient) CreatePreparer(ctx context.Context, parameters GroupCreateParameters) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"tenantID\": autorest.Encode(\"path\", client.TenantID),\n\t}\n\n\tconst APIVersion = \"1.6\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/{tenantID}/groups\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "1d6fb7f0f661141c4f46f52189993c06", "score": "0.6672466", "text": "func (client ConfigurationPolicyGroupsClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, vpnServerConfigurationName string, configurationPolicyGroupName string, vpnServerConfigurationPolicyGroupParameters VpnServerConfigurationPolicyGroup) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"configurationPolicyGroupName\": autorest.Encode(\"path\", configurationPolicyGroupName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"vpnServerConfigurationName\": autorest.Encode(\"path\", vpnServerConfigurationName),\n\t}\n\n\tconst APIVersion = \"2022-05-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tvpnServerConfigurationPolicyGroupParameters.Etag = nil\n\tvpnServerConfigurationPolicyGroupParameters.Type = nil\n\tpreparer := 
autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnServerConfigurations/{vpnServerConfigurationName}/configurationPolicyGroups/{configurationPolicyGroupName}\", pathParameters),\n\t\tautorest.WithJSON(vpnServerConfigurationPolicyGroupParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "39c4abd46eb1b99bf82cf7b089ee6458", "score": "0.6613982", "text": "func (client TestJobClient) CreatePreparer(ctx context.Context, resourceGroupName string, automationAccountName string, runbookName string, parameters TestJobCreateParameters) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"automationAccountName\": autorest.Encode(\"path\", automationAccountName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"runbookName\": autorest.Encode(\"path\", runbookName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-06-30\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/runbooks/{runbookName}/draft/testJob\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "e8eb36cb50e67a0adbd7837c68fab8d8", "score": "0.6554077", "text": "func (client BaseClient) CreateSchemaMethodPreparer(ctx context.Context, input SchemaType) (*http.Request, error) {\n preparer := autorest.CreatePreparer(\n autorest.AsContentType(\"application/json-patch+json; charset=utf-8\"),\n autorest.AsPost(),\n autorest.WithBaseURL(client.BaseURI),\n autorest.WithPath(\"/v1/schemas\"),\n autorest.WithJSON(input))\n return preparer.Prepare((&http.Request{}).WithContext(ctx))\n }", "title": "" }, { "docid": "8dd35baa41cda459e19545cd68071bf2", "score": "0.6543414", "text": "func (client RulesClient) CreatePreparer(ctx context.Context, resourceGroupName string, profileName string, ruleSetName string, ruleName string, rule Rule) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"profileName\": autorest.Encode(\"path\", profileName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"ruleName\": autorest.Encode(\"path\", ruleName),\n\t\t\"ruleSetName\": autorest.Encode(\"path\", ruleSetName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2020-09-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; 
charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/ruleSets/{ruleSetName}/rules/{ruleName}\", pathParameters),\n\t\tautorest.WithJSON(rule),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "85220e8e1c1bad162050aff4144857b7", "score": "0.65041584", "text": "func (client TasksClient) UpdatePreparer(ctx context.Context, parameters ProjectTask, groupName string, serviceName string, projectName string, taskName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"groupName\": autorest.Encode(\"path\", groupName),\n\t\t\"projectName\": autorest.Encode(\"path\", projectName),\n\t\t\"serviceName\": autorest.Encode(\"path\", serviceName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"taskName\": autorest.Encode(\"path\", taskName),\n\t}\n\n\tconst APIVersion = \"2018-04-19\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "a498cadfe2fd7795bf278f0db6a1a3d1", "score": "0.6496034", "text": "func (client BaseClient) CreateNamePreparer(ctx context.Context, nameDescription NameDescription, timeout *int64) (*http.Request, error) {\n\tconst APIVersion = \"6.0\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif timeout != nil {\n\t\tqueryParameters[\"timeout\"] = autorest.Encode(\"query\", *timeout)\n\t} else {\n\t\tqueryParameters[\"timeout\"] = autorest.Encode(\"query\", 60)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/Names/$/Create\"),\n\t\tautorest.WithJSON(nameDescription),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "b365784dd4ee8f631bc540b3cd13618f", "score": "0.64248675", "text": "func (client FileClient) CreatePreparer(ctx context.Context, fileList io.ReadCloser, xNCPLANG string) (*http.Request, error) {\n\tformDataParameters := map[string]interface{}{\n\t\t\"fileList\": fileList,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/api/v1/files\"),\n\t\tautorest.WithMultiPartFormData(formDataParameters))\n\tif len(xNCPLANG) > 0 {\n\t\tpreparer = autorest.DecoratePreparer(preparer,\n\t\t\tautorest.WithHeader(\"X-NCP-LANG\", autorest.String(xNCPLANG)))\n\t}\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "3e41922e2d56a83ca5e592081c360807", "score": "0.640165", "text": "func (client SecretValueClient) CreatePreparer(ctx 
context.Context, resourceGroupName string, secretResourceName string, secretValueResourceName string, secretValueResourceDescription SecretValueResourceDescription) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"secretResourceName\": secretResourceName,\n\t\t\"secretValueResourceName\": secretValueResourceName,\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-09-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabricMesh/secrets/{secretResourceName}/values/{secretValueResourceName}\", pathParameters),\n\t\tautorest.WithJSON(secretValueResourceDescription),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "d827ecf54098baa91994502f880505e2", "score": "0.6365306", "text": "func (client KustoPoolDatabasesClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, workspaceName string, kustoPoolName string, databaseName string, parameters BasicDatabase) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"databaseName\": autorest.Encode(\"path\", databaseName),\n\t\t\"kustoPoolName\": autorest.Encode(\"path\", kustoPoolName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"workspaceName\": autorest.Encode(\"path\", workspaceName),\n\t}\n\n\tconst APIVersion = \"2021-06-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/kustoPools/{kustoPoolName}/databases/{databaseName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "185cbbbf52dd67f90e2cd2230f19684c", "score": "0.63097924", "text": "func (client LabsClient) PublishPreparer(ctx context.Context, resourceGroupName string, labName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"labName\": autorest.Encode(\"path\", labName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2021-11-15-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}/publish\", 
pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "68a4f06b53e92d365d69a01b6a24274b", "score": "0.6250309", "text": "func (client BaseClient) CreateBatchMethodPreparer(ctx context.Context, input BatchType) (*http.Request, error) {\n preparer := autorest.CreatePreparer(\n autorest.AsContentType(\"application/json-patch+json; charset=utf-8\"),\n autorest.AsPost(),\n autorest.WithBaseURL(client.BaseURI),\n autorest.WithPath(\"/v1/batches\"),\n autorest.WithJSON(input))\n return preparer.Prepare((&http.Request{}).WithContext(ctx))\n }", "title": "" }, { "docid": "5e43f6c43ecfa6875c145edcc2d63967", "score": "0.6245701", "text": "func (client BaseClient) CreateRepairTaskPreparer(ctx context.Context, repairTask RepairTask) (*http.Request, error) {\n\tconst APIVersion = \"6.0\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/$/CreateRepairTask\"),\n\t\tautorest.WithJSON(repairTask),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "91c123f3766ff219e6b55753055a804c", "score": "0.62365794", "text": "func (client AppsClient) CreateOrUpdateSourceControlPreparer(resourceGroupName string, name string, siteSourceControl SiteSourceControl, cancel <-chan struct{}) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2016-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/sourcecontrols/web\", pathParameters),\n\t\tautorest.WithJSON(siteSourceControl),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{Cancel: cancel})\n}", "title": "" }, { "docid": "0453dbf415473146396cd4fa7ab6f48b", "score": "0.62184", "text": "func (client VendorVirtualNetworkFunctionsClient) CreateOrUpdatePreparer(ctx context.Context, locationName string, vendorName string, serviceKey string, parameters VendorVirtualNetworkFunction) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"locationName\": autorest.Encode(\"path\", locationName),\n\t\t\"serviceKey\": autorest.Encode(\"path\", serviceKey),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"vendorName\": autorest.Encode(\"path\", vendorName),\n\t}\n\n\tconst APIVersion = \"2020-01-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; 
charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/providers/Microsoft.HybridNetwork/locations/{locationName}/vendors/{vendorName}/virtualNetworkFunctions/{serviceKey}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "b28fa49d5a7d4c10bdafcb1761532ec3", "score": "0.62136865", "text": "func (client ReplicationProtectedItemsClient) CreatePreparer(ctx context.Context, fabricName string, protectionContainerName string, replicatedProtectedItemName string, input EnableProtectionInput) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"fabricName\": autorest.Encode(\"path\", fabricName),\n\t\t\"protectionContainerName\": autorest.Encode(\"path\", protectionContainerName),\n\t\t\"replicatedProtectedItemName\": autorest.Encode(\"path\", replicatedProtectedItemName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", client.ResourceGroupName),\n\t\t\"resourceName\": autorest.Encode(\"path\", client.ResourceName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-07-10\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/Subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}/replicationProtectionContainers/{protectionContainerName}/replicationProtectedItems/{replicatedProtectedItemName}\", pathParameters),\n\t\tautorest.WithJSON(input),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "f3de5c62700c0ab260aa17efe7fb1a83", "score": "0.62020075", "text": "func (client BaseClient) CreateFlowMethodPreparer(ctx context.Context, input FlowType) (*http.Request, error) {\n preparer := autorest.CreatePreparer(\n autorest.AsContentType(\"application/json-patch+json; charset=utf-8\"),\n autorest.AsPost(),\n autorest.WithBaseURL(client.BaseURI),\n autorest.WithPath(\"/v1/flows\"),\n autorest.WithJSON(input))\n return preparer.Prepare((&http.Request{}).WithContext(ctx))\n }", "title": "" }, { "docid": "35d144eb1af8139921925edce31ed3e6", "score": "0.6187661", "text": "func (client ServiceTasksClient) UpdatePreparer(ctx context.Context, parameters ProjectTask, groupName string, serviceName string, taskName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"groupName\": autorest.Encode(\"path\", groupName),\n\t\t\"serviceName\": autorest.Encode(\"path\", serviceName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"taskName\": autorest.Encode(\"path\", taskName),\n\t}\n\n\tconst APIVersion = \"2018-07-15-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; 
charset=utf-8\"),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "9a9256d9a38f8765113a020a748d7195", "score": "0.6174527", "text": "func (client AccessPoliciesClient) UpdatePreparer(ctx context.Context, resourceGroupName string, accountName string, accessPolicyName string, parameters AccessPolicyEntity) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"accessPolicyName\": autorest.Encode(\"path\", accessPolicyName),\n\t\t\"accountName\": autorest.Encode(\"path\", accountName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2021-05-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tparameters.SystemData = nil\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/videoAnalyzers/{accountName}/accessPolicies/{accessPolicyName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "6ea1ed7b3416d007980fe1d663c81ffe", "score": "0.6173431", "text": "func (client PrivateEndpointConnectionClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, serviceName string, privateEndpointConnectionName string, privateEndpointConnectionRequest PrivateEndpointConnectionRequest) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"privateEndpointConnectionName\": autorest.Encode(\"path\", privateEndpointConnectionName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"serviceName\": autorest.Encode(\"path\", serviceName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2021-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/privateEndpointConnections/{privateEndpointConnectionName}\", pathParameters),\n\t\tautorest.WithJSON(privateEndpointConnectionRequest),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "872b1b0cf83eeaac7f9b62c726678dc7", "score": "0.61577624", "text": "func (client AppsClient) UpdateMetadataPreparer(resourceGroupName string, name string, metadata StringDictionary) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": 
autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2016-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/config/metadata\", pathParameters),\n\t\tautorest.WithJSON(metadata),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "621cff624e10d9f6e4bdda9b2cbf573f", "score": "0.61558926", "text": "func (client AppsClient) CreateOrUpdateConfigurationPreparer(resourceGroupName string, name string, siteConfig SiteConfigResource) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2016-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/config/web\", pathParameters),\n\t\tautorest.WithJSON(siteConfig),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "801421895f41cee42541d0b3669c5dbe", "score": "0.6127879", "text": "func (client LabsClient) UpdatePreparer(ctx context.Context, body LabUpdate, resourceGroupName string, labName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"labName\": autorest.Encode(\"path\", labName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2021-11-15-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}\", pathParameters),\n\t\tautorest.WithJSON(body),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "95820e4142d8fa10bc26cacc96896b7a", "score": "0.6124115", "text": "func (client DevicesClient) UpdatePreparer(ctx context.Context, deviceName string, parameters DevicePatch, resourceGroupName string, managerName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"deviceName\": deviceName,\n\t\t\"managerName\": managerName,\n\t\t\"resourceGroupName\": resourceGroupName,\n\t\t\"subscriptionId\": client.SubscriptionID,\n\t}\n\n\tconst APIVersion = \"2017-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := 
autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/devices/{deviceName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "67bdc24a5c65b93f07739c21f73944a3", "score": "0.6111034", "text": "func (client BaseClient) CreateJobMethodPreparer(ctx context.Context, input JobType) (*http.Request, error) {\n preparer := autorest.CreatePreparer(\n autorest.AsContentType(\"application/json-patch+json; charset=utf-8\"),\n autorest.AsPost(),\n autorest.WithBaseURL(client.BaseURI),\n autorest.WithPath(\"/v1/jobs\"),\n autorest.WithJSON(input))\n return preparer.Prepare((&http.Request{}).WithContext(ctx))\n }", "title": "" }, { "docid": "77918735e8e8ee46775fd27b028bfa3c", "score": "0.61051655", "text": "func (client KustoPoolDatabasesClient) UpdatePreparer(ctx context.Context, resourceGroupName string, workspaceName string, kustoPoolName string, databaseName string, parameters BasicDatabase) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"databaseName\": autorest.Encode(\"path\", databaseName),\n\t\t\"kustoPoolName\": autorest.Encode(\"path\", kustoPoolName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"workspaceName\": autorest.Encode(\"path\", workspaceName),\n\t}\n\n\tconst APIVersion = \"2021-06-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/kustoPools/{kustoPoolName}/databases/{databaseName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "2cf91ce3ae299ec17c14b2c5e73681ff", "score": "0.6093722", "text": "func (client WorkflowsClient) ValidatePreparer(ctx context.Context, resourceGroupName string, name string, workflowName string, validate Workflow) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"workflowName\": autorest.Encode(\"path\", workflowName),\n\t}\n\n\tconst APIVersion = \"2022-09-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/hostruntime/runtime/webhooks/workflow/api/management/workflows/{workflowName}/validate\", 
pathParameters),\n\t\tautorest.WithJSON(validate),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "44da871463a891c5d33b17a004470e99", "score": "0.60605174", "text": "func (client MetricGroupClient) CreatePreparer(ctx context.Context, parameters MetricsGroupRequest) (*http.Request, error) {\n\ttimestamp := strconv.FormatInt(time.Now().UnixNano()/int64(time.Millisecond), 10)\n\tsec := security.NewSignature(client.Secretkey, crypto.SHA256)\n\tsignature, err := sec.Signature(\"POST\", common.GetPath(DefaultBaseURI, \"/cw_fea/real/cw/api/rule/group/metrics\"), client.AccessKey, timestamp)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/cw_fea/real/cw/api/rule/group/metrics\"),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithHeader(\"x-ncp-apigw-api-key\", client.APIGatewayAPIKey),\n\t\tautorest.WithHeader(\"x-ncp-apigw-timestamp\", timestamp),\n\t\tautorest.WithHeader(\"x-ncp-iam-access-key\", client.AccessKey),\n\t\tautorest.WithHeader(\"x-ncp-apigw-signature-v2\", signature))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "62145a0a198e4820a9d74f9fd25c15bb", "score": "0.6020558", "text": "func (client KustoPoolAttachedDatabaseConfigurationsClient) CreateOrUpdatePreparer(ctx context.Context, workspaceName string, kustoPoolName string, attachedDatabaseConfigurationName string, resourceGroupName string, parameters AttachedDatabaseConfiguration) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"attachedDatabaseConfigurationName\": autorest.Encode(\"path\", attachedDatabaseConfigurationName),\n\t\t\"kustoPoolName\": autorest.Encode(\"path\", kustoPoolName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"workspaceName\": autorest.Encode(\"path\", workspaceName),\n\t}\n\n\tconst APIVersion = \"2021-06-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tparameters.SystemData = nil\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/kustoPools/{kustoPoolName}/attachedDatabaseConfigurations/{attachedDatabaseConfigurationName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "d3b5f285ed21e2154b47307f57a1e17d", "score": "0.6008163", "text": "func (client BlockStorageClient) CreatePreparer(ctx context.Context, serverInstanceNo string, zoneCode string, blockStorageName string, blockStorageDiskDetailTypeCode BlockStorageDiskDetailTypeCode, blockStorageSnapshotInstanceNo string, blockStorageSize string, blockStorageDescription string) (*http.Request, error) {\n\tqueryParameters := map[string]interface{}{\n\t\t\"responseFormatType\": autorest.Encode(\"query\", \"json\"),\n\t\t\"serverInstanceNo\": autorest.Encode(\"query\", 
serverInstanceNo),\n\t}\n\n\tqueryParameters[\"regionCode\"] = autorest.Encode(\"query\", \"FKR\")\n\n\tif len(zoneCode) > 0 {\n\t\tqueryParameters[\"zoneCode\"] = autorest.Encode(\"query\", zoneCode)\n\t}\n\tif len(blockStorageName) > 0 {\n\t\tqueryParameters[\"blockStorageName\"] = autorest.Encode(\"query\", blockStorageName)\n\t}\n\tif len(string(blockStorageDiskDetailTypeCode)) > 0 {\n\t\tqueryParameters[\"blockStorageDiskDetailTypeCode\"] = autorest.Encode(\"query\", blockStorageDiskDetailTypeCode)\n\t}\n\tif len(blockStorageSnapshotInstanceNo) > 0 {\n\t\tqueryParameters[\"blockStorageSnapshotInstanceNo\"] = autorest.Encode(\"query\", blockStorageSnapshotInstanceNo)\n\t}\n\tif len(blockStorageSize) > 0 {\n\t\tqueryParameters[\"blockStorageSize\"] = autorest.Encode(\"query\", blockStorageSize)\n\t}\n\tif len(blockStorageDescription) > 0 {\n\t\tqueryParameters[\"blockStorageDescription\"] = autorest.Encode(\"query\", blockStorageDescription)\n\t}\n\n\ttimestamp := strconv.FormatInt(time.Now().UnixNano()/int64(time.Millisecond), 10)\n\tsec := security.NewSignature(client.Secretkey, crypto.SHA256)\n\tsignature, err := sec.Signature(\"POST\", common.GetPath(DefaultBaseURI, \"/createBlockStorageInstance\")+\"?\"+common.GetQuery(queryParameters), client.AccessKey, timestamp)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/createBlockStorageInstance\"),\n\t\tautorest.WithQueryParameters(queryParameters),\n\t\tautorest.WithHeader(\"x-ncp-apigw-timestamp\", timestamp),\n\t\tautorest.WithHeader(\"x-ncp-iam-access-key\", client.AccessKey),\n\t\tautorest.WithHeader(\"x-ncp-apigw-signature-v2\", signature))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "3c410f5e4951c582ccb183724258d247", "score": "0.59977734", "text": "func (client VirtualMachineScaleSetVMsClient) UpdatePreparer(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string, parameters VirtualMachineScaleSetVM) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"instanceId\": autorest.Encode(\"path\", instanceID),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"vmScaleSetName\": autorest.Encode(\"path\", VMScaleSetName),\n\t}\n\n\tconst APIVersion = \"2019-12-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tparameters.InstanceID = nil\n\tparameters.Sku = nil\n\tparameters.Resources = nil\n\tparameters.Zones = nil\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "0d514fb4f41d37700be145ead3a34cb9", "score": "0.5986912", "text": "func (client DevicesClient) ConfigurePreparer(ctx context.Context, parameters ConfigureDeviceRequest, resourceGroupName string, managerName string) (*http.Request, error) {\n\tpathParameters := 
map[string]interface{}{\n\t\t\"managerName\": managerName,\n\t\t\"resourceGroupName\": resourceGroupName,\n\t\t\"subscriptionId\": client.SubscriptionID,\n\t}\n\n\tconst APIVersion = \"2017-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/configureDevice\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "19f6e7c4874bfa7b3a010d7672f8f2ff", "score": "0.5984566", "text": "func (client VirtualMachineClient) PatchResourcePreparer(ctx context.Context, resourceGroupName string, labName string, name string, labVirtualMachine LabVirtualMachine) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"labName\": autorest.Encode(\"path\", labName),\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2015-05-21-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevTestLab/labs/{labName}/virtualmachines/{name}\", pathParameters),\n\t\tautorest.WithJSON(labVirtualMachine),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "2f08026077af8bb96bcfbfcdfa4421e0", "score": "0.5963871", "text": "func (client ContactsClient) CreatePreparer(ctx context.Context, securityContactName string, securityContact Contact) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"securityContactName\": autorest.Encode(\"path\", securityContactName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2017-08-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/providers/Microsoft.Security/securityContacts/{securityContactName}\", pathParameters),\n\t\tautorest.WithJSON(securityContact),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "94110959aa266622a01d3995f72e0a12", "score": "0.5957196", "text": "func (client PetsClient) CreateAPTruePreparer(ctx context.Context, createParameters PetAPTrue) (*http.Request, error) {\n\tcreateParameters.Status = nil\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; 
charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/additionalProperties/true\"),\n\t\tautorest.WithJSON(createParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "4174d265487352cc45ad3d2ec9705a90", "score": "0.5943594", "text": "func (client NamespacesClient) UpdatePreparer(resourceGroupName string, namespaceName string, parameters EHNamespace) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"namespaceName\": autorest.Encode(\"path\", namespaceName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2017-04-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "d0b7ecb6abecb0d89edc15d49b025806", "score": "0.59405947", "text": "func (client BaseClient) CreateShapeMethodPreparer(ctx context.Context, input ShapeType) (*http.Request, error) {\n preparer := autorest.CreatePreparer(\n autorest.AsContentType(\"application/json-patch+json; charset=utf-8\"),\n autorest.AsPost(),\n autorest.WithBaseURL(client.BaseURI),\n autorest.WithPath(\"/v1/shapes\"),\n autorest.WithJSON(input))\n return preparer.Prepare((&http.Request{}).WithContext(ctx))\n }", "title": "" }, { "docid": "9717fd8de2df4abc0e893b4c95c81801", "score": "0.59338474", "text": "func (client PetsClient) CreateAPObjectPreparer(ctx context.Context, createParameters PetAPObject) (*http.Request, error) {\n\tcreateParameters.Status = nil\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/additionalProperties/type/object\"),\n\t\tautorest.WithJSON(createParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "ea475bb93070a2b91cb2263eeb567472", "score": "0.59266305", "text": "func (client ManagementClient) UpdateKeyPreparer(vaultBaseURL string, keyName string, keyVersion string, parameters KeyUpdateParameters) (*http.Request, error) {\n\turlParameters := map[string]interface{}{\n\t\t\"vaultBaseUrl\": vaultBaseURL,\n\t}\n\n\tpathParameters := map[string]interface{}{\n\t\t\"key-name\": autorest.Encode(\"path\", keyName),\n\t\t\"key-version\": autorest.Encode(\"path\", keyVersion),\n\t}\n\n\tconst APIVersion = \"2015-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithCustomBaseURL(\"{vaultBaseUrl}\", urlParameters),\n\t\tautorest.WithPathParameters(\"/keys/{key-name}/{key-version}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "63a97c5e01bf083e8efa786c21de7c42", "score": "0.5910724", "text": "func 
(client CassandraResourcesClient) CreateUpdateCassandraKeyspacePreparer(ctx context.Context, resourceGroupName string, accountName string, keyspaceName string, createUpdateCassandraKeyspaceParameters CassandraKeyspaceCreateUpdateParameters) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"accountName\": autorest.Encode(\"path\", accountName),\n\t\t\"keyspaceName\": autorest.Encode(\"path\", keyspaceName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2021-10-15\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/cassandraKeyspaces/{keyspaceName}\", pathParameters),\n\t\tautorest.WithJSON(createUpdateCassandraKeyspaceParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "e7fe272ad344349f4f9d418349411151", "score": "0.5910102", "text": "func (client BaseClient) PutPropertyPreparer(ctx context.Context, nameID string, propertyDescription PropertyDescription, timeout *int64) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"nameId\": nameID,\n\t}\n\n\tconst APIVersion = \"6.0\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif timeout != nil {\n\t\tqueryParameters[\"timeout\"] = autorest.Encode(\"query\", *timeout)\n\t} else {\n\t\tqueryParameters[\"timeout\"] = autorest.Encode(\"query\", 60)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/Names/{nameId}/$/GetProperty\", pathParameters),\n\t\tautorest.WithJSON(propertyDescription),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "00e0b5d743a8b3982a2df6954e3d3ace", "score": "0.590744", "text": "func (client HostingEnvironmentsClient) CreateOrUpdateHostingEnvironmentPreparer(resourceGroupName string, name string, hostingEnvironmentEnvelope HostingEnvironment, cancel <-chan struct{}) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2015-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/hostingEnvironments/{name}\", pathParameters),\n\t\tautorest.WithJSON(hostingEnvironmentEnvelope),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{Cancel: cancel})\n}", "title": "" }, { "docid": 
"0443509b0bafcc441af28c4f2af424d0", "score": "0.5905299", "text": "func (client ManagementClient) ImportKeyPreparer(vaultBaseURL string, keyName string, parameters KeyImportParameters) (*http.Request, error) {\n\turlParameters := map[string]interface{}{\n\t\t\"vaultBaseUrl\": vaultBaseURL,\n\t}\n\n\tpathParameters := map[string]interface{}{\n\t\t\"key-name\": autorest.Encode(\"path\", keyName),\n\t}\n\n\tconst APIVersion = \"2015-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithCustomBaseURL(\"{vaultBaseUrl}\", urlParameters),\n\t\tautorest.WithPathParameters(\"/keys/{key-name}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "3215b836ed52f226e5bf4875c7fc848f", "score": "0.5900564", "text": "func (client AppsClient) CreateOrUpdateDomainOwnershipIdentifierPreparer(resourceGroupName string, name string, domainOwnershipIdentifierName string, domainOwnershipIdentifier Identifier) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"domainOwnershipIdentifierName\": autorest.Encode(\"path\", domainOwnershipIdentifierName),\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2016-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/domainOwnershipIdentifiers/{domainOwnershipIdentifierName}\", pathParameters),\n\t\tautorest.WithJSON(domainOwnershipIdentifier),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "0eb4718b5e0db57a536b54ce5ed01448", "score": "0.5882573", "text": "func (client BaseClient) CreateOrderItemPreparer(ctx context.Context, orderItemName string, resourceGroupName string, orderItemResource OrderItemResource) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"orderItemName\": autorest.Encode(\"path\", orderItemName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2020-12-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\torderItemResource.SystemData = nil\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EdgeOrder/orderItems/{orderItemName}\", pathParameters),\n\t\tautorest.WithJSON(orderItemResource),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "51a9de6d7e07f6df31e9e6a0c660073a", "score": "0.58432585", "text": "func (client ImageClient) CreatePreparer(ctx context.Context, 
serverInstanceNo string, memberServerImageName string, memberServerImageDescription string) (*http.Request, error) {\n\tqueryParameters := map[string]interface{}{\n\t\t\"responseFormatType\": autorest.Encode(\"query\", \"json\"),\n\t\t\"serverInstanceNo\": autorest.Encode(\"query\", serverInstanceNo),\n\t}\n\n\tqueryParameters[\"regionCode\"] = autorest.Encode(\"query\", \"FKR\")\n\n\tif len(memberServerImageName) > 0 {\n\t\tqueryParameters[\"memberServerImageName\"] = autorest.Encode(\"query\", memberServerImageName)\n\t}\n\tif len(memberServerImageDescription) > 0 {\n\t\tqueryParameters[\"memberServerImageDescription\"] = autorest.Encode(\"query\", memberServerImageDescription)\n\t}\n\n\ttimestamp := strconv.FormatInt(time.Now().UnixNano()/int64(time.Millisecond), 10)\n\tsec := security.NewSignature(client.Secretkey, crypto.SHA256)\n\tsignature, err := sec.Signature(\"POST\", common.GetPath(DefaultBaseURI, \"/createMemberServerImageInstance\")+\"?\"+common.GetQuery(queryParameters), client.AccessKey, timestamp)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/createMemberServerImageInstance\"),\n\t\tautorest.WithQueryParameters(queryParameters),\n\t\tautorest.WithHeader(\"x-ncp-apigw-timestamp\", timestamp),\n\t\tautorest.WithHeader(\"x-ncp-iam-access-key\", client.AccessKey),\n\t\tautorest.WithHeader(\"x-ncp-apigw-signature-v2\", signature))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "638ee365c7fa7ef3276af238d26ca40c", "score": "0.58228743", "text": "func (client Client) SetPropertiesPreparer(ctx context.Context, accountName string, filesystem string, xMsProperties string, ifModifiedSince string, ifUnmodifiedSince string, xMsClientRequestID string, timeout *int32, xMsDate string) (*http.Request, error) {\n\turlParameters := map[string]interface{}{\n\t\t\"accountName\": accountName,\n\t\t\"dnsSuffix\": DefaultDNSSuffix,\n\t}\n\n\tpathParameters := map[string]interface{}{\n\t\t\"filesystem\": autorest.Encode(\"path\", filesystem),\n\t}\n\n\tqueryParameters := map[string]interface{}{\n\t\t\"resource\": autorest.Encode(\"query\", \"filesystem\"),\n\t}\n\tif timeout != nil {\n\t\tqueryParameters[\"timeout\"] = autorest.Encode(\"query\", *timeout)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPatch(),\n\t\tautorest.WithCustomBaseURL(\"https://{accountName}.{dnsSuffix}\", urlParameters),\n\t\tautorest.WithPathParameters(\"/{filesystem}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\tif len(xMsProperties) > 0 {\n\t\tpreparer = autorest.DecoratePreparer(preparer,\n\t\t\tautorest.WithHeader(\"x-ms-properties\", autorest.String(xMsProperties)))\n\t}\n\tif len(ifModifiedSince) > 0 {\n\t\tpreparer = autorest.DecoratePreparer(preparer,\n\t\t\tautorest.WithHeader(\"If-Modified-Since\", autorest.String(ifModifiedSince)))\n\t}\n\tif len(ifUnmodifiedSince) > 0 {\n\t\tpreparer = autorest.DecoratePreparer(preparer,\n\t\t\tautorest.WithHeader(\"If-Unmodified-Since\", autorest.String(ifUnmodifiedSince)))\n\t}\n\tif len(xMsClientRequestID) > 0 {\n\t\tpreparer = autorest.DecoratePreparer(preparer,\n\t\t\tautorest.WithHeader(\"x-ms-client-request-id\", autorest.String(xMsClientRequestID)))\n\t}\n\tif len(xMsDate) > 0 {\n\t\tpreparer = autorest.DecoratePreparer(preparer,\n\t\t\tautorest.WithHeader(\"x-ms-date\", autorest.String(xMsDate)))\n\t}\n\tpreparer = 
autorest.DecoratePreparer(preparer,\n\t\tautorest.WithHeader(\"x-ms-version\", autorest.String(APIVersion)))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "5727c5fbaf3a85ae090d4588041c212e", "score": "0.5795942", "text": "func (client VirtualMachineClient) ListPreparer(ctx context.Context, resourceGroupName string, labName string, filter string, top *int32, orderBy string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"labName\": autorest.Encode(\"path\", labName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2015-05-21-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif len(filter) > 0 {\n\t\tqueryParameters[\"$filter\"] = autorest.Encode(\"query\", filter)\n\t}\n\tif top != nil {\n\t\tqueryParameters[\"$top\"] = autorest.Encode(\"query\", *top)\n\t}\n\tif len(orderBy) > 0 {\n\t\tqueryParameters[\"$orderBy\"] = autorest.Encode(\"query\", orderBy)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevTestLab/labs/{labName}/virtualmachines\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "d8c93e8925cfa44a11e1f5cf275227f5", "score": "0.5793868", "text": "func (client ApplyUpdatesClient) GetPreparer(ctx context.Context, resourceGroupName string, providerName string, resourceType string, resourceName string, applyUpdateName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"applyUpdateName\": autorest.Encode(\"path\", applyUpdateName),\n\t\t\"providerName\": autorest.Encode(\"path\", providerName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"resourceName\": autorest.Encode(\"path\", resourceName),\n\t\t\"resourceType\": autorest.Encode(\"path\", resourceType),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-06-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{providerName}/{resourceType}/{resourceName}/providers/Microsoft.Maintenance/applyUpdates/{applyUpdateName}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "bfcff8343c9de9fe9503e745f82abbcf", "score": "0.579032", "text": "func (client ExpressRouteCircuitsClient) UpdateTagsPreparer(ctx context.Context, resourceGroupName string, circuitName string, parameters TagsObject) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"circuitName\": autorest.Encode(\"path\", circuitName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": 
APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "e14c4887403294ec8697dd08d4472a36", "score": "0.578882", "text": "func (client BaseClient) GetAllJobsMethodPreparer(ctx context.Context, ID string, name string, description string, connectionID string, schemaID string, shapeID string, agentID string, typeParameter string, createdBy string, updatedBy string, deletedBy string) (*http.Request, error) {\n queryParameters := map[string]interface{} {\n }\n if len(ID) > 0 {\n queryParameters[\"id\"] = autorest.Encode(\"query\",ID)\n }\n if len(name) > 0 {\n queryParameters[\"name\"] = autorest.Encode(\"query\",name)\n }\n if len(description) > 0 {\n queryParameters[\"description\"] = autorest.Encode(\"query\",description)\n }\n if len(connectionID) > 0 {\n queryParameters[\"connectionId\"] = autorest.Encode(\"query\",connectionID)\n }\n if len(schemaID) > 0 {\n queryParameters[\"schemaId\"] = autorest.Encode(\"query\",schemaID)\n }\n if len(shapeID) > 0 {\n queryParameters[\"shapeId\"] = autorest.Encode(\"query\",shapeID)\n }\n if len(agentID) > 0 {\n queryParameters[\"agentId\"] = autorest.Encode(\"query\",agentID)\n }\n if len(typeParameter) > 0 {\n queryParameters[\"type\"] = autorest.Encode(\"query\",typeParameter)\n }\n if len(createdBy) > 0 {\n queryParameters[\"createdBy\"] = autorest.Encode(\"query\",createdBy)\n }\n if len(updatedBy) > 0 {\n queryParameters[\"updatedBy\"] = autorest.Encode(\"query\",updatedBy)\n }\n if len(deletedBy) > 0 {\n queryParameters[\"deletedBy\"] = autorest.Encode(\"query\",deletedBy)\n }\n\n preparer := autorest.CreatePreparer(\n autorest.AsGet(),\n autorest.WithBaseURL(client.BaseURI),\n autorest.WithPath(\"/v1/jobs\"),\n autorest.WithQueryParameters(queryParameters))\n return preparer.Prepare((&http.Request{}).WithContext(ctx))\n }", "title": "" }, { "docid": "e8a32cdf84f421cdf069d2e3604e8d00", "score": "0.57863075", "text": "func (client TranslatorClient) TranslatePreparer(ctx context.Context, toParameter []string, textParameter []TranslateTextInput, from string, textType string, category string, profanityAction string, profanityMarker string, includeAlignment *bool, includeSentenceLength *bool, suggestedFrom string, fromScript string, toScript []string, xClientTraceID string) (*http.Request, error) {\n\turlParameters := map[string]interface{}{\n\t\t\"Endpoint\": client.Endpoint,\n\t}\n\n\tconst APIVersion = \"3.0\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t\t\"to\": autorest.Encode(\"query\", toParameter, \",\"),\n\t}\n\tif len(from) > 0 {\n\t\tqueryParameters[\"from\"] = autorest.Encode(\"query\", from)\n\t}\n\tif len(string(textType)) > 0 {\n\t\tqueryParameters[\"textType\"] = autorest.Encode(\"query\", textType)\n\t}\n\tif len(category) > 0 {\n\t\tqueryParameters[\"category\"] = autorest.Encode(\"query\", category)\n\t}\n\tif len(string(profanityAction)) > 0 {\n\t\tqueryParameters[\"profanityAction\"] = autorest.Encode(\"query\", profanityAction)\n\t}\n\tif len(profanityMarker) > 0 
{\n\t\tqueryParameters[\"profanityMarker\"] = autorest.Encode(\"query\", profanityMarker)\n\t}\n\tif includeAlignment != nil {\n\t\tqueryParameters[\"includeAlignment\"] = autorest.Encode(\"query\", *includeAlignment)\n\t}\n\tif includeSentenceLength != nil {\n\t\tqueryParameters[\"includeSentenceLength\"] = autorest.Encode(\"query\", *includeSentenceLength)\n\t}\n\tif len(suggestedFrom) > 0 {\n\t\tqueryParameters[\"suggestedFrom\"] = autorest.Encode(\"query\", suggestedFrom)\n\t}\n\tif len(fromScript) > 0 {\n\t\tqueryParameters[\"fromScript\"] = autorest.Encode(\"query\", fromScript)\n\t}\n\tif toScript != nil && len(toScript) > 0 {\n\t\tqueryParameters[\"toScript\"] = autorest.Encode(\"query\", toScript, \",\")\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPost(),\n\t\tautorest.WithCustomBaseURL(\"{Endpoint}\", urlParameters),\n\t\tautorest.WithPath(\"/translate\"),\n\t\tautorest.WithJSON(textParameter),\n\t\tautorest.WithQueryParameters(queryParameters))\n\tif len(xClientTraceID) > 0 {\n\t\tpreparer = autorest.DecoratePreparer(preparer,\n\t\t\tautorest.WithHeader(\"X-ClientTraceId\", autorest.String(xClientTraceID)))\n\t}\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "ba5ad61dc45dffb3a8e532acb44d7faa", "score": "0.5772912", "text": "func (client TasksClient) ListPreparer(ctx context.Context, groupName string, serviceName string, projectName string, taskType string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"groupName\": autorest.Encode(\"path\", groupName),\n\t\t\"projectName\": autorest.Encode(\"path\", projectName),\n\t\t\"serviceName\": autorest.Encode(\"path\", serviceName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-04-19\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif len(taskType) > 0 {\n\t\tqueryParameters[\"taskType\"] = autorest.Encode(\"query\", taskType)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "512fea2a8ee74da545efdf23809b54ea", "score": "0.5759417", "text": "func (client BaseClient) ProvisionApplicationTypePreparer(ctx context.Context, provisionApplicationTypeDescriptionBaseRequiredBodyParam BasicProvisionApplicationTypeDescriptionBase, timeout *int64) (*http.Request, error) {\n\tconst APIVersion = \"6.1\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif timeout != nil {\n\t\tqueryParameters[\"timeout\"] = autorest.Encode(\"query\", *timeout)\n\t} else {\n\t\tqueryParameters[\"timeout\"] = autorest.Encode(\"query\", 60)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/ApplicationTypes/$/Provision\"),\n\t\tautorest.WithJSON(provisionApplicationTypeDescriptionBaseRequiredBodyParam),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn 
preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "b9e07b28a2d6cc555c9a891feb68e52f", "score": "0.5746103", "text": "func (client MachinesClient) GetPreparer(ctx context.Context, resourceGroupName string, workspaceName string, machineName string, timestamp *date.Time) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"machineName\": autorest.Encode(\"path\", machineName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"workspaceName\": autorest.Encode(\"path\", workspaceName),\n\t}\n\n\tconst APIVersion = \"2015-11-01-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif timestamp != nil {\n\t\tqueryParameters[\"timestamp\"] = autorest.Encode(\"query\", *timestamp)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/features/serviceMap/machines/{machineName}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "7448c6c44478a50237c7d821885f0409", "score": "0.5744758", "text": "func (client BaseClient) GetPartitionInfoPreparer(ctx context.Context, partitionID uuid.UUID, timeout *int64) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"partitionId\": partitionID,\n\t}\n\n\tconst APIVersion = \"6.0\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif timeout != nil {\n\t\tqueryParameters[\"timeout\"] = autorest.Encode(\"query\", *timeout)\n\t} else {\n\t\tqueryParameters[\"timeout\"] = autorest.Encode(\"query\", 60)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/Partitions/{partitionId}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "e7d79b2c513cba5065ebb528dbca59a8", "score": "0.57444733", "text": "func (client VirtualMachineClient) StartPreparer(ctx context.Context, resourceGroupName string, labName string, name string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"labName\": autorest.Encode(\"path\", labName),\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2015-05-21-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevTestLab/labs/{labName}/virtualmachines/{name}/start\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "4947052adf0e46e736c65151ecf042d8", "score": "0.57309806", "text": "func (client TasksClient) GetPreparer(ctx context.Context, groupName string, 
serviceName string, projectName string, taskName string, expand string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"groupName\": autorest.Encode(\"path\", groupName),\n\t\t\"projectName\": autorest.Encode(\"path\", projectName),\n\t\t\"serviceName\": autorest.Encode(\"path\", serviceName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t\t\"taskName\": autorest.Encode(\"path\", taskName),\n\t}\n\n\tconst APIVersion = \"2018-04-19\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif len(expand) > 0 {\n\t\tqueryParameters[\"$expand\"] = autorest.Encode(\"query\", expand)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "0f911c17797bd0d06353296f63892015", "score": "0.5721161", "text": "func (client VirtualMachineClient) GetResourcePreparer(ctx context.Context, resourceGroupName string, labName string, name string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"labName\": autorest.Encode(\"path\", labName),\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2015-05-21-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevTestLab/labs/{labName}/virtualmachines/{name}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "7df1c2c74a69a5082de118e36a29d013", "score": "0.5717389", "text": "func (client BaseClient) GetAllSchemasMethodPreparer(ctx context.Context, ID string, name string, description string, discoveredID string, discoveredName string, discoveredDescription string, query string, connectionID string, profilingJobID string, publisherMetaJSON string, capability string, createdBy string, updatedBy string, deletedBy string) (*http.Request, error) {\n queryParameters := map[string]interface{} {\n }\n if len(ID) > 0 {\n queryParameters[\"id\"] = autorest.Encode(\"query\",ID)\n }\n if len(name) > 0 {\n queryParameters[\"name\"] = autorest.Encode(\"query\",name)\n }\n if len(description) > 0 {\n queryParameters[\"description\"] = autorest.Encode(\"query\",description)\n }\n if len(discoveredID) > 0 {\n queryParameters[\"discoveredId\"] = autorest.Encode(\"query\",discoveredID)\n }\n if len(discoveredName) > 0 {\n queryParameters[\"discoveredName\"] = autorest.Encode(\"query\",discoveredName)\n }\n if len(discoveredDescription) > 0 {\n queryParameters[\"discoveredDescription\"] = autorest.Encode(\"query\",discoveredDescription)\n }\n if len(query) > 0 {\n queryParameters[\"query\"] = autorest.Encode(\"query\",query)\n }\n if len(connectionID) > 0 {\n queryParameters[\"connectionId\"] = 
autorest.Encode(\"query\",connectionID)\n }\n if len(profilingJobID) > 0 {\n queryParameters[\"profilingJobId\"] = autorest.Encode(\"query\",profilingJobID)\n }\n if len(publisherMetaJSON) > 0 {\n queryParameters[\"publisherMetaJson\"] = autorest.Encode(\"query\",publisherMetaJSON)\n }\n if len(capability) > 0 {\n queryParameters[\"capability\"] = autorest.Encode(\"query\",capability)\n }\n if len(createdBy) > 0 {\n queryParameters[\"createdBy\"] = autorest.Encode(\"query\",createdBy)\n }\n if len(updatedBy) > 0 {\n queryParameters[\"updatedBy\"] = autorest.Encode(\"query\",updatedBy)\n }\n if len(deletedBy) > 0 {\n queryParameters[\"deletedBy\"] = autorest.Encode(\"query\",deletedBy)\n }\n\n preparer := autorest.CreatePreparer(\n autorest.AsGet(),\n autorest.WithBaseURL(client.BaseURI),\n autorest.WithPath(\"/v1/schemas\"),\n autorest.WithQueryParameters(queryParameters))\n return preparer.Prepare((&http.Request{}).WithContext(ctx))\n }", "title": "" }, { "docid": "2ecfa78e03f77107c8b25bf20e028f97", "score": "0.5710285", "text": "func (client AppsClient) CreateOrUpdateSlotPreparer(resourceGroupName string, name string, siteEnvelope Site, slot string, skipDNSRegistration *bool, skipCustomDomainVerification *bool, forceDNSRegistration *bool, TTLInSeconds string, cancel <-chan struct{}) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"slot\": autorest.Encode(\"path\", slot),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2016-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif skipDNSRegistration != nil {\n\t\tqueryParameters[\"skipDnsRegistration\"] = autorest.Encode(\"query\", *skipDNSRegistration)\n\t}\n\tif skipCustomDomainVerification != nil {\n\t\tqueryParameters[\"skipCustomDomainVerification\"] = autorest.Encode(\"query\", *skipCustomDomainVerification)\n\t}\n\tif forceDNSRegistration != nil {\n\t\tqueryParameters[\"forceDnsRegistration\"] = autorest.Encode(\"query\", *forceDNSRegistration)\n\t}\n\tif len(TTLInSeconds) > 0 {\n\t\tqueryParameters[\"ttlInSeconds\"] = autorest.Encode(\"query\", TTLInSeconds)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/slots/{slot}\", pathParameters),\n\t\tautorest.WithJSON(siteEnvelope),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{Cancel: cancel})\n}", "title": "" }, { "docid": "af306b3e29e255d10e02ccf8d5d0a801", "score": "0.56982327", "text": "func (client ManagementClient) WrapKeyPreparer(vaultBaseURL string, keyName string, keyVersion string, parameters KeyOperationsParameters) (*http.Request, error) {\n\turlParameters := map[string]interface{}{\n\t\t\"vaultBaseUrl\": vaultBaseURL,\n\t}\n\n\tpathParameters := map[string]interface{}{\n\t\t\"key-name\": autorest.Encode(\"path\", keyName),\n\t\t\"key-version\": autorest.Encode(\"path\", keyVersion),\n\t}\n\n\tconst APIVersion = \"2015-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPost(),\n\t\tautorest.WithCustomBaseURL(\"{vaultBaseUrl}\", 
urlParameters),\n\t\tautorest.WithPathParameters(\"/keys/{key-name}/{key-version}/wrapkey\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "8203e480dc64ed08dc4fdfc3849a50c3", "score": "0.5687642", "text": "func (client AppsClient) StartPreparer(resourceGroupName string, name string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2016-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/start\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "55ac937fe236edc0c8c9e677dd8944b2", "score": "0.5680665", "text": "func (client PetsClient) CreateCatAPTruePreparer(ctx context.Context, createParameters CatAPTrue) (*http.Request, error) {\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/additionalProperties/true-subclass\"),\n\t\tautorest.WithJSON(createParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "6517a3d70b80d858d8b7b26d55f6ab94", "score": "0.56630135", "text": "func (client ConsumerInvitationsClient) GetPreparer(ctx context.Context, location string, invitationID string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"invitationId\": autorest.Encode(\"path\", invitationID),\n\t\t\"location\": autorest.Encode(\"path\", location),\n\t}\n\n\tconst APIVersion = \"2019-11-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/providers/Microsoft.DataShare/locations/{location}/consumerInvitations/{invitationId}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "9bc2dd647c480b218c20121b7f902ec3", "score": "0.56537884", "text": "func (client BaseClient) UpdateSchemaMethodPreparer(ctx context.Context, ID string, model SchemaType) (*http.Request, error) {\n pathParameters := map[string]interface{} {\n \"id\": autorest.Encode(\"path\",ID),\n }\n\n preparer := autorest.CreatePreparer(\n autorest.AsContentType(\"application/json-patch+json; charset=utf-8\"),\n autorest.AsPut(),\n autorest.WithBaseURL(client.BaseURI),\n autorest.WithPathParameters(\"/v1/schemas/{id}\",pathParameters),\n autorest.WithJSON(model))\n return preparer.Prepare((&http.Request{}).WithContext(ctx))\n }", "title": "" }, { "docid": "e922b0d8aaefa097d890ac00ce190d7d", "score": "0.5647056", "text": "func (client AppsClient) GetPreparer(resourceGroupName string, name string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": 
autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2016-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "b876a4d5994c1f46a47419370867d8c2", "score": "0.56461513", "text": "func (client AppsClient) ListMetadataPreparer(resourceGroupName string, name string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2016-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/config/metadata/list\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "bf41ab657e45cd46dfd74065fae5a1bc", "score": "0.56424636", "text": "func (client RulesClient) UpdatePreparer(ctx context.Context, resourceGroupName string, profileName string, ruleSetName string, ruleName string, ruleUpdateProperties RuleUpdateParameters) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"profileName\": autorest.Encode(\"path\", profileName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"ruleName\": autorest.Encode(\"path\", ruleName),\n\t\t\"ruleSetName\": autorest.Encode(\"path\", ruleSetName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2020-09-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/ruleSets/{ruleSetName}/rules/{ruleName}\", pathParameters),\n\t\tautorest.WithJSON(ruleUpdateProperties),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "7e1cd434e46676f3702f76b063d5de85", "score": "0.5636317", "text": "func (client AppsClient) CreateOrUpdateSourceControlSlotPreparer(resourceGroupName string, name string, siteSourceControl SiteSourceControl, slot string, cancel <-chan struct{}) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"slot\": autorest.Encode(\"path\", slot),\n\t\t\"subscriptionId\": 
autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2016-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/slots/{slot}/sourcecontrols/web\", pathParameters),\n\t\tautorest.WithJSON(siteSourceControl),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{Cancel: cancel})\n}", "title": "" }, { "docid": "7c8e863d01b33830f3ec4ddb2d243d93", "score": "0.5630537", "text": "func (client IntGroupClient) PutUnixTimeDatePreparer(intBody string) (*http.Request, error) {\n preparer := autorest.CreatePreparer(\n autorest.AsJSON(),\n autorest.AsPut(),\n autorest.WithBaseURL(client.BaseURI),\n autorest.WithPath(\"/int/unixtime\"),\n autorest.WithJSON(intBody))\n return preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "dbf266dbe84f8263d4e21328263bb852", "score": "0.5630373", "text": "func (client LabsClient) GetPreparer(ctx context.Context, resourceGroupName string, labName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"labName\": autorest.Encode(\"path\", labName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2021-11-15-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "404c7d629b021349a1819c236c8102aa", "score": "0.56295323", "text": "func (client ManagedVirtualNetworksClient) GetPreparer(ctx context.Context, resourceGroupName string, factoryName string, managedVirtualNetworkName string, ifNoneMatch string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"factoryName\": autorest.Encode(\"path\", factoryName),\n\t\t\"managedVirtualNetworkName\": autorest.Encode(\"path\", managedVirtualNetworkName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\tif len(ifNoneMatch) > 0 {\n\t\tpreparer = autorest.DecoratePreparer(preparer,\n\t\t\tautorest.WithHeader(\"If-None-Match\", autorest.String(ifNoneMatch)))\n\t}\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "67891c6add2a887420aaa593bbb2d775", "score": 
"0.56283224", "text": "func (client ProviderClient) ListOperationsPreparer(ctx context.Context) (*http.Request, error) {\n\tconst APIVersion = \"2021-03-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/providers/Microsoft.Web/operations\"),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "f2f1f38362aa4711d82664ca91b9ff92", "score": "0.56252", "text": "func (client StorageTargetClient) SuspendPreparer(ctx context.Context, resourceGroupName string, cacheName string, storageTargetName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"cacheName\": autorest.Encode(\"path\", cacheName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"storageTargetName\": autorest.Encode(\"path\", storageTargetName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2021-09-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StorageCache/caches/{cacheName}/storageTargets/{storageTargetName}/suspend\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "7873359f5611eb1bd077da0afdeb1a18", "score": "0.5622321", "text": "func (client PetsClient) CreateAPInPropertiesPreparer(ctx context.Context, createParameters PetAPInProperties) (*http.Request, error) {\n\tcreateParameters.Status = nil\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/additionalProperties/in/properties\"),\n\t\tautorest.WithJSON(createParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "14b41d405276ccabed03224a0f237559", "score": "0.56186736", "text": "func (client BaseClient) CreateServicePreparer(ctx context.Context, applicationID string, serviceDescription BasicServiceDescription, timeout *int64) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"applicationId\": applicationID,\n\t}\n\n\tconst APIVersion = \"6.0\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif timeout != nil {\n\t\tqueryParameters[\"timeout\"] = autorest.Encode(\"query\", *timeout)\n\t} else {\n\t\tqueryParameters[\"timeout\"] = autorest.Encode(\"query\", 60)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/Applications/{applicationId}/$/GetServices/$/Create\", pathParameters),\n\t\tautorest.WithJSON(serviceDescription),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "791c40a9d90db95a0b6c4d85c6a76279", "score": "0.5614343", "text": "func (client DevicesClient) GetPreparer(ctx 
context.Context, deviceName string, resourceGroupName string, managerName string, expand string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"deviceName\": deviceName,\n\t\t\"managerName\": managerName,\n\t\t\"resourceGroupName\": resourceGroupName,\n\t\t\"subscriptionId\": client.SubscriptionID,\n\t}\n\n\tconst APIVersion = \"2017-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif len(expand) > 0 {\n\t\tqueryParameters[\"$expand\"] = autorest.Encode(\"query\", expand)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/devices/{deviceName}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "97aa5f01a6bf597fb487e9da2f485034", "score": "0.5606364", "text": "func (client Datetimerfc1123Client) PutUtcMaxDateTimePreparer(ctx context.Context, datetimeBody date.TimeRFC1123) (*http.Request, error) {\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/datetimerfc1123/max\"),\n\t\tautorest.WithJSON(datetimeBody))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "47da11784f31535a48b70a5c86e36592", "score": "0.56050825", "text": "func (client BaseClient) CheckResourceNamePreparer(ctx context.Context, resourceNameDefinition *ResourceName) (*http.Request, error) {\n\tconst APIVersion = \"2016-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPath(\"/providers/Microsoft.Resources/checkResourceName\"),\n\t\tautorest.WithQueryParameters(queryParameters))\n\tif resourceNameDefinition != nil {\n\t\tpreparer = autorest.DecoratePreparer(preparer,\n\t\t\tautorest.WithJSON(resourceNameDefinition))\n\t}\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "4d5d1daf0ea50fc481b68ab2d79a95b5", "score": "0.56020325", "text": "func (client NamespacesClient) CreateOrUpdateAuthorizationRulePreparer(resourceGroupName string, namespaceName string, authorizationRuleName string, parameters AuthorizationRule) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"authorizationRuleName\": autorest.Encode(\"path\", authorizationRuleName),\n\t\t\"namespaceName\": autorest.Encode(\"path\", namespaceName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2017-04-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/AuthorizationRules/{authorizationRuleName}\", 
pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "ec574761e91b62463885c4000cd9cc36", "score": "0.5592401", "text": "func (client TestJobClient) SuspendPreparer(ctx context.Context, resourceGroupName string, automationAccountName string, runbookName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"automationAccountName\": autorest.Encode(\"path\", automationAccountName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"runbookName\": autorest.Encode(\"path\", runbookName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-06-30\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/runbooks/{runbookName}/draft/testJob/suspend\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "title": "" }, { "docid": "03caeefe4f93606135d5284c803e7c9c", "score": "0.5590426", "text": "func (client BaseClient) CreateSchemaProfileMethodPreparer(ctx context.Context, input SchemaProfileType) (*http.Request, error) {\n preparer := autorest.CreatePreparer(\n autorest.AsContentType(\"application/json-patch+json; charset=utf-8\"),\n autorest.AsPost(),\n autorest.WithBaseURL(client.BaseURI),\n autorest.WithPath(\"/v1/schemaprofiles\"),\n autorest.WithJSON(input))\n return preparer.Prepare((&http.Request{}).WithContext(ctx))\n }", "title": "" }, { "docid": "744579d246c5bd761ccd93d75c4e4aa1", "score": "0.558793", "text": "func (client Datetimerfc1123Client) PutUtcMinDateTimePreparer(datetimeBody date.TimeRFC1123) (*http.Request, error) {\n preparer := autorest.CreatePreparer(\n autorest.AsJSON(),\n autorest.AsPut(),\n autorest.WithBaseURL(client.BaseURI),\n autorest.WithPath(\"/datetimerfc1123/min\"),\n autorest.WithJSON(datetimeBody))\n return preparer.Prepare(&http.Request{})\n}", "title": "" }, { "docid": "1b21e33b13647a3b4e9e36ce2b33db85", "score": "0.5586011", "text": "func (client ManagementClient) CreateCertificatePreparer(vaultBaseURL string, certificateName string, parameters CertificateCreateParameters) (*http.Request, error) {\n\turlParameters := map[string]interface{}{\n\t\t\"vaultBaseUrl\": vaultBaseURL,\n\t}\n\n\tpathParameters := map[string]interface{}{\n\t\t\"certificate-name\": autorest.Encode(\"path\", certificateName),\n\t}\n\n\tconst APIVersion = \"2015-06-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsJSON(),\n\t\tautorest.AsPost(),\n\t\tautorest.WithCustomBaseURL(\"{vaultBaseUrl}\", urlParameters),\n\t\tautorest.WithPathParameters(\"/certificates/{certificate-name}/create\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare(&http.Request{})\n}", "title": "" } ]
f44f2076c4b1156203d2013fbf6e7bf4
RecordWin updates a Team's statistics for a win
[ { "docid": "ad80c7f04c4a90f259d94bebaed82002", "score": "0.7844143", "text": "func (team *Team) RecordWin() {\n\tteam.MP++\n\tteam.W++\n\tteam.P += 3\n}", "title": "" } ]
[ { "docid": "557e0c0239dc83245f5e789b2a90b86d", "score": "0.8050979", "text": "func (tournament *Tournament) RecordWin(team string) {\n\ttournament.Team(team).RecordWin()\n}", "title": "" }, { "docid": "290989a0de622537ee2ab1c558499beb", "score": "0.68411636", "text": "func (i *InMemoryPlayerStore) RecordWin(name string) {\n\ti.lock.RLock()\n\tdefer i.lock.RUnlock()\n\ti.store[name]++\n}", "title": "" }, { "docid": "1554c819e787da4a8c25ec6fba1b21be", "score": "0.6337188", "text": "func (tournament *Tournament) RecordDraw(team string) {\n\ttournament.Team(team).RecordDraw()\n}", "title": "" }, { "docid": "9f868279631f9c2384af1275493cf51a", "score": "0.6304149", "text": "func (store *PostgresPlayerStore) RecordWin(name string) {\n\n\tdbname := \"gwt_dev\"\n\n\tpsqlInfo := fmt.Sprintf(\"dbname=%s sslmode=disable\", dbname)\n\n\tdb, err := sql.Open(\"postgres\", psqlInfo)\n\n\tdefer db.Close()\n\n\tif err != nil {\n\t\tfmt.Printf(\"Got error when connecting to db! %s\\n\", err)\n\t\treturn\n\t}\n\trows, err := db.Query(`SELECT score FROM player_scores WHERE name = $1`, name)\n\tif err != nil {\n\t\tfmt.Printf(\"Got error when querying to db! %s\\n\", err)\n\t\treturn\n\t}\n\tvar currentScore int\n\n\tif rows.Next() {\n\t\tif err := rows.Scan(&currentScore); err != nil {\n\t\t\tfmt.Printf(\"Got error scanning score! %s\", err.Error())\n\t\t}\n\t\tsqlStatement := `UPDATE player_scores SET score = $1 WHERE name = $2`\n\t\t_, err = db.Exec(sqlStatement, currentScore+1, name)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Got error when updating db! %s\\n\", err)\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tsqlStatement := `INSERT INTO player_scores VALUES ($1, 1)`\n\t\t_, err = db.Exec(sqlStatement, name)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Got error when inserting into db! 
%s\\n\", err)\n\t\t\treturn\n\t\t}\n\t}\n\n}", "title": "" }, { "docid": "9ec17dca7488b36c36ae2ebb9d7b1085", "score": "0.6265378", "text": "func (team *Team) RecordDraw() {\n\tteam.MP++\n\tteam.D++\n\tteam.P++\n}", "title": "" }, { "docid": "3c195feab69881ec3fb31b7115f10455", "score": "0.58899", "text": "func (m *MockPlayerStore) RecordWin(name string) {\n\tm.ctrl.Call(m, \"RecordWin\", name)\n}", "title": "" }, { "docid": "39ce571a6ff88ec42382649601992522", "score": "0.58144677", "text": "func (m *ActiveMatch) RecordGame(winner kq.Side, winType kq.WinType) {\n\tm.Games = append(m.Games, &GameScore{\n\t\tTeamASide: m.TeamASide,\n\t\tWinner: winner,\n\t\tWinType: winType,\n\t})\n\tif winner == m.TeamASide {\n\t\tm.ScoreA++\n\t} else {\n\t\tm.ScoreB++\n\t}\n}", "title": "" }, { "docid": "f331bbc3dcba0171a137b446a9be5dc1", "score": "0.5803318", "text": "func (team *Team) RecordLoss() {\n\tteam.MP++\n\tteam.L++\n}", "title": "" }, { "docid": "5847ea1f84f06d854e07bd4a6803a07e", "score": "0.56006944", "text": "func (tournament *Tournament) RecordLoss(team string) {\n\ttournament.Team(team).RecordLoss()\n}", "title": "" }, { "docid": "4429d31b988ecf37d02394b34931eb27", "score": "0.5599335", "text": "func (mr *MockPlayerStoreMockRecorder) RecordWin(name interface{}) *gomock.Call {\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"RecordWin\", reflect.TypeOf((*MockPlayerStore)(nil).RecordWin), name)\n}", "title": "" }, { "docid": "5794060f8236b5315d728c5e8bfd1094", "score": "0.5593081", "text": "func doWin(base *BaseGame) {\n\t// Set game state\n\tbase.GameState = \"WON\"\n\tbase.Message = \"Game shot!\"\n}", "title": "" }, { "docid": "c065c1cc004f76fbd3c2d85c29cc7cc2", "score": "0.54283684", "text": "func TestUpdateScoreWinningSet(t *testing.T) {\n\tvar score models.Score\n\n\tassertHandler := assert.New(t)\n\tnow := time.Now()\n\tinitialUUID, _ := uuid.NewV4()\n\n\tfirstUserGoal := goal{\n\t\tScorer: \"user1\",\n\t\tOpponent: \"user2\",\n\t\tPlayer: \"p1\",\n\t\tGamelle: false,\n\t}\n\n\tsecondUserGoal := goal{\n\t\tScorer: \"user2\",\n\t\tOpponent: \"user1\",\n\t\tPlayer: \"p1\",\n\t\tGamelle: false,\n\t}\n\n\tfirstUserToWinScore := models.Score{\n\t\tID: initialUUID,\n\t\tCreatedAt: now,\n\t\tUpdatedAt: now,\n\t\tUser1Id: \"user1\",\n\t\tUser2Id: \"user2\",\n\t\tUser1Points: 9,\n\t\tUser2Points: 4,\n\t\tUser1Sets: 2,\n\t\tUser2Sets: 1,\n\t\tGoalsInBalance: 0,\n\t}\n\n\tawaitedFirstUserToWinAfterFirstUserGoalScore := models.Score{\n\t\tID: initialUUID,\n\t\tCreatedAt: now,\n\t\tUpdatedAt: now,\n\t\tUser1Id: \"user1\",\n\t\tUser2Id: \"user2\",\n\t\tUser1Points: 0,\n\t\tUser2Points: 0,\n\t\tUser1Sets: 3,\n\t\tUser2Sets: 1,\n\t\tGoalsInBalance: 0,\n\t}\n\n\tscore = firstUserToWinScore\n\t_ = updateScore(&score, firstUserGoal)\n\tassertHandler.Equal(awaitedFirstUserToWinAfterFirstUserGoalScore, score, \"User1 winning set (without goals in balance): score not updated as expected\")\n\n\tawaitedFirstUserToWinAfterSecondUserGoalScore := models.Score{\n\t\tID: initialUUID,\n\t\tCreatedAt: now,\n\t\tUpdatedAt: now,\n\t\tUser1Id: \"user1\",\n\t\tUser2Id: \"user2\",\n\t\tUser1Points: 9,\n\t\tUser2Points: 5,\n\t\tUser1Sets: 2,\n\t\tUser2Sets: 1,\n\t\tGoalsInBalance: 0,\n\t}\n\n\tscore = firstUserToWinScore\n\t_ = updateScore(&score, secondUserGoal)\n\tassertHandler.Equal(awaitedFirstUserToWinAfterSecondUserGoalScore, score, \"User2 scored while user1 about to win: score not updated as expected\")\n\n\tsecondUserToWinScore := models.Score{\n\t\tID: initialUUID,\n\t\tCreatedAt: now,\n\t\tUpdatedAt: 
now,\n\t\tUser1Id: \"user1\",\n\t\tUser2Id: \"user2\",\n\t\tUser1Points: 5,\n\t\tUser2Points: 9,\n\t\tUser1Sets: 6,\n\t\tUser2Sets: 7,\n\t\tGoalsInBalance: 0,\n\t}\n\n\tawaitedSecondUserToWinAfterFirstUserGoalScore := models.Score{\n\t\tID: initialUUID,\n\t\tCreatedAt: now,\n\t\tUpdatedAt: now,\n\t\tUser1Id: \"user1\",\n\t\tUser2Id: \"user2\",\n\t\tUser1Points: 6,\n\t\tUser2Points: 9,\n\t\tUser1Sets: 6,\n\t\tUser2Sets: 7,\n\t\tGoalsInBalance: 0,\n\t}\n\n\tscore = secondUserToWinScore\n\t_ = updateScore(&score, firstUserGoal)\n\tassertHandler.Equal(awaitedSecondUserToWinAfterFirstUserGoalScore, score, \"User1 scored while user2 about to win: score not updated as expected\")\n\n\tawaitedSecondUserToWinAfterSecondUserGoalScore := models.Score{\n\t\tID: initialUUID,\n\t\tCreatedAt: now,\n\t\tUpdatedAt: now,\n\t\tUser1Id: \"user1\",\n\t\tUser2Id: \"user2\",\n\t\tUser1Points: 0,\n\t\tUser2Points: 0,\n\t\tUser1Sets: 6,\n\t\tUser2Sets: 8,\n\t\tGoalsInBalance: 0,\n\t}\n\n\tscore = secondUserToWinScore\n\t_ = updateScore(&score, secondUserGoal)\n\tassertHandler.Equal(awaitedSecondUserToWinAfterSecondUserGoalScore, score, \"User2 winning set (without goals in balance): score not updated as expected\")\n\n\tfirstUserToWinScoreWithGoalsInBalance := models.Score{\n\t\tID: initialUUID,\n\t\tCreatedAt: now,\n\t\tUpdatedAt: now,\n\t\tUser1Id: \"user1\",\n\t\tUser2Id: \"user2\",\n\t\tUser1Points: 9,\n\t\tUser2Points: 4,\n\t\tUser1Sets: 2,\n\t\tUser2Sets: 1,\n\t\tGoalsInBalance: 6,\n\t}\n\n\tawaitedFirstUserToWinAfterFirstUserGoalScoreWithGoalsInBalance := models.Score{\n\t\tID: initialUUID,\n\t\tCreatedAt: now,\n\t\tUpdatedAt: now,\n\t\tUser1Id: \"user1\",\n\t\tUser2Id: \"user2\",\n\t\tUser1Points: 0,\n\t\tUser2Points: 0,\n\t\tUser1Sets: 3,\n\t\tUser2Sets: 1,\n\t\tGoalsInBalance: 0,\n\t}\n\n\tscore = firstUserToWinScoreWithGoalsInBalance\n\t_ = updateScore(&score, firstUserGoal)\n\tassertHandler.Equal(awaitedFirstUserToWinAfterFirstUserGoalScoreWithGoalsInBalance, score, \"User1 winning set (with goals in balance): score not updated as expected\")\n\n\tsecondUserToWinScoreWithGoalsInBalance := models.Score{\n\t\tID: initialUUID,\n\t\tCreatedAt: now,\n\t\tUpdatedAt: now,\n\t\tUser1Id: \"user1\",\n\t\tUser2Id: \"user2\",\n\t\tUser1Points: 5,\n\t\tUser2Points: 9,\n\t\tUser1Sets: 6,\n\t\tUser2Sets: 7,\n\t\tGoalsInBalance: 2,\n\t}\n\n\tawaitedSecondUserToWinAfterSecondUserGoalScoreWithGoalsInBalance := models.Score{\n\t\tID: initialUUID,\n\t\tCreatedAt: now,\n\t\tUpdatedAt: now,\n\t\tUser1Id: \"user1\",\n\t\tUser2Id: \"user2\",\n\t\tUser1Points: 0,\n\t\tUser2Points: 0,\n\t\tUser1Sets: 6,\n\t\tUser2Sets: 8,\n\t\tGoalsInBalance: 0,\n\t}\n\n\tscore = secondUserToWinScoreWithGoalsInBalance\n\t_ = updateScore(&score, secondUserGoal)\n\tassertHandler.Equal(awaitedSecondUserToWinAfterSecondUserGoalScoreWithGoalsInBalance, score, \"User2 winning set (with goals in balance): score not updated as expected\")\n\n}", "title": "" }, { "docid": "abaa3b3878494bfc5a6665818bf32f05", "score": "0.52709085", "text": "func recordSingleGamesStats(game *singleBiasGame) {\n\n\t// get guildID from game channel\n\tchannel, _ := cache.GetDiscordSession().State.Channel(game.channelID)\n\tguild, err := cache.GetDiscordSession().State.Guild(channel.GuildID)\n\tif err != nil {\n\t\tfmt.Println(\"Error getting guild when recording stats\")\n\t\treturn\n\t}\n\n\t// create a bias game entry\n\tbiasGameEntry := &models.BiasGameEntry{\n\t\tID: \"\",\n\t\tUserID: game.user.ID,\n\t\tGuildID: guild.ID,\n\t\tGameType: \"single\",\n\t\tGender: 
game.gender,\n\t\tRoundWinners: compileGameWinnersLosers(game.roundWinners),\n\t\tRoundLosers: compileGameWinnersLosers(game.roundLosers),\n\t\tGameWinner: models.BiasEntry{\n\t\t\tName: game.gameWinnerBias.biasName,\n\t\t\tGroupName: game.gameWinnerBias.groupName,\n\t\t\tGender: game.gameWinnerBias.gender,\n\t\t},\n\t}\n\n\tutils.MongoDBInsert(models.BiasGameTable, biasGameEntry)\n}", "title": "" }, { "docid": "f9698470c26453d9b5ee85b5550bab3e", "score": "0.52065116", "text": "func checkWin(coordinate coordinate, g *game) {\n\tif capturedTen(g) == true {\n\t\t// Player wins by capturing 10!\n\t\trecordWin(g, g.player)\n\t\tg.winMove = coordinate\n\t} else if g.align5.capture8 == true {\n\t\t// Opponent wins by aligning 5. Player could have won by capturing ten, but didn't, silly!\n\t\trecordWin(g, opponent(g.player))\n\t} else if g.align5.break5 == true {\n\t\tif positionOccupiedByPlayer(g.winMove, &g.goban, opponent(g.player)) == true &&\n\t\t\talignFive(g.winMove, &g.goban, &g.align5, opponent(g.player), g.capture0, g.capture1) == true {\n\t\t\t// Opponent wins by aligning 5. Player could have broken this alignment by capturing, but didn't, silly!\n\t\t\trecordWin(g, opponent(g.player))\n\t\t\treturn\n\t\t}\n\t\tg.align5.break5 = false\n\t}\n\tif alignFive(coordinate, &g.goban, &g.align5, g.player, g.capture0, g.capture1) == true {\n\t\tif g.align5.break5 == false && g.align5.capture8 == false {\n\t\t\t// Player wins by aligning 5!\n\t\t\trecordWin(g, g.player)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "bb060d1abfd809df351481ee247a24f8", "score": "0.51043904", "text": "func (p *UnstructuredPlay) RecordGame(winner kq.Side, winType kq.WinType) {\n\tp.current.RecordGame(winner, winType)\n}", "title": "" }, { "docid": "935fbb74cc03e817c591c6915a4230a4", "score": "0.5081665", "text": "func recordMultiGamesStats(game *multiBiasGame) {\n\n\t// get guildID from game channel\n\tchannel, _ := cache.GetDiscordSession().State.Channel(game.channelID)\n\tguild, err := cache.GetDiscordSession().State.Guild(channel.GuildID)\n\tif err != nil {\n\t\tfmt.Println(\"Error getting guild when recording stats\")\n\t\treturn\n\t}\n\n\t// create a bias game entry\n\tbiasGameEntry := &models.BiasGameEntry{\n\t\tID: \"\",\n\t\tGuildID: guild.ID,\n\t\tGameType: \"multi\",\n\t\tGender: game.gender,\n\t\tRoundWinners: compileGameWinnersLosers(game.roundWinners),\n\t\tRoundLosers: compileGameWinnersLosers(game.roundLosers),\n\t\tGameWinner: models.BiasEntry{\n\t\t\tName: game.gameWinnerBias.biasName,\n\t\t\tGroupName: game.gameWinnerBias.groupName,\n\t\t\tGender: game.gameWinnerBias.gender,\n\t\t},\n\t}\n\n\tutils.MongoDBInsert(models.BiasGameTable, biasGameEntry)\n}", "title": "" }, { "docid": "220f21deed71eb293e20365adaeca7e4", "score": "0.507952", "text": "func funcPayWinners(ctx wasmlib.ScFuncContext, f *PayWinnersContext) {\n\t// Use the built-in random number generator which has been automatically initialized by\n\t// using the transaction hash as initial entropy data. 
Note that the pseudo-random number\n\t// generator will use the next 8 bytes from the hash as its random Int64 number and once\n\t// it runs out of data it simply hashes the previous hash for a next pseudo-random sequence.\n\t// Here we determine the winning number for this round in the range of 1 thru MaxNumber.\n\twinningNumber := uint16(ctx.Random(MaxNumber-1) + 1)\n\n\t// Save the last winning number in state storage under 'lastWinningNumber' so that there\n\t// is (limited) time for people to call the 'getLastWinningNumber' View to verify the last\n\t// winning number if they wish. Note that this is just a silly example. We could log much\n\t// more extensive statistics information about each playing round in state storage and\n\t// make that data available through views for anyone to see.\n\tf.State.LastWinningNumber().SetValue(winningNumber)\n\n\t// Gather all winners and calculate some totals at the same time.\n\t// Keep track of the total bet amount, the total win amount, and all the winners.\n\t// Note how we decided to keep the winners in a local vector instead of creating\n\t// yet another array in state storage or having to go through lockedBets again.\n\ttotalBetAmount := uint64(0)\n\ttotalWinAmount := uint64(0)\n\twinners := make([]*fairroulette.Bet, 0)\n\n\t// Get the 'bets' array in state storage.\n\tbets := f.State.Bets()\n\n\t// Determine the amount of bets in the 'bets' array.\n\tnrOfBets := bets.Length()\n\n\t// Loop through all indexes of the 'bets' array.\n\tfor i := uint32(0); i < nrOfBets; i++ {\n\t\t// Retrieve the bet stored at the next index\n\t\tbet := bets.GetBet(i).Value()\n\n\t\t// Add this bet's amount to the running total bet amount\n\t\ttotalBetAmount += bet.Amount\n\n\t\t// Did this better bet on the winning number?\n\t\tif bet.Number == winningNumber {\n\t\t\t// Yes, add this bet amount to the running total win amount.\n\t\t\ttotalWinAmount += bet.Amount\n\n\t\t\t// And save this bet in the winners vector.\n\t\t\twinners = append(winners, bet)\n\t\t}\n\t}\n\n\t// Now that we preprocessed all bets we can get rid of the data in state storage\n\t// so that the 'bets' array becomes available for when the next betting round ends.\n\tbets.Clear()\n\n\tf.Events.Winner(winningNumber)\n\t// Did we have any winners at all?\n\tif len(winners) == 0 {\n\t\t// No winners, log this fact to the log on the host.\n\t\tctx.Log(\"Nobody wins!\")\n\t}\n\n\t// Pay out the winners proportionally to their bet amount. Note that we could configure\n\t// a small percentage that would go to the owner of the smart contract as hosting payment.\n\n\t// Keep track of the total payout so we can calculate the remainder after truncation.\n\ttotalPayout := uint64(0)\n\n\t// Loop through all winners.\n\tsize := len(winners)\n\tfor i := 0; i < size; i++ {\n\t\t// Get the next winner.\n\t\tbet := winners[i]\n\n\t\t// Determine the proportional win amount (we could take our percentage here)\n\t\tpayout := totalBetAmount * bet.Amount / totalWinAmount\n\n\t\t// Anything to pay to the winner?\n\t\tif payout != 0 {\n\t\t\t// Yep, keep track of the running total payout\n\t\t\ttotalPayout += payout\n\n\t\t\t// Set up an ScTransfer proxy that transfers the correct amount of tokens.\n\t\t\t// Note that ScTransfer wraps an ScMutableMap of token color/amount combinations\n\t\t\t// in a simpler to use interface. The constructor we use here creates and initializes\n\t\t\t// a single token color transfer in a single statement. 
The actual color and amount\n\t\t\t// values passed in will be stored in a new map on the host.\n\t\t\ttransfers := wasmlib.ScTransferFromBaseTokens(payout)\n\n\t\t\t// Perform the actual transfer of tokens from the smart contract to the address\n\t\t\t// of the winner. The transfer_to_address() method receives the address value and\n\t\t\t// the proxy to the new transfers map on the host, and will call the corresponding\n\t\t\t// host sandbox function with these values.\n\t\t\tctx.Send(bet.Better.Address(), transfers)\n\t\t}\n\n\t\t// Announce who got sent what as event.\n\t\tf.Events.Payout(bet.Better.Address(), payout)\n\t}\n\n\t// This is where we transfer the remainder after payout to the creator of the smart contract.\n\t// The bank always wins :-P\n\tremainder := totalBetAmount - totalPayout\n\tif remainder != 0 {\n\t\t// We have a remainder. First create a transfer for the remainder.\n\t\ttransfers := wasmlib.ScTransferFromBaseTokens(remainder)\n\n\t\t// Send the remainder to the contract owner.\n\t\tctx.Send(f.State.Owner().Value().Address(), transfers)\n\t}\n\n\t// Set round status to 0, send out event to notify that the round has ended\n\tf.State.RoundStatus().SetValue(0)\n\tf.Events.Stop()\n}", "title": "" }, { "docid": "41ee455064e9e1950a0dc07ea8993a53", "score": "0.5036389", "text": "func (sR *StandingRecord) UpdateRecord(db *sql.DB) {\n\t/*\n\t\t1. If this is a unique record, insert it.\n\t*/\n\tstatement := `SET timezone='UTC';`\n\t_, err := db.Exec(statement)\n\tif err != nil {\n\t\tif pqerr, ok := err.(*pq.Error); ok {\n\t\t\tfmt.Println(\"pq error:\", pqerr.Code.Name())\n\t\t} else {\n\t\t\tfmt.Println(err)\n\t\t}\n\t}\n\tstatement = `INSERT INTO StandingRecord VALUES ($1,$2,$3,$4,$5,$6,$7);`\n\t_, err = db.Exec(statement, sR.EffectiveDate.UTC(), sR.TeamID, sR.Wins, sR.Losses, sR.Wins+sR.Losses, sR.GamesBack, sR.WildcardGamesBack)\n\tif err != nil {\n\t\tif pqerr, ok := err.(*pq.Error); ok {\n\t\t\tif pqerr.Code.Name() == \"unique_violation\" {\n\t\t\t\tvar existingGamesPlayed int\n\t\t\t\tstatement = `SELECT gamesplayed FROM StandingRecord WHERE effectivedate = $1 AND teamid = $2;`\n\t\t\t\terr = db.QueryRow(statement, sR.EffectiveDate.UTC(), sR.TeamID).Scan(&existingGamesPlayed)\n\t\t\t\tif err != nil {\n\t\t\t\t\tif pqerr, ok := err.(*pq.Error); ok {\n\t\t\t\t\t\tfmt.Println(\"pq error:\", pqerr.Code.Name())\n\t\t\t\t\t} else {\n\t\t\t\t\t\tfmt.Println(err)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif sR.Wins+sR.Losses > existingGamesPlayed {\n\t\t\t\t\tstatement = `UPDATE StandingRecord SET\n\t\t\t\t\twins = $1, losses = $2, gamesplayed = $3, gamesback = $4, wildcardgamesback = $5\n\t\t\t\t\tWHERE effectivedate = $6 AND teamid = $7;`\n\t\t\t\t\t_, err = db.Exec(statement, sR.Wins, sR.Losses, sR.GamesBack, sR.WildcardGamesBack, sR.Wins+sR.Losses, sR.EffectiveDate.UTC(), sR.TeamID)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tif pqerr, ok := err.(*pq.Error); ok {\n\t\t\t\t\t\t\tfmt.Println(\"pq error:\", pqerr.Code.Name())\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tfmt.Println(err)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tfmt.Println(\"pq error:\", pqerr.Code.Name())\n\t\t\t}\n\t\t} else {\n\t\t\tfmt.Println(err)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "ff28572a9350b917129d9b5b8c5a5394", "score": "0.50055414", "text": "func (store mongoDatastore) TeamEventStats(tag EventTag, number int) (TeamStats, error) {\n\tvar stats TeamStats\n\n\tvar team Team\n\tif err := store.fetchOne(teamCollection, bson.M{\"_id\": number}, &team); err == nil {\n\t\tstats.OPR = 
team.OPR\n\t}\n\n\titer := store.C(matchCollection(tag)).Find(bson.M{\"teams.team\": number}).Limit(matchLimit).Iter()\n\n\tvar match Match\n\tstats.EventTag = tag\n\tfor iter.Next(&match) {\n\t\tvar i int\n\t\tfor i = 0; i < len(match.Teams); i++ {\n\t\t\tif match.Teams[i].Team == number {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif i >= len(match.Teams) {\n\t\t\t// Team not found in match. This shouldn't be hit.\n\t\t\t// TODO: Log problem\n\t\t\tcontinue\n\t\t}\n\n\t\tif match.Teams[i].NoShow {\n\t\t\tstats.NoShowCount++\n\t\t\tcontinue\n\t\t}\n\n\t\tif match.Score == nil {\n\t\t\tcontinue\n\t\t}\n\n\t\tstats.MatchCount++\n\t\tstats.TotalPoints += match.Teams[i].Score\n\t\tif match.Teams[i].Failure {\n\t\t\tstats.FailureCount++\n\t\t}\n\t\tif shot := stats.TeleoperatedBalls.Total(); shot > stats.MaxTeleoperatedShot {\n\t\t\tstats.MaxTeleoperatedShot = shot\n\t\t}\n\t\tif scored := stats.TeleoperatedBalls.TotalScored(); scored > stats.MaxTeleoperatedScored {\n\t\t\tstats.MaxTeleoperatedScored = scored\n\t\t}\n\t\tstats.AutonomousBalls.Add(match.Teams[i].Autonomous)\n\t\tstats.TeleoperatedBalls.Add(match.Teams[i].Teleoperated)\n\t\tstats.CoopBridge.add(match.Teams[i].CoopBridge)\n\t\tstats.TeamBridge1.add(match.Teams[i].TeamBridge1)\n\t\tstats.TeamBridge2.add(match.Teams[i].TeamBridge2)\n\t}\n\treturn stats, iter.Err()\n}", "title": "" }, { "docid": "90d7f7a28d82ea0202d76d11c644dac0", "score": "0.49861816", "text": "func (o *CMFTurntableWin) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {\n\tvar err error\n\tif err = o.doBeforeUpdateHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\tkey := makeCacheKey(columns, nil)\n\tcmfTurntableWinUpdateCacheMut.RLock()\n\tcache, cached := cmfTurntableWinUpdateCache[key]\n\tcmfTurntableWinUpdateCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl := columns.UpdateColumnSet(\n\t\t\tcmfTurntableWinAllColumns,\n\t\t\tcmfTurntableWinPrimaryKeyColumns,\n\t\t)\n\n\t\tif !columns.IsWhitelist() {\n\t\t\twl = strmangle.SetComplement(wl, []string{\"created_at\"})\n\t\t}\n\t\tif len(wl) == 0 {\n\t\t\treturn 0, errors.New(\"models: unable to update cmf_turntable_win, could not build whitelist\")\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(\"UPDATE `cmf_turntable_win` SET %s WHERE %s\",\n\t\t\tstrmangle.SetParamNames(\"`\", \"`\", 0, wl),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, cmfTurntableWinPrimaryKeyColumns),\n\t\t)\n\t\tcache.valueMapping, err = queries.BindMapping(cmfTurntableWinType, cmfTurntableWinMapping, append(wl, cmfTurntableWinPrimaryKeyColumns...))\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tvalues := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, values)\n\t}\n\tvar result sql.Result\n\tresult, err = exec.ExecContext(ctx, cache.query, values...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to update cmf_turntable_win row\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by update for cmf_turntable_win\")\n\t}\n\n\tif !cached {\n\t\tcmfTurntableWinUpdateCacheMut.Lock()\n\t\tcmfTurntableWinUpdateCache[key] = cache\n\t\tcmfTurntableWinUpdateCacheMut.Unlock()\n\t}\n\n\treturn rowsAff, o.doAfterUpdateHooks(ctx, exec)\n}", "title": "" }, { "docid": "76d08ab8295f39b3632ec593b49cd882", "score": "0.49847847", "text": "func 
RecordStats(db *sql.DB) func() {\n\tviewsOnce.Do(func() { ocsql.RegisterAllViews() })\n\treturn ocsql.RecordStats(db, 10*time.Second)\n}", "title": "" }, { "docid": "e95e4a33b11126ddf36381afb2b8bc46", "score": "0.49783164", "text": "func Tally(r io.Reader, w io.Writer) error {\n\treader := bufio.NewScanner(r)\n\n\ttournament := Tournament{}\n\n\tfor reader.Scan() {\n\t\tl := reader.Text()\n\t\tif l == \"\" || l[0] == '#' {\n\t\t\tcontinue\n\t\t}\n\n\t\tf := strings.Split(l, \";\")\n\t\tif len(f) != 3 {\n\t\t\treturn fmt.Errorf(\"tournament: invalid line - %q\", l)\n\t\t}\n\n\t\tswitch f[2] {\n\t\tcase \"win\":\n\t\t\ttournament.RecordWin(f[0])\n\t\t\ttournament.RecordLoss(f[1])\n\t\tcase \"loss\":\n\t\t\ttournament.RecordLoss(f[0])\n\t\t\ttournament.RecordWin(f[1])\n\t\tcase \"draw\":\n\t\t\ttournament.RecordDraw(f[0])\n\t\t\ttournament.RecordDraw(f[1])\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"tournament: invalid result - %q\", f[2])\n\t\t}\n\t}\n\n\to := bufio.NewWriter(w)\n\n\to.WriteString(fmt.Sprintf(\"%-30s | %2s | %2s | %2s | %2s | %2s\\n\", \"Team\", \"MP\", \"W\", \"D\", \"L\", \"P\"))\n\n\tfor _, t := range tournament.Standings() {\n\t\to.WriteString(fmt.Sprintf(\"%-30s | %2d | %2d | %2d | %2d | %2d\\n\", t.Name, t.MP, t.W, t.D, t.L, t.P))\n\t}\n\n\to.Flush()\n\n\treturn nil\n}", "title": "" }, { "docid": "ab43296ae1f4e1e09be2a900f2ca4666", "score": "0.49230888", "text": "func (_Wallet *WalletTransactor) Winthdraw(opts *bind.TransactOpts, _amt *big.Int) (*types.Transaction, error) {\n\treturn _Wallet.contract.Transact(opts, \"Winthdraw\", _amt)\n}", "title": "" }, { "docid": "819f0caf6334090a7ce558ed1157a842", "score": "0.47880208", "text": "func recordWriterStat(ms *ManagedStream, m *stats.Int64Measure, n int64) {\n\tstats.Record(ms.ctx, m.M(n))\n}", "title": "" }, { "docid": "5fe78c9c0af5290006b90b85f04613ff", "score": "0.47779837", "text": "func UndoWin(action undo.Action, base *BaseGame) {\n\tbase.GameState = action.PreviousGameState\n\tbase.Message = action.PreviousMessage\n}", "title": "" }, { "docid": "cc4333fef8091796b9900c83b41fca1f", "score": "0.4772916", "text": "func (d *Dota2) SendMatchMatchmakingStats(\n\taverageQueueTime uint32,\n\tmaximumQueueTime uint32,\n\tbehaviorScoreVariance protocol.EMatchBehaviorScoreVariance,\n) {\n\treq := &protocol.CMsgMatchMatchmakingStats{\n\t\tAverageQueueTime: &averageQueueTime,\n\t\tMaximumQueueTime: &maximumQueueTime,\n\t\tBehaviorScoreVariance: &behaviorScoreVariance,\n\t}\n\td.write(uint32(protocol.EDOTAGCMsg_k_EMsgMatchMatchmakingStats), req)\n}", "title": "" }, { "docid": "b1e66e411930a8b3a7bea3bc3461f776", "score": "0.47485897", "text": "func AddNewTeam(ctx context.Context, tournament *firestore.DocumentRef, currentRound Round) error {\n\tnextRound := currentRound + 1\n\tranking := tournament.Collection(\"ranking\").Doc(nextRound.String())\n\toldRanking := make(map[int]string)\n\tnewRanking := make(map[int]string)\n\trankToUpload := make(map[string]string)\n\tvar s string\n\tvar j int\n\tdoc, err := ranking.Get(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdata := doc.Data()\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Println(currentRound)\n\tfmt.Println(\"Current ranking:\")\n\tfor i := 1; i < len(data)+1; i++ {\n\t\toldRanking[i] = fmt.Sprintf(\"%v\", data[strconv.Itoa(i)])\n\t\tfmt.Println(i, oldRanking[i])\n\t}\n\tfmt.Println(\"Type ranking to insert:\")\n\tfmt.Scanln(&j)\n\tfor i := 1; i < len(data)+2; i++ {\n\t\tif i < j {\n\t\t\tnewRanking[i] = oldRanking[i]\n\t\t} else if i == j {\n\t\t\tfmt.Println(\"Type team 
name:\")\n\t\t\tfmt.Scanln(&s)\n\t\t\tnewRanking[i] = s\n\t\t} else if i > j {\n\t\t\tnewRanking[i] = oldRanking[i-1]\n\t\t}\n\t}\n\tfmt.Println(\"New ranking:\")\n\tfor i := 1; i < len(newRanking)+1; i++ {\n\t\tfmt.Println(i, newRanking[i])\n\t}\n\t// Create a map to upload to Firestore.\n\tfor rank, team := range newRanking {\n\t\tif rank > 0 {\n\t\t\trankToUpload[strconv.Itoa(rank)] = team\n\t\t}\n\t}\n\t// Upload new ranking to Firestore.\n\tfmt.Println(\"Upload? y/n\")\n\tfmt.Scanln(&s)\n\tif s == \"y\" {\n\t\t_, err := tournament.Collection(\"ranking\").Doc(nextRound.String()).Set(ctx, rankToUpload)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "dd9bf12d860cbfd33d0001c74b7cf627", "score": "0.47372374", "text": "func CMFTurntableWins(mods ...qm.QueryMod) cmfTurntableWinQuery {\n\tmods = append(mods, qm.From(\"`cmf_turntable_win`\"))\n\treturn cmfTurntableWinQuery{NewQuery(mods...)}\n}", "title": "" }, { "docid": "7d2c931a4a83c950c079b60602201ff5", "score": "0.46990383", "text": "func (nsc *NilProducerStatsCollector) AddPutRecordsCalled(int) {}", "title": "" }, { "docid": "581bf204f44e3eb667cd2af06cdc9145", "score": "0.46678966", "text": "func (t *TeamStats) GetWins() int {\n\treturn t.Wins\n}", "title": "" }, { "docid": "581bf204f44e3eb667cd2af06cdc9145", "score": "0.46678966", "text": "func (t *TeamStats) GetWins() int {\n\treturn t.Wins\n}", "title": "" }, { "docid": "74e18a48177a571e71bf6aee2b1aea51", "score": "0.46294516", "text": "func SaveWinners(winners *[]Winner) bool {\n\t//Load Stored Data\n\twn := LoadWinners()\n\n\t//Append Prize to be stored\n\t// pr = append(cs, *Winners...)\n\tvar isMailSent bool //Check If Mail is sent\n\n\tfor _, val := range *winners {\n\t\t//check if map key exist\n\t\tif _, ok := wn[val.TimeStamp]; !ok {\n\t\t\t//Send Mail if prize is won\n\t\t\tif val.PrizeWon != \"NO WIN\" {\n\t\t\t\tisMailSent = SendMail(\"Congratulations\", val.Email, val.Name, val.PrizeWon)\n\t\t\t\t//Update Email Sent Data\n\t\t\t\tif isMailSent {\n\t\t\t\t\tval.EmailSent = \"YES\" //Email Sent\n\t\t\t\t} else {\n\t\t\t\t\tval.EmailSent = \"NO\" //Email Failed\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tval.EmailSent = \"NO\" //No prize won\n\t\t\t}\n\t\t\t//Update map\n\t\t\twn[val.TimeStamp] = val\n\t\t}\n\t}\n\t//Convert to Byte\n\tdata, err := json.Marshal(&wn)\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\t//Save back to file\n\tif saveToFile(WinnersPath, &data) {\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "a2127669592d6b63e930b49b3750e34a", "score": "0.46145755", "text": "func WriteEvtWins(evtWin []T_A, fn string, add bool)(err error){\r\n jsonBytes, err := AtoJson(evtWin)\r\n if add{\r\n err = Wr_Byte_(jsonBytes, fn)\r\n }else{\r\n \terr = Wr_Byte(jsonBytes, fn)\r\n }\r\n return\r\n}", "title": "" }, { "docid": "86ed0a8f8df81cc71c6092097a315cb0", "score": "0.46035832", "text": "func (c *CollectiveGameStats) Add(\n region string, gameId string, riotSummonerId int64, stats *riot.GameDto) {\n if c.games == nil {\n c.games = make(map[string]map[int64]*riot.GameDto)\n }\n playerMap, exists := c.games[gameId]\n if !exists {\n playerMap = make(map[int64]*riot.GameDto)\n c.games[gameId] = playerMap\n }\n playerMap[riotSummonerId] = stats\n\n for _, riotOtherPlayer := range stats.FellowPlayers {\n c.stub(region, gameId, riotOtherPlayer.SummonerId)\n }\n}", "title": "" }, { "docid": "a2fa48e65b4ec78c40bfbe514b9a6988", "score": "0.45962366", "text": "func (nsc *NilProducerStatsCollector) 
UpdatePutRecordsSendDuration(time.Duration) {}", "title": "" }, { "docid": "90611a79eb6ee6e7ce41c4ea4add0c2c", "score": "0.459033", "text": "func (s *ActionStats) Record(st time.Time, err error) {\n\tthisRuntime := clock.Now().Sub(st)\n\ts.TotalRuntime += thisRuntime\n\n\tif thisRuntime > s.MaxRuntime {\n\t\ts.MaxRuntime = thisRuntime\n\t}\n\n\tif s.Count == 0 || thisRuntime < s.MinRuntime {\n\t\ts.MinRuntime = thisRuntime\n\t}\n\n\ts.Count++\n\n\tif err != nil {\n\t\ts.ErrorCount++\n\t}\n}", "title": "" }, { "docid": "e052f7df7784b14450d76f7513d6306b", "score": "0.45619464", "text": "func (g *Game) Tally(GiD string) LeadBoard {\n\tmsg := new(Message)\n\t//LeadBoard format - UserID - UserName - UserLowerPlay - UserUperPlay - GameID - GameName -\n\n\t//fmt.Println(GamePlayers)\n\t//jgx, _ := json.Marshal(g)\n\t//fmt.Println(string(jgx))\n\t//KEY DATA to Extract\n\t//GAME ID\n\t//GAME NAME\n\t//STATUS\n\t//TIME STARTED\n\t//TIME ENDED\n\t//==> PLays<==// [{CMAKHCJC flea [3 9 3]} {CMAKHCJC flea [7 8 3]} {XRKISJDD dogfish [3 8 21]} {ZFCKXWLU baboon [3 9 4]} {LWBWCNQB guinea [3 8 22]} {LXEAIRJI badger [7 9 4]}]\n\t//TopScorer\n\t//Low Scorer\n\t//Winner (by score, by jackpot, by name in case of a tie)\n\t//\n\tvar TopScorer = make([]Play, 1)\n\tvar LowScorer = make([]Play, 1)\n\tvar WinScorer = make([]Play, 1)\n\t//var JackScorer = make([]Play, 1)\n\t//var TopTieScorer = make([]Play, len(g.Plays))\n\tvar TopTieScorer []Play //may cause index range errors check properly\n\tvar LowTieScorer = make([]Play, len(g.Plays))\n\t//var JackTieScorer = make([]Play, len(g.Plays))\n\tvar JackTieScorer []Play //= make([]Play, 0)\n\n\tglb := new(LeadBoard)\n\tglb.GameID = g.ID\n\tglb.GameName = g.Name\n\tglb.GameStatus = g.Status\n\tglb.Starttime = g.Starttime\n\tglb.Endtime = g.Endtime\n\t//glb.TopScorer = TopScorer\n\t//glb.LowScorer = LowScorer\n\t//glb.Winner = WinScorer\n\t//GameTitle := g.Name\n\t//\tvar lastTopPlay int\n\tvar currentTopScore int\n\tvar currentLowScore = g.Plays[0].Entries[2]\n\n\tfor i, v := range g.Plays {\n\t\tif v.Entries[2] == 21 {\n\t\t\t//JackScorer[0] = v //no need\n\t\t\tJackTieScorer = append(JackTieScorer, v)\n\t\t}\n\n\t\tif i == 0 {\n\t\t\t//set first current top score and low scores\n\t\t\tcurrentTopScore = v.Entries[2]\n\t\t\tcurrentLowScore = v.Entries[2]\n\t\t}\n\n\t\tif v.Entries[2] > currentTopScore {\n\t\t\tTopScorer[0] = v\n\t\t\tcurrentTopScore = v.Entries[2]\n\t\t\tTopTieScorer = TopTieScorer[:0] //Clean or empty up tie\n\t\t\t//TopTieScorer = append(TopTieScorer, v)\n\t\t}\n\n\t\tif v.Entries[2] < currentLowScore {\n\t\t\tLowScorer[0] = v\n\t\t\tcurrentLowScore = v.Entries[2]\n\t\t\tLowTieScorer = LowTieScorer[:0] //Clean or empty up tie\n\t\t}\n\n\t\t//A winner Tie\n\t\tif v.Entries[2] == currentTopScore {\n\t\t\tTopTieScorer = append(TopTieScorer, v)\n\t\t}\n\n\t\t//A looser Tie\n\t\tif v.Entries[2] == currentLowScore {\n\t\t\tLowTieScorer = append(LowTieScorer, v)\n\t\t}\n\t}\n\n\tif len(JackTieScorer) < 1 {\n\t\t//no jack pot winner use top scorer for wins\n\t\tif len(TopTieScorer) > 1 {\n\t\t\t//there's a tie\n\t\t\t//fmt.Println(TopTieScorer)\n\t\t\t//sort by players high input number\n\t\t\tsort.SliceStable(TopTieScorer, func(i, j int) bool { return TopTieScorer[i].Entries[1] > TopTieScorer[j].Entries[1] })\n\t\t\tif TopTieScorer[0].Entries[1] == TopTieScorer[1].Entries[1] {\n\t\t\t\t//MQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Entry compare6: [\"+strconv.Itoa(TopTieScorer[0].Entries[1])+\"] &nbsp; 
[\"+strconv.Itoa(TopTieScorer[1].Entries[1])+\"]\"))\n\t\t\t\t//MQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Entry compare6low: [\"+strconv.Itoa(TopTieScorer[0].Entries[0])+\"] &nbsp; [\"+strconv.Itoa(TopTieScorer[1].Entries[0])+\"]\"))\n\n\t\t\t\t//there's a tie sort by players low input number\n\t\t\t\tsort.SliceStable(TopTieScorer, func(i, j int) bool { return TopTieScorer[i].Entries[0] > TopTieScorer[j].Entries[0] })\n\t\t\t\tif TopTieScorer[0].Entries[0] == TopTieScorer[1].Entries[0] {\n\t\t\t\t\t//there's still a tie\n\t\t\t\t\t//sort by name score is the same\n\t\t\t\t\tsort.SliceStable(TopTieScorer, func(i, j int) bool { return TopTieScorer[i].PlayerName < TopTieScorer[j].PlayerName })\n\t\t\t\t\tWinScorer[0] = TopTieScorer[0]\n\t\t\t\t\tTopScorer[0] = TopTieScorer[0]\n\t\t\t\t\t//MQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Case 5 name winner Winner!!\"))\n\t\t\t\t\t//MQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Entry compare5: [\"+strconv.Itoa(TopTieScorer[0].Entries[1])+\"] &nbsp; [\"+strconv.Itoa(TopTieScorer[1].Entries[1])+\"]\"))\n\t\t\t\t\t//MQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Entry compare5 low: [\"+strconv.Itoa(TopTieScorer[0].Entries[0])+\"] &nbsp; [\"+strconv.Itoa(TopTieScorer[1].Entries[0])+\"]\"))\n\n\t\t\t\t} else {\n\t\t\t\t\tWinScorer[0] = TopTieScorer[0]\n\t\t\t\t\t//MQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Case 4 highest lowest chosen number Winner!!\"))\n\t\t\t\t\t//MQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Entry compare 4: [\"+strconv.Itoa(TopTieScorer[0].Entries[1])+\"] &nbsp; [\"+strconv.Itoa(TopTieScorer[1].Entries[1])+\"]\"))\n\n\t\t\t\t}\n\n\t\t\t} else {\n\n\t\t\t\tWinScorer[0] = TopTieScorer[0]\n\t\t\t\t//MQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Case 3 Highest chosen number Winner!!\"))\n\t\t\t\t//MQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Entry compare 3: [\"+strconv.Itoa(TopTieScorer[0].Entries[1])+\"] &nbsp; [\"+strconv.Itoa(TopTieScorer[1].Entries[1])+\"]\"))\n\t\t\t}\n\t\t\t//\n\t\t\tMQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"High Score Winner!!\"))\n\t\t} else {\n\t\t\t//MQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Case 2 Simple highest scorer Winner!!\"))\n\t\t\tWinScorer[0] = TopScorer[0]\n\t\t}\n\n\t} else {\n\n\t\t//there's a jackpot winner\n\t\t//if len(JackTieScorer) > 1 { //using greater than cause we're putting in anyway as long as they score 21 so just check for count above 1\n\t\t//choose from the top name of the jackpot tie\n\t\tsort.SliceStable(JackTieScorer, func(i, j int) bool { return JackTieScorer[i].PlayerName < JackTieScorer[j].PlayerName })\n\t\tWinScorer[0] = JackTieScorer[0]\n\t\tMQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Jack pot Winner!!\"))\n\t\t//} else {\n\t\t//or just the jackpot scorer\n\t\t//\tWinScorer[0] = JackScorer[0]\n\t\t//}*/\n\t\tMQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"21 Point Winner!!!\"))\n\t}\n\n\tglb.TopScorer = TopScorer\n\tglb.LowScorer = LowScorer\n\tglb.Winner = WinScorer\n\n\t//fmt.Println(GamePlayers)\n\t//jglb, _ := json.Marshal(glb)\n\t//fmt.Println(string(jglb))\n\n\t//gj, err := json.Marshal(glb)\n\t//if err != nil {\n\t//\tfmt.Println(err.Error())\n\t//}\n\t//MQ.enQ(msg.Wrap(\"GenMQ\", \"Game TALLY ====> \\n\\n\"+string(gj)))\n\t//end print out the game data\n\n\t//MQ.enQ(msg.Wrap(\"GenMQ\", \"Winner is : \"+glb.Winner[0].PlayerName))\n\tMQ.enQ(msg.Wrap(\"AnnouncmentMQ\", \"Winner is <br>\"+glb.Winner[0].PlayerName+\"<br> Winning score : \"+strconv.Itoa(glb.Winner[0].Entries[2])))\n\tWinners = append(Winners, glb.Winner[0])\n\t//top scorer table\n\ttops, err := TallyTopScorers()\n\tRunError(err)\n\tfmt.Printf(\"Published %v new topscorer\", 
tops)\n\t//bradCastleadBoard()\n\t//bradCastGameList()\n\treturn *glb\n}", "title": "" }, { "docid": "70242839c8f6fa0323f6ad0d8ab99b75", "score": "0.45608455", "text": "func (bot *Bot) bumpStatistics(moved string) {\n\tstats, err := bot.DB.GetStatistics()\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tmovedInt, _ := strconv.Atoi(moved)\n\tstats[\"usrs\"] += movedInt\n\terr = bot.DB.SetStatistics(stats)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tlog.Println(stats)\n\t}\n}", "title": "" }, { "docid": "141b57485ad6122e1968d0774380f86c", "score": "0.45602882", "text": "func (o *CMFTurntableWin) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no cmf_turntable_win provided for insertion\")\n\t}\n\n\tvar err error\n\n\tif err := o.doBeforeInsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(cmfTurntableWinColumnsWithDefault, o)\n\n\tkey := makeCacheKey(columns, nzDefaults)\n\tcmfTurntableWinInsertCacheMut.RLock()\n\tcache, cached := cmfTurntableWinInsertCache[key]\n\tcmfTurntableWinInsertCacheMut.RUnlock()\n\n\tif !cached {\n\t\twl, returnColumns := columns.InsertColumnSet(\n\t\t\tcmfTurntableWinAllColumns,\n\t\t\tcmfTurntableWinColumnsWithDefault,\n\t\t\tcmfTurntableWinColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(cmfTurntableWinType, cmfTurntableWinMapping, wl)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tcache.retMapping, err = queries.BindMapping(cmfTurntableWinType, cmfTurntableWinMapping, returnColumns)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(wl) != 0 {\n\t\t\tcache.query = fmt.Sprintf(\"INSERT INTO `cmf_turntable_win` (`%s`) %%sVALUES (%s)%%s\", strings.Join(wl, \"`,`\"), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))\n\t\t} else {\n\t\t\tcache.query = \"INSERT INTO `cmf_turntable_win` () VALUES ()%s%s\"\n\t\t}\n\n\t\tvar queryOutput, queryReturning string\n\n\t\tif len(cache.retMapping) != 0 {\n\t\t\tcache.retQuery = fmt.Sprintf(\"SELECT `%s` FROM `cmf_turntable_win` WHERE %s\", strings.Join(returnColumns, \"`,`\"), strmangle.WhereClause(\"`\", \"`\", 0, cmfTurntableWinPrimaryKeyColumns))\n\t\t}\n\n\t\tcache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to insert into cmf_turntable_win\")\n\t}\n\n\tvar lastID int64\n\tvar identifierCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = uint64(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == cmfTurntableWinMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tidentifierCols = []interface{}{\n\t\to.ID,\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, identifierCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, identifierCols...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, 
\"models: unable to populate default values for cmf_turntable_win\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tcmfTurntableWinInsertCacheMut.Lock()\n\t\tcmfTurntableWinInsertCache[key] = cache\n\t\tcmfTurntableWinInsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterInsertHooks(ctx, exec)\n}", "title": "" }, { "docid": "dd06da2be436b1b479ebfaaa0d3c4e51", "score": "0.45237848", "text": "func (mstats *ModelStats) logUpdate(elapsed time.Duration, n string, newgwy int64, newsrv int64) {\n\td := mdtors.x[n]\n\tloglevel := LogV\n\tif newgwy != 0 {\n\t\tif d.kind == StatsKindCount || d.kind == StatsKindByteCount {\n\t\t\tspgwy := float64(newgwy) * (float64(time.Millisecond) / float64(elapsed))\n\t\t\tif d.kind == StatsKindByteCount {\n\t\t\t\tlog(loglevel, fmt.Sprintf(\"new-gwy-%s,%d,total-gwy-%s,%s,%s/s, %s\",\n\t\t\t\t\tn, newgwy, n, bytesToKMG(mstats.totalgwy[n]), n, bytesMillisToKMGseconds(spgwy)))\n\t\t\t} else {\n\t\t\t\tlog(loglevel, fmt.Sprintf(\"new-gwy-%ss,%d,total-gwy-%ss,%d,%ss/ms, %.0f\",\n\t\t\t\t\tn, newgwy, n, mstats.totalgwy[n], n, spgwy))\n\t\t\t}\n\t\t} else if d.kind == StatsKindSampleCount {\n\t\t\tavesample := float64(newgwy) / float64(config.numGateways)\n\t\t\tlog(loglevel, fmt.Sprintf(\"new-gwy-average-%s,%.1f\", n, avesample))\n\t\t} else if d.kind == StatsKindPercentage {\n\t\t\tbusygwy := float64(newgwy) / float64(config.numGateways)\n\t\t\tlog(loglevel, fmt.Sprintf(\"gwy-%s(%%),%.0f\", n, busygwy))\n\t\t}\n\t}\n\tif newsrv != 0 {\n\t\tif d.kind == StatsKindCount || d.kind == StatsKindByteCount {\n\t\t\tspsrv := float64(newsrv) * (float64(time.Millisecond) / float64(elapsed))\n\t\t\tif d.kind == StatsKindByteCount {\n\t\t\t\tlog(loglevel, fmt.Sprintf(\"new-srv-%s,%d,total-srv-%s,%s,%s/s, %s\",\n\t\t\t\t\tn, newsrv, n, bytesToKMG(mstats.totalsrv[n]), n, bytesMillisToKMGseconds(spsrv)))\n\t\t\t} else {\n\t\t\t\tlog(loglevel, fmt.Sprintf(\"new-srv-%ss,%d,total-srv-%ss,%d,%ss/ms, %.0f\",\n\t\t\t\t\tn, newsrv, n, mstats.totalsrv[n], n, spsrv))\n\t\t\t}\n\t\t} else if d.kind == StatsKindSampleCount {\n\t\t\tavesample := float64(newsrv) / float64(config.numServers)\n\t\t\tlog(loglevel, fmt.Sprintf(\"new-srv-average-%s,%.1f\", n, avesample))\n\t\t} else if d.kind == StatsKindPercentage {\n\t\t\tbusysrv := float64(newsrv) / float64(config.numServers)\n\t\t\tlog(loglevel, fmt.Sprintf(\"srv-%s(%%),%.0f\", n, busysrv))\n\t\t}\n\t}\n}", "title": "" }, { "docid": "625e483fb822d458e8db0b381085fdfc", "score": "0.45218956", "text": "func AddCMFTurntableWinHook(hookPoint boil.HookPoint, cmfTurntableWinHook CMFTurntableWinHook) {\n\tswitch hookPoint {\n\tcase boil.BeforeInsertHook:\n\t\tcmfTurntableWinBeforeInsertHooks = append(cmfTurntableWinBeforeInsertHooks, cmfTurntableWinHook)\n\tcase boil.BeforeUpdateHook:\n\t\tcmfTurntableWinBeforeUpdateHooks = append(cmfTurntableWinBeforeUpdateHooks, cmfTurntableWinHook)\n\tcase boil.BeforeDeleteHook:\n\t\tcmfTurntableWinBeforeDeleteHooks = append(cmfTurntableWinBeforeDeleteHooks, cmfTurntableWinHook)\n\tcase boil.BeforeUpsertHook:\n\t\tcmfTurntableWinBeforeUpsertHooks = append(cmfTurntableWinBeforeUpsertHooks, cmfTurntableWinHook)\n\tcase boil.AfterInsertHook:\n\t\tcmfTurntableWinAfterInsertHooks = append(cmfTurntableWinAfterInsertHooks, cmfTurntableWinHook)\n\tcase boil.AfterSelectHook:\n\t\tcmfTurntableWinAfterSelectHooks = append(cmfTurntableWinAfterSelectHooks, cmfTurntableWinHook)\n\tcase boil.AfterUpdateHook:\n\t\tcmfTurntableWinAfterUpdateHooks = append(cmfTurntableWinAfterUpdateHooks, cmfTurntableWinHook)\n\tcase 
boil.AfterDeleteHook:\n\t\tcmfTurntableWinAfterDeleteHooks = append(cmfTurntableWinAfterDeleteHooks, cmfTurntableWinHook)\n\tcase boil.AfterUpsertHook:\n\t\tcmfTurntableWinAfterUpsertHooks = append(cmfTurntableWinAfterUpsertHooks, cmfTurntableWinHook)\n\t}\n}", "title": "" }, { "docid": "5f09476d0d24549704558d22b5986be8", "score": "0.4514627", "text": "func (st *Statistics) TrackStats(no int, data interface{}) int {\n\tstat, ok := data.(stat)\n\tif !ok {\n\t\tdlog.Error(\"TrackStats called with a non-stat payload\")\n\t\treturn event.UnbindEvent\n\t}\n\tst.trackStats(stat.name, stat.inc)\n\treturn 0\n}", "title": "" }, { "docid": "73bd9eecdc3358b3831ab55fea113e11", "score": "0.45118368", "text": "func (_Wallet *WalletTransactorSession) Winthdraw(_amt *big.Int) (*types.Transaction, error) {\n\treturn _Wallet.Contract.Winthdraw(&_Wallet.TransactOpts, _amt)\n}", "title": "" }, { "docid": "d94cd330291dc97b1d17299a58e25d4b", "score": "0.45069528", "text": "func displayCurrentGameStats(msg *discordgo.Message) {\n\n\tblankField := &discordgo.MessageEmbedField{\n\t\tName: ZERO_WIDTH_SPACE,\n\t\tValue: ZERO_WIDTH_SPACE,\n\t\tInline: true,\n\t}\n\n\t// find currently running game for the user or a mention if one exists\n\tuserPlayingGame := msg.Author\n\tif len(msg.Mentions) > 0 {\n\t\tuserPlayingGame = msg.Mentions[0]\n\t}\n\n\tif game, ok := currentSinglePlayerGames[userPlayingGame.ID]; ok {\n\n\t\tembed := &discordgo.MessageEmbed{\n\t\t\tColor: 0x0FADED, // blueish\n\t\t\tAuthor: &discordgo.MessageEmbedAuthor{\n\t\t\t\tName: fmt.Sprintf(\"%s - Current Game Info\\n\", userPlayingGame.Username),\n\t\t\t},\n\t\t}\n\n\t\t// for i := 0; i < len(game.roundWinners); i++ {\n\t\tfor i := len(game.roundWinners) - 1; i >= 0; i-- {\n\n\t\t\tfieldName := fmt.Sprintf(\"Round %d:\", i+1)\n\t\t\tif len(game.roundWinners) == i+1 {\n\t\t\t\tfieldName = \"Last Round:\"\n\t\t\t}\n\n\t\t\tmessage := fmt.Sprintf(\"W: %s %s\\nL: %s %s\\n\",\n\t\t\t\tgame.roundWinners[i].groupName,\n\t\t\t\tgame.roundWinners[i].biasName,\n\t\t\t\tgame.roundLosers[i].groupName,\n\t\t\t\tgame.roundLosers[i].biasName)\n\n\t\t\tembed.Fields = append(embed.Fields, &discordgo.MessageEmbedField{\n\t\t\t\tName: fieldName,\n\t\t\t\tValue: message,\n\t\t\t\tInline: true,\n\t\t\t})\n\t\t}\n\n\t\t// notify user if no rounds have been played in the game yet\n\t\tif len(embed.Fields) == 0 {\n\t\t\tembed.Fields = append(embed.Fields, &discordgo.MessageEmbedField{\n\t\t\t\tName: \"No Rounds\",\n\t\t\t\tValue: utils.Geti18nText(\"biasgame.current.no-rounds-played\"),\n\t\t\t\tInline: true,\n\t\t\t})\n\t\t}\n\n\t\t// this is to correct embed alignment\n\t\tif len(embed.Fields)%3 == 1 {\n\t\t\tembed.Fields = append(embed.Fields, blankField)\n\t\t\tembed.Fields = append(embed.Fields, blankField)\n\t\t} else if len(embed.Fields)%3 == 2 {\n\t\t\tembed.Fields = append(embed.Fields, blankField)\n\t\t}\n\n\t\tutils.SendPagedMessage(msg, embed, 12)\n\t} else {\n\t\tutils.SendMessage(msg.ChannelID, \"biasgame.current.no-running-game\")\n\t}\n}", "title": "" }, { "docid": "5f57d6ceb3ecf363d38eeacf54a5ca8d", "score": "0.44946602", "text": "func Run(gg runners.GameGen, play players.Player) {\n\n\tg := &game.Game{}\n\tvar numGames uint64\n\tvar numMoves uint64\n\tstart := time.Now()\n\n\tfor g.State() != game.StateWon {\n\t\tg = gg()\n\t\tplay(g)\n\n\t\tnumGames++\n\t\tnumMoves += g.TotalMoves()\n\n\t\tif numGames%1000 == 0 {\n\t\t\telapsed := time.Since(start)\n\t\t\tfmt.Printf(\"Played %v games in %v with an average %v moves to failure\\n\", numGames, elapsed, 
numMoves/numGames)\n\t\t}\n\t}\n\n\tfmt.Printf(\"\\nWinning took %v games\\n\", numGames)\n\tfmt.Printf(\"\\nScore: %v\\tMoves: %v\\n\\n\", g.Score(), g.TotalMoves())\n\tfmt.Println(g)\n}", "title": "" }, { "docid": "32352e381cacc9499ad885a4250eb148", "score": "0.44839588", "text": "func (_Wallet *WalletSession) Winthdraw(_amt *big.Int) (*types.Transaction, error) {\n\treturn _Wallet.Contract.Winthdraw(&_Wallet.TransactOpts, _amt)\n}", "title": "" }, { "docid": "d12787c4dd56f347f59f1f25000eb6a4", "score": "0.44697097", "text": "func (m *mongoDB) AddScoreRecord(owner string, recordType schema.ScoreRecordType, score float64, ts int64) error {\n\tc := m.client.Database(m.database).Collection(schema.ScoreHistoryCollection)\n\tctx, cancel := context.WithTimeout(context.Background(), defaultTimeout)\n\tdefer cancel()\n\n\tdate := time.Unix(ts, 0).Format(\"2006-01-02\")\n\tquery := bson.M{\"owner\": owner, \"type\": recordType, \"date\": date}\n\n\tvar record schema.ScoreRecord\n\terr := c.FindOne(ctx, query).Decode(&record)\n\tif err != nil && err != mongo.ErrNoDocuments {\n\t\treturn err\n\t}\n\tscore = (record.Score*record.UpdateTimes + score) / (record.UpdateTimes + 1)\n\n\tupdate := bson.M{\n\t\t\"$set\": bson.M{\n\t\t\t\"score\": score,\n\t\t\t\"ts\": ts,\n\t\t},\n\t\t\"$inc\": bson.M{\"update_times\": 1},\n\t\t\"$setOnInsert\": bson.M{\n\t\t\t\"owner\": owner,\n\t\t\t\"type\": recordType,\n\t\t\t\"date\": date,\n\t\t},\n\t}\n\topts := options.Update().SetUpsert(true)\n\t_, err = c.UpdateOne(ctx, query, update, opts)\n\treturn err\n}", "title": "" }, { "docid": "03058851ac927f09b1634b0ff3aed96a", "score": "0.4469004", "text": "func Tally(input io.Reader, output io.Writer) error {\n\tstandings, err := parseMatchOutcomes(input)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tsort.SliceStable(standings, func(i, j int) bool {\n\t\tif standings[i].points == standings[j].points {\n\t\t\treturn standings[i].team < standings[j].team\n\t\t}\n\t\treturn standings[i].points > standings[j].points\n\t})\n\n\toutput.Write([]byte(fmt.Sprintf(rowTemplate, \"Team\", \"MP\", \"W\", \"D\", \"L\", \"P\")))\n\tfor _, standing := range standings {\n\t\toutput.Write([]byte(fmt.Sprintf(rowTemplate, standing.team, standing.played, standing.won, standing.drawn, standing.lost, standing.points)))\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c7900a47e48a4702331c0001548a52df", "score": "0.4451947", "text": "func (handler *Handler) writeTurnSummariesForPlayer(\n\trequestContext context.Context,\n\trelevantSegments []string) (interface{}, int) {\n\tif len(relevantSegments) < 1 {\n\t\treturn \"Not enough segments in URI to determine player\", http.StatusBadRequest\n\t}\n\n\tplayerIdentifier := relevantSegments[0]\n\n\tplayerName, errorFromIdentification :=\n\t\thandler.segmentTranslator.FromSegment(playerIdentifier)\n\n\tif errorFromIdentification != nil {\n\t\treturn errorFromIdentification, http.StatusBadRequest\n\t}\n\n\tallGamesWithPlayer, errorFromView :=\n\t\thandler.stateCollection.ViewAllWithPlayer(requestContext, playerName)\n\n\tif errorFromView != nil {\n\t\treturn errorFromView, http.StatusBadRequest\n\t}\n\n\tnumberOfGamesWithPlayer := len(allGamesWithPlayer)\n\n\tturnSummaries := make([]parsing.TurnSummary, numberOfGamesWithPlayer)\n\n\tfor gameIndex := 0; gameIndex < numberOfGamesWithPlayer; gameIndex++ {\n\t\tgameView := allGamesWithPlayer[gameIndex]\n\t\t_, playerTurnIndex, _ := gameView.CurrentTurnOrder()\n\t\tturnSummaries[gameIndex] = parsing.TurnSummary{\n\t\t\tGameIdentifier: 
handler.segmentTranslator.ToSegment(gameView.GameName()),\n\t\t\tGameName: gameView.GameName(),\n\t\t\tIsPlayerTurn: playerTurnIndex == 0,\n\t\t}\n\t}\n\n\tendpointObject := parsing.TurnSummaryList{\n\t\tTurnSummaries: turnSummaries,\n\t}\n\n\treturn endpointObject, http.StatusOK\n}", "title": "" }, { "docid": "6ab2f2ccd6e360e96cd299e3d8235119", "score": "0.44513467", "text": "func GetGroupTournamentStats(t TournamentInterface, winPoints int, lossPoints int, tiePoints int) ([]TeamStatsInterface, error) {\n\tif t.GetType() != int(TournamentTypeGroup) {\n\t\treturn nil, errors.New(\"can not get stats for tournament type TournamentTypeGroup\")\n\t}\n\tvar stats []TeamStatsInterface\n\n\tfor _, group := range t.GetGroups() {\n\t\tvar groupStats []TeamStatsInterface\n\n\t\tfor _, team := range *group.GetTeams() {\n\t\t\tstat := TeamStats{\n\t\t\t\tGroup: group,\n\t\t\t\tTeam: team,\n\t\t\t\tPlayed: 0,\n\t\t\t\tWins: 0,\n\t\t\t\tLosses: 0,\n\t\t\t\tTies: 0,\n\t\t\t\tPointsFor: 0.00,\n\t\t\t\tPointsAgainst: 0.00,\n\t\t\t\tPoints: 0}\n\t\t\tfor _, game := range team.GetGames() {\n\t\t\t\tif game.GetHomeTeam().GetID() == team.GetID() {\n\t\t\t\t\tstat.PointsFor = game.GetHomeScore().GetPoints()\n\t\t\t\t\tstat.PointsAgainst = game.GetAwayScore().GetPoints()\n\t\t\t\t\tif game.GetHomeScore().GetPoints() > game.GetAwayScore().GetPoints() {\n\t\t\t\t\t\tstat.Wins++\n\t\t\t\t\t} else if game.GetHomeScore().GetPoints() == game.GetAwayScore().GetPoints() {\n\t\t\t\t\t\tstat.Ties++\n\t\t\t\t\t} else {\n\t\t\t\t\t\tstat.Losses++\n\t\t\t\t\t}\n\t\t\t\t} else if game.GetAwayTeam().GetID() == team.GetID() {\n\t\t\t\t\tstat.PointsFor = game.GetAwayScore().GetPoints()\n\t\t\t\t\tstat.PointsAgainst = game.GetHomeScore().GetPoints()\n\t\t\t\t\tif game.GetHomeScore().GetPoints() < game.GetAwayScore().GetPoints() {\n\t\t\t\t\t\tstat.Wins++\n\t\t\t\t\t} else if game.GetHomeScore().GetPoints() == game.GetAwayScore().GetPoints() {\n\t\t\t\t\t\tstat.Ties++\n\t\t\t\t\t} else {\n\t\t\t\t\t\tstat.Losses++\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tstat.Played++\n\t\t\t}\n\t\t\tstat.AddPoints(stat.Wins * winPoints)\n\t\t\tstat.AddPoints(stat.Losses * lossPoints)\n\t\t\tstat.AddPoints(stat.Ties * tiePoints)\n\n\t\t\tgroupStats = append(groupStats, &stat)\n\t\t}\n\t\tgroupStats = SortTournamentStats(groupStats)\n\t\tstats = append(stats, groupStats...)\n\t}\n\treturn stats, nil\n}", "title": "" }, { "docid": "4f24356f2f19b62546f806de4a239d90", "score": "0.4446688", "text": "func announceWinner(data *spicerack.FightCard) {\n\tp1 := formatFighterName(nil, data.RedName, P1_NAME_FORMAT)\n\tp2 := formatFighterName(nil, data.BlueName, P2_NAME_FORMAT)\n\tvar w, l, msg string\n\n\tif data.Winner() == data.RedName {\n\t\tw = p1\n\t\tl = p2\n\t} else if data.Winner() == data.BlueName {\n\t\tw = p2\n\t\tl = p1\n\t} else {\n\t\tmsg = \"...I have no idea who won.\"\n\t}\n\n\tif data.Upset(UPSET_FACTOR) {\n\t\tmsg = fmt.Sprintf(UPSET_WINNER_FORMAT, data.Odds(), w, l)\n\t} else {\n\t\tmsg = fmt.Sprintf(WINNER_FORMAT, w, l)\n\t}\n\n\tclient.Privmsg(settings.Channel, msg)\n}", "title": "" }, { "docid": "d191b0159ccbee28297930f9ebf086e8", "score": "0.44378155", "text": "func (this *LocalView) Done(youWin Outcome) {\n\tswitch youWin {\n\tcase Win:\n\t\tthis.out.Write([]byte(fmt.Sprintf(\"%s wins.\\n\", this.name)))\n\tcase Lose:\n\t\tthis.out.Write([]byte(fmt.Sprintf(\"%s loses.\\n\", this.name)))\n\tcase Draw:\n\t\tthis.out.Write([]byte(\"Draw game.\\n\"))\n\t}\n\tthis.out.Flush()\n}", "title": "" }, { "docid": "cfe03b88f5ae056895fb362109612354", 
"score": "0.44377452", "text": "func GetGroupStats(group GroupInterface, winPoints int, lossPoints int, tiePoints int) []TeamStatsInterface {\n\tvar groupStats []TeamStatsInterface\n\tteamStats := map[int]*TeamStats{}\n\n\tfor _, team := range *group.GetTeams() {\n\t\tteamStats[team.GetID()] = &TeamStats{\n\t\t\tGroup: group,\n\t\t\tTeam: team,\n\t\t}\n\t}\n\n\tfor _, game := range *group.GetGames() {\n\t\tif _, ok := teamStats[game.GetHomeTeam().GetID()]; !ok {\n\t\t\tteamStats[game.GetHomeTeam().GetID()] = &TeamStats{\n\t\t\t\tGroup: group,\n\t\t\t\tTeam: game.GetHomeTeam(),\n\t\t\t}\n\t\t}\n\t\t// Calculate stats for the home team in every game\n\t\tteamStats[game.GetHomeTeam().GetID()].PointsFor += game.GetHomeScore().GetPoints()\n\t\tteamStats[game.GetHomeTeam().GetID()].PointsAgainst += game.GetAwayScore().GetPoints()\n\t\tif game.GetHomeScore().GetPoints() > game.GetAwayScore().GetPoints() {\n\t\t\tteamStats[game.GetHomeTeam().GetID()].Wins++\n\t\t} else if game.GetHomeScore().GetPoints() == game.GetAwayScore().GetPoints() {\n\t\t\tteamStats[game.GetHomeTeam().GetID()].Ties++\n\t\t} else {\n\t\t\tteamStats[game.GetHomeTeam().GetID()].Losses++\n\t\t}\n\n\t\tteamStats[game.GetHomeTeam().GetID()].Played++\n\n\t\t// Calculate stats for the away team in every game\n\t\tif _, ok := teamStats[game.GetAwayTeam().GetID()]; !ok {\n\t\t\tteamStats[game.GetAwayTeam().GetID()] = &TeamStats{\n\t\t\t\tGroup: group,\n\t\t\t\tTeam: game.GetAwayTeam(),\n\t\t\t}\n\t\t}\n\t\tteamStats[game.GetAwayTeam().GetID()].PointsFor += game.GetAwayScore().GetPoints()\n\t\tteamStats[game.GetAwayTeam().GetID()].PointsAgainst += game.GetHomeScore().GetPoints()\n\t\tif game.GetHomeScore().GetPoints() < game.GetAwayScore().GetPoints() {\n\t\t\tteamStats[game.GetAwayTeam().GetID()].Wins++\n\t\t} else if game.GetHomeScore().GetPoints() == game.GetAwayScore().GetPoints() {\n\t\t\tteamStats[game.GetAwayTeam().GetID()].Ties++\n\t\t} else {\n\t\t\tteamStats[game.GetAwayTeam().GetID()].Losses++\n\t\t}\n\t\tteamStats[game.GetAwayTeam().GetID()].Played++\n\t}\n\n\tfor _, t := range teamStats {\n\t\tt.Points = t.Wins * winPoints\n\t\tt.Points += t.Losses * lossPoints\n\t\tt.Points += t.Ties * tiePoints\n\t\tgroupStats = append(groupStats, t)\n\t}\n\n\tgroupStats = SortTournamentStats(groupStats)\n\treturn groupStats\n}", "title": "" }, { "docid": "a18bf4df623a170632dfc5728d36f4af", "score": "0.44304875", "text": "func (s *fileBasedActivityStatsStorer) RecordActives(tlf tlf.ID, host string) {\n\ts.ch <- activity{tlfID: tlf, host: host}\n}", "title": "" }, { "docid": "374dae96b4954283346a38147983dcfc", "score": "0.44255573", "text": "func Record(i int, action string) {\n\n\tvar amount int\n\tif action[0] == 'R' {\n\t\tamount, _ = strconv.Atoi(action[5:])\n\t} else if action == \"AllIn\" {\n\t\tamount = current.Players[i].Chips - current.Players[i].Bet\n\t}\n\tif current.Stage == \"Pre-flop\" {\n\t\tUpdateArray(i, 0, amount)\n\t} else if current.Stage == \"Flop\" {\n\t\tUpdateArray(i, 1, amount)\n\t} else if current.Stage == \"Turn\" {\n\t\tUpdateArray(i, 2, amount)\n\t} else if current.Stage == \"River\" {\n\t\tUpdateArray(i, 3, amount)\n\t}\n}", "title": "" }, { "docid": "b272dd7cb73d8e018cbebfa3b452635b", "score": "0.44156194", "text": "func (a *App) UpdateTeam(w http.ResponseWriter, r *http.Request) {\n\thandlers.UpdateTeam(a.DB, w, r)\n}", "title": "" }, { "docid": "26d76cabd78b517eb13c2f8efbe8196a", "score": "0.4415617", "text": "func (db *PgDB) RecordInstanceStats(a *model.InstanceStats) error {\n\treturn 
db.namedExecOne(`\nINSERT INTO provisioner_instance_stats (resource_pool, instance_id, slots, start_time)\nSELECT :resource_pool, :instance_id, :slots, CURRENT_TIMESTAMP\nWHERE NOT EXISTS (\n\tSELECT * FROM provisioner_instance_stats WHERE instance_id = :instance_id AND end_time IS NULL\n)\n`, a)\n}", "title": "" }, { "docid": "2917a18f73a61c51a462c0b9d6075293", "score": "0.4412272", "text": "func (st *Statistics) TrackTimeStats(no int, data interface{}) int {\n\ttimed, ok := data.(timedStat)\n\tif !ok {\n\t\tdlog.Error(\"TrackTimeStats called with a non-timedStat payload\")\n\t\treturn event.UnbindEvent\n\t}\n\tif timed.on { //Turning on a thing to time track\n\t\tst.statTimeLock.Lock()\n\t\tst.statTimes[timed.name] = time.Now()\n\t\tst.statTimeLock.Unlock()\n\t} else {\n\t\tst.statTimeLock.Lock()\n\t\ttimeDiff := int(time.Since(st.statTimes[timed.name]))\n\t\tst.statTimeLock.Unlock()\n\t\tif timeDiff < 0 {\n\t\t\treturn 0\n\t\t}\n\t\tst.trackStats(timed.name, timeDiff)\n\t}\n\treturn 0\n}", "title": "" }, { "docid": "e40cf8e4f8c2424edfdafcb4b23e6dcb", "score": "0.44100276", "text": "func (g *game) genStats() {\n\tvar wins, loses, ties int\n\tfor _, i := range g.results {\n\t\tswitch {\n\t\tcase i == win:\n\t\t\twins++\n\t\tcase i == lose:\n\t\t\tloses++\n\t\tcase i == tie:\n\t\t\tties++\n\t\t}\n\t}\n\tfmt.Printf(\"\\n\\nRounds: %d, Wins: %d, Loses: %d, Ties: %d\\n\\n\", len(g.results), wins, loses, ties)\n\tos.Exit(1) // Since it was a ctrl-c, exit non-zero\n}", "title": "" }, { "docid": "822c70cd8e3155d8f231cc08d8874ca2", "score": "0.44098854", "text": "func (fw *FinalizingWorker) winner(party []Participant) {\n\t// inform\n\tfw.log.Debugf(\"calculating ballot %s results\", fw.ballot.Address.String())\n\n\t// container for votes\n\tvotes := make([]uint64, len(fw.ballot.Proposals)+1)\n\n\t// container for weights\n\tweights := make([]*big.Int, len(fw.ballot.Proposals)+1)\n\n\t// loop all voters\n\tfor _, voter := range party {\n\t\t// advance votes counter\n\t\tvotes[voter.Vote]++\n\n\t\t// make sure to list the weight\n\t\tif weights[voter.Vote] == nil {\n\t\t\tweights[voter.Vote] = new(big.Int)\n\t\t}\n\n\t\t// advance weight\n\t\tweights[voter.Vote] = new(big.Int).Add(weights[voter.Vote], voter.Total)\n\t}\n\n\t// make a new string builder for generating the rolling/final results\n\tvar sb strings.Builder\n\tif fw.ballot.IsOpen {\n\t\t// add the header\n\t\tsb.WriteString(fmt.Sprintf(\"Rolling results for ballot '%s':\\n\", fw.ballot.Name))\n\t} else {\n\t\t// add the header\n\t\tsb.WriteString(fmt.Sprintf(\"Final results for ballot '%s':\\n\", fw.ballot.Name))\n\t}\n\n\t// log results\n\tfor _, prop := range fw.ballot.Proposals {\n\t\t// do we have it?\n\t\tif weights[prop.Id] != nil {\n\t\t\tw := new(big.Int).Div(weights[prop.Id], big.NewInt(int64(math.Pow10(18)))).Uint64()\n\t\t\tsb.WriteString(fmt.Sprintf(\"\\tProposal #%d %s: votes %d, weight %d FTM\\n\", prop.Id, prop.Name, votes[prop.Id], w))\n\t\t}\n\t}\n\n\t// post the results with configured web hook\n\tfw.postResults(sb.String())\n\n\t// log the result\n\tfw.log.Notice(sb.String())\n}", "title": "" }, { "docid": "c261581693b214d5b28f90d583016d85", "score": "0.43949363", "text": "func (f *FPL) addSummaryRow() {\n\tif len(f.Players) == 0 {\n\t\tlog.Println(\"unable to addSummaryRow() due to empty f.Players\")\n\t\treturn\n\t}\n\tp0 := f.Players[0]\n\tsummary := team.Player{\n\t\tID: p0.TeamID + 1000,\n\t\tWebName: fmt.Sprintf(\"AllPlayed_%s_Players\", p0.TeamName),\n\t\tTeamName: p0.TeamName,\n\t\tRoleName: 
\"all\",\n\t\tTeamID: p0.TeamID,\n\t\tRoleID: 0,\n\t\t// PlayerCount: \"-1\",\n\t\t// RegularPlayerCount: \"-1\",\n\t\t// PointsPerGame: \"-1\",\n\t\t// OppPointsPerGame: \"-1\",\n\t\t// Form: \"-1\",\n\t\tTotalPoints: 0,\n\t\t// ValueForm: \"-1\",\n\t\t// ValueSeason: \"-1\",\n\t\t// IctIndex: \"-1\",\n\t\tNowCost: 0,\n\t\tMinutes: 0,\n\t}\n\n\t// for int, sum up all the values\n\tfor _, player := range f.Players {\n\t\tsummary.TotalPoints += player.TotalPoints\n\t\tsummary.NowCost += player.NowCost\n\t\tsummary.Minutes += player.Minutes\n\t}\n\toppTotalPoints := float64(f.calcOpponentPoints())\n\tmatchPlayed := math.Round(float64(summary.Minutes) / 990)\n\tcurrentPrice := float64(summary.NowCost) / 10.0\n\n\t// for float, convert strings => sum up all floats => string\n\ttempIctIndex := 0.0\n\ttempForm := 0.0\n\t// pCount := 0.0\n\t// rpCount := 0.0\n\tfor _, player := range f.Players {\n\t\tif player.Minutes <= 0 {\n\t\t\tcontinue\n\t\t} else if player.Minutes >= int(matchPlayed*90) {\n\t\t\t// shortlist players with long minutes played (>60 per match)\n\t\t\tsummary.RegularPlayerCount++\n\t\t}\n\t\tsummary.PlayerCount++\n\n\t\t// convert strings => sum up all floats\n\t\tpIctIndex, _ := strconv.ParseFloat(player.IctIndex, 64)\n\t\ttempIctIndex += pIctIndex\n\t\tpForm, _ := strconv.ParseFloat(player.Form, 64)\n\t\ttempForm += pForm\n\t}\n\n\t// convert floats => string\n\tsummary.IctIndex = fmt.Sprintf(\"%.2f\", tempIctIndex/float64(summary.RegularPlayerCount))\n\tsummary.ValueForm = fmt.Sprintf(\"%.2f\", tempForm/currentPrice)\n\tsummary.ValueSeason = fmt.Sprintf(\"%.2f\", float64(summary.TotalPoints)/currentPrice)\n\tsummary.Form = fmt.Sprintf(\"%.2f\", tempForm)\n\tsummary.OppPointsPerGame = fmt.Sprintf(\"%.2f\", oppTotalPoints/matchPlayed)\n\tsummary.PointsPerGame = fmt.Sprintf(\"%.2f\", float64(summary.TotalPoints)/matchPlayed)\n\n\tf.Players = append([]team.Player{summary}, f.Players...)\n\tf.Res.Players = append([]team.Player{summary}, f.Res.Players...)\n\treturn\n}", "title": "" }, { "docid": "78e99463037675dbef9137993ac0d376", "score": "0.4392339", "text": "func (r *ScoresService) ListWindow(leaderboardId string, collection string, timeSpan string) *ScoresListWindowCall {\n\tc := &ScoresListWindowCall{s: r.s, urlParams_: make(gensupport.URLParams)}\n\tc.leaderboardId = leaderboardId\n\tc.collection = collection\n\tc.urlParams_.Set(\"timeSpan\", timeSpan)\n\treturn c\n}", "title": "" }, { "docid": "f703835580c9c4145461f34f1b8dab33", "score": "0.43788677", "text": "func RecordTestTimings(ch chan IntegrationTestRun, wg *sync.WaitGroup) {\n\t//Set up slice to keep track of all results\n\tdefer wg.Done()\n\n\tallResults := []IntegrationTestRun{}\n\n\tBatchStartTime := time.Now()\n\tlog.Info(\"Started test batch at \", BatchStartTime)\n\t//Iterate over channel while tests are running\n\tfor result := range ch {\n\t\tresult.BatchStartTime = BatchStartTime\n\t\tallResults = append(allResults, result)\n\t}\n\n\tBatchEndTime := time.Now()\n\tBatchDuration := BatchEndTime.Sub(BatchStartTime).Seconds()\n\n\t//Add job end time and duration to results\n\tfor i := range allResults {\n\t\tallResults[i].BatchEndTime = BatchEndTime\n\t\tallResults[i].BatchDuration = BatchDuration\n\t}\n\n\tresultsJSON := processTestTimings(allResults)\n\n\tinsightsAPIKey := os.Getenv(\"NRDIAG_INSIGHTS_API_KEY\")\n\tinsightsAccountID := os.Getenv(\"INSIGHTS_ACCOUNT_ID\")\n\n\t// Skip the Insights API call if necessary environment variables are not present.\n\tif insightsAccountID != \"\" && insightsAPIKey != \"\" 
{\n\t\tlog.Info(\"POSTing\", len(allResults), \"results to Insights account:\", insightsAccountID)\n\n\t\terr := insertCustomEvents(insightsAccountID, insightsAPIKey, resultsJSON)\n\n\t\tif err != nil {\n\t\t\tlog.Info(\"Insights event insertion unsuccessful.\")\n\t\t\tlog.Info(err)\n\t\t}\n\n\t\tlog.Info(\"Successfully POSTed events to Insights\")\n\t} else {\n\t\tlog.Info(\"Skipping posting data into Insights API because environment variables are not set.\")\n\t}\n}", "title": "" }, { "docid": "148017b137e519f6dc7a0a9294614a74", "score": "0.43775064", "text": "func (p *Processes) getWinStat(fields map[string]interface{}) error {\n\tpids, err := process.Pids()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Get all processes pids failed, error: %v\", err)\n\t}\n\t// total processes\n\tfields[KeyProcessesTotal] = int64(len(pids))\n\tfor _, pid := range pids {\n\t\tp, _ := process.NewProcess(pid)\n\t\tthreads, err := p.NumThreads()\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Get process threads failed, error: %v\", err)\n\t\t\tcontinue\n\t\t}\n\t\tfields[KeyProcessesTotalThreads] = fields[KeyProcessesTotalThreads].(int64) + int64(threads)\n\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "baa3a77bb4ac20ba4b19b867aa4db4e3", "score": "0.43743315", "text": "func Test_play_win(t *testing.T) {\n\t// create user input\n\tuser_input := \"D\"\n\n\t// create a player\n\tcurrent_player := Player{0, 0, 0, 1}\n\n\t// create the rooms\n\tconst total_states = 3 //Total number of inventory slots\n\tvar rooms [total_states]Room\n\tinit_game(rooms[:])\n\n\t// create an inventory\n\tconst inventory_size = 3\n\tvar inventory [inventory_size]string\n\tinventory[0] = \"Coffee\"\n\tinventory[1] = \"Cream\"\n\tinventory[2] = \"Sugar\"\n\n\texp := 1\n\tact := play(current_player, rooms[:], inventory[:], user_input)\n\n\tif exp != act.win_status {\n\t\tt.Fatalf(\"Expected %d got %d\", exp, act.win_status)\n\t}\n}", "title": "" }, { "docid": "d65d1d54b150e2a9860930a8978134ae", "score": "0.43588075", "text": "func GetCurrentSpreadsAndWinProb(TeamData AllTeamData) AllTeamData {\n\turl := \"https://fantasydata.com/nfl-stats/nfl-point-spreads-and-odds.aspx\"\n\tresponse, err := http.Get(url)\n\tdefer response.Body.Close()\n\tif err != nil {\n\t\tfmt.Println(\"Error: \", err)\n\t\treturn nil\n\t}\n\tbody, err := ioutil.ReadAll(response.Body)\n\tif err != nil {\n\t\tfmt.Println(\"Error: \", err)\n\t\treturn nil\n\t}\n\tIndex := bytes.Index(body, []byte(\"StatsGrid\"))\n\tbody = body[Index:]\n\tIndex = bytes.Index(body, []byte(\"<tbody>\"))\n\tbody = body[Index:]\n\tIndex = bytes.Index(body, []byte(\"</tbody>\"))\n\tbody = body[:Index]\n\tTableData := FindAllBetween(body, \"<td>\", \"</td>\")\n\tfor i := 0; i < len(TableData); i += 6 {\n\t\tFavorite := strings.Replace(string(TableData[i]), \"at \", \"\", 1)\n\t\tDog := strings.Replace(string(TableData[i+2]), \"at \", \"\", 1)\n\t\tFavorite = strings.Replace(Favorite, \"<td>\", \"\", 1)\n\t\tFavorite = strings.Replace(Favorite, \"</td>\", \"\", 1)\n\t\tFavorite = GetPFRTeamAbbr(strings.ToUpper(Favorite))\n\t\tDog = strings.Replace(Dog, \"<td>\", \"\", 1)\n\t\tDog = strings.Replace(Dog, \"</td>\", \"\", 1)\n\t\tDog = GetPFRTeamAbbr(strings.ToUpper(Dog))\n\t\tTableData[i+1] = strings.Replace(TableData[i+1], \"<td>\", \"\", 1)\n\t\tTableData[i+1] = strings.Replace(TableData[i+1], \"</td>\", \"\", 1)\n\t\tSpread, err := strconv.ParseFloat(TableData[i+1], 64)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"It seems that the line for the %v vs %v game is not available because we got %v for 
the line.\\n\", Favorite, Dog, TableData[i+1])\n\t\t} else {\n\t\t\tTeamData[Favorite][SPREAD] = Spread\n\t\t\tTeamData[Dog][SPREAD] = -Spread\n\t\t\tTeamData[Favorite][PLAYINGTHISWEEK] = GetTeamFloatFromAbbr(Dog)\n\t\t\tTeamData[Dog][PLAYINGTHISWEEK] = GetTeamFloatFromAbbr(Favorite)\n\t\t}\n\t}\n\treturn TeamData\n}", "title": "" }, { "docid": "6ca0e0bdc45a63aa3699e9a8f8fef5b1", "score": "0.43561035", "text": "func GenerateRanking(ctx context.Context, tournament *firestore.DocumentRef, challenges firestore.Query) error {\n\tteamMetrics := make(map[string]*TeamMetadata)\n\tdivisionMetrics := make(map[Division]*DivisionMetadata)\n\tdivisionToTeam := make(map[Division][]string)\n\tlocalRank := []string{\"\"}\n\trankToUpload := make(map[string]string)\n\tranking := tournament.Collection((\"ranking\"))\n\tvar nextRound Round\n\n\t// Compute team level metrics for the round.\n\titer := challenges.Documents(ctx)\n\tfor {\n\t\tdoc, err := iter.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tvar challenge Challenge\n\t\tif err = doc.DataTo(&challenge); err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// Populate challenger related metrics\n\t\tnextRound = challenge.Round + 1\n\t\tvar challenger, defender *TeamMetadata\n\t\tif val, ok := teamMetrics[challenge.Challenger]; ok {\n\t\t\tchallenger = val\n\t\t} else {\n\t\t\tvar c TeamMetadata\n\t\t\tchallenger = &c\n\t\t\tchallenger.Team = challenge.Challenger\n\t\t\tchallenger.Round = challenge.Round\n\t\t\tchallenger.Division = challenge.Division\n\t\t\tchallenger.Rank = challenge.ChallengerRank\n\t\t\tteamMetrics[challenge.Challenger] = challenger\n\t\t\tdivisionToTeam[challenger.Division] = append(divisionToTeam[challenger.Division], challenger.Team)\n\t\t}\n\t\tif val, ok := teamMetrics[challenge.Defender]; ok {\n\t\t\tdefender = val\n\t\t} else {\n\t\t\tvar d TeamMetadata\n\t\t\tdefender = &d\n\t\t\tdefender.Team = challenge.Defender\n\t\t\tdefender.Round = challenge.Round\n\t\t\tdefender.Division = challenge.Division\n\t\t\tdefender.Rank = challenge.DefenderRank\n\t\t\tteamMetrics[challenge.Defender] = defender\n\t\t\tdivisionToTeam[defender.Division] = append(divisionToTeam[defender.Division], defender.Team)\n\t\t}\n\n\t\tif challenge.ChallengerScore == 4 {\n\t\t\tfmt.Printf(\"%s won. %s lost.\\n\", challenger.Team, defender.Team)\n\t\t\tchallenger.NumWins++\n\t\t\tdefender.NumLosses++\n\t\t} else if challenge.DefenderScore == 4 {\n\t\t\tfmt.Printf(\"%s won. 
%s lost.\\n\", defender.Team, challenger.Team)\n\t\t\tdefender.NumWins++\n\t\t\tchallenger.NumLosses++\n\t\t} else {\n\t\t\treturn fmt.Errorf(\"Invalid scores detected for %d-%d: %s vs %s\", challenge.Round, challenge.Code, challenger.Team, defender.Team)\n\t\t}\n\t\tchallenger.NumSetsGained += challenge.ChallengerScore\n\t\tchallenger.NumSetsLost += challenge.DefenderScore\n\t\tdefender.NumSetsGained += challenge.DefenderScore\n\t\tdefender.NumSetsLost += challenge.ChallengerScore\n\n\t\t_, err = tournament.Collection(\"teams\").Doc(challenger.Team).Collection(\"metrics\").Doc(challenge.Round.String()).Set(ctx, challenger)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(\"Uploading to firestore successful:\", challenger.Team)\n\t\tfmt.Println(challenger)\n\t\t_, err = tournament.Collection(\"teams\").Doc(defender.Team).Collection(\"metrics\").Doc(challenge.Round.String()).Set(ctx, defender)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(\"Uploading to firestore successful:\", defender.Team)\n\t\tfmt.Println(defender)\n\t}\n\n\t// Compute division metrics based on the team metrics.\n\tfor div := X; int(div) <= len(teamMetrics)/3; div++ {\n\t\tteamsInDiv := divisionToTeam[div]\n\t\tif len(teamsInDiv) == 0 {\n\t\t\tbreak\n\t\t}\n\t\tvar divMetadata DivisionMetadata\n\t\tdivMetadata.Division = div\n\n\t\tteams := make([]*TeamMetadata, 0, len(teamsInDiv))\n\t\tfor _, team := range teamsInDiv {\n\t\t\tteamMetric := teamMetrics[team]\n\t\t\tteams = append(teams, teamMetric)\n\t\t}\n\t\tsort.Slice(teams, func(i, j int) bool {\n\t\t\tt1 := teams[i]\n\t\t\tt2 := teams[j]\n\t\t\tif t1.NumWins != t2.NumWins {\n\t\t\t\treturn t1.NumWins > t2.NumWins\n\t\t\t}\n\t\t\tt1Won := t1.NumSetsGained - t1.NumSetsLost\n\t\t\tt2Won := t2.NumSetsGained - t2.NumSetsLost\n\t\t\tif t1Won != t2Won {\n\t\t\t\treturn t1Won > t2Won\n\t\t\t}\n\t\t\treturn t1.Rank > t2.Rank\n\t\t})\n\t\tif len(teamsInDiv) < 3 {\n\t\t\tdivMetadata.Winner = teams[0].Team\n\t\t\tdivMetadata.Loser = teams[1].Team\n\t\t} else {\n\t\t\tdivMetadata.Winner = teams[0].Team\n\t\t\tdivMetadata.Neutral = teams[1].Team\n\t\t\tdivMetadata.Loser = teams[2].Team\n\t\t}\n\n\t\tfmt.Printf(\"Division %s Winner: %s Loser: %s Neutral: %s\\n\", div.String(), divMetadata.Winner, divMetadata.Loser, divMetadata.Neutral)\n\t\tlocalRank = append(localRank, divMetadata.Winner)\n\t\tif divMetadata.Neutral != \"\" {\n\t\t\tlocalRank = append(localRank, divMetadata.Neutral)\n\t\t}\n\t\tlocalRank = append(localRank, divMetadata.Loser)\n\t\tdivisionMetrics[div] = &divMetadata\n\t}\n\n\t// Swap ranking based on loser information.\n\tfor div := X; int(div) < len(divisionMetrics)-1; div++ {\n\t\tfor rank, team := range localRank {\n\t\t\tif team == divisionMetrics[div].Loser {\n\t\t\t\tfmt.Printf(\"Swapping %s at rank %d with %s at rank %d\\n\", team, rank, localRank[rank+1], rank+1)\n\t\t\t\tlocalRank[rank], localRank[rank+1] = localRank[rank+1], localRank[rank]\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\n\t// Create a map to upload to Firestore.\n\tfor rank, team := range localRank {\n\t\tif rank > 0 {\n\t\t\trankToUpload[strconv.Itoa(rank)] = team\n\t\t}\n\t}\n\n\t// Upload new ranking to Firestore.\n\t_, err := ranking.Doc(nextRound.String()).Set(ctx, rankToUpload)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "60494b7942090d3fa195d35b594426aa", "score": "0.43548042", "text": "func DrawWindow(layerAlias string, styleEntry memory.TuiStyleEntryType, xLocation int, yLocation int, width int, height int) 
{\n\tlayerEntry := memory.GetLayer(layerAlias)\n\tlocalAttributeEntry := memory.NewAttributeEntry()\n\tdrawWindow(layerEntry, styleEntry, localAttributeEntry, xLocation, yLocation, width, height)\n}", "title": "" }, { "docid": "2e93f699f07583475c3979192dd5ca30", "score": "0.43446425", "text": "func (ctx *Context) UpdateLeaderboard(results models.Results) {\n\tprintln(\"TODO: implement updateLeaderboard\")\n\tprintln(\"updateleaderboard: numplayed: \")\n\tprintln(results.NumPlayed)\n\t// Implement UpdateLeaderboard use the results, update the leaderboard\n\tinsert, err := ctx.Db.Exec(sqlInsertUsers, results.ActorID, results.GuesserID, results.NumRight, results.NumPlayed)\n\tif err != nil {\n\t\tprintln(\"err was not nil \" + err.Error())\n\t}\n\tprintln(insert)\n}", "title": "" }, { "docid": "c607beadc7d192b9d54b81bdab365631", "score": "0.43359283", "text": "func (recorder *StatsRecorder) Record(event Event) {\n\trecorder.Init()\n\trecorder.mutex.Lock()\n\n\tstats := recorder.current\n\n\tstats.Requests++\n\tstats.Latency.Sample(uint64(event.Latency))\n\n\tif event.Error {\n\t\tstats.Errors++\n\n\t} else if event.Timeout {\n\t\tstats.Timeouts++\n\n\t} else {\n\t\tif stats.Responses == nil {\n\t\t\tstats.Responses = make(map[int]uint64)\n\t\t}\n\t\tstats.Responses[event.Response]++\n\t}\n\n\trecorder.mutex.Unlock()\n}", "title": "" }, { "docid": "007833480c324e66723f5f49d6424b05", "score": "0.43323976", "text": "func (d *Dao) GetAwardWinRecord(c context.Context, mid int64) (awardIDs map[int64]bool, err error) {\n\tawardIDs = make(map[int64]bool)\n\trows, err := d.db.Query(c, _awardWinRecordSQL, mid)\n\tif err != nil {\n\t\tlog.Error(\"GetAwardWinRecord d.db.Query error(%v)\", err)\n\t\treturn\n\t}\n\tdefer rows.Close()\n\tfor rows.Next() {\n\t\tvar id int64\n\t\terr = rows.Scan(&id)\n\t\tif err != nil {\n\t\t\tlog.Error(\"GetAwardWinRecord rows.Scan error(%v)\", err)\n\t\t\treturn\n\t\t}\n\t\tawardIDs[id] = true\n\t}\n\terr = rows.Err()\n\treturn\n}", "title": "" }, { "docid": "941102725c8dadce7e5470d535c6385b", "score": "0.43320322", "text": "func (s *Statistics) AddPlayedGame() {\n\tatomic.AddInt64(&s.gamesPlayed, 1)\n}", "title": "" }, { "docid": "d85c6bf17d38b0c7cbb91daf4ded6f22", "score": "0.43232945", "text": "func (c *combat) winner() int {\n\thash := c.hash()\n\tif w, ok := globalCache[hash]; ok {\n\t\treturn w\n\t}\n\tw := 0\n\tfor w == 0 {\n\t\tw = c.takeTurn()\n\t}\n\tglobalCache[hash] = w\n\treturn w\n}", "title": "" }, { "docid": "63633e8ec67615462ec674d8a2f6f522", "score": "0.4317726", "text": "func TestSaveTwin(t *testing.T) {\n\t//Initialize Global Variables (Mocks)\n\tinitMocks(t)\n\tInitDBTable()\n\n\tcases := []struct {\n\t\t// name is name of the testcase\n\t\tname string\n\t\t// returnInt is first return of mock interface ormerMock\n\t\treturnInt int64\n\t\t// returnErr is second return of mock interface ormerMock which is also expected error\n\t\treturnErr error\n\t}{{\n\t\t// Success Case\n\t\tname: \"SuccessCase\",\n\t\treturnInt: int64(1),\n\t\treturnErr: nil,\n\t}, {\n\t\t// Failure Case\n\t\tname: \"FailureCase\",\n\t\treturnInt: int64(1),\n\t\treturnErr: failedDBOperationErr,\n\t},\n\t}\n\n\t// run the test cases\n\tfor _, test := range cases {\n\t\tormerMock.EXPECT().Insert(gomock.Any()).Return(test.returnInt, test.returnErr).Times(1)\n\t\terr := SaveTwin(&Twin{})\n\t\tt.Run(test.name, func(t *testing.T) {\n\t\t\tif test.returnErr != err {\n\t\t\t\tt.Errorf(\"SaveTwin case failed: wanted error %v and got error %v\", test.returnErr, err)\n\t\t\t}\n\t\t})\n\t}\n}", 
"title": "" }, { "docid": "645faedbe5e203d0932c459d4a9bb6e9", "score": "0.431039", "text": "func WinProbability(scoreDiff, spread, stdev float64) float64 {\n\treturn 1 - cdf(scoreDiff+0.5, -spread, stdev) + 0.5*(cdf(scoreDiff+0.5, -spread, stdev)-cdf(scoreDiff-0.5, -spread, stdev))\n}", "title": "" }, { "docid": "f29b2a6c1927411c0b79d3f9f634c573", "score": "0.42968765", "text": "func BuildPlayerStatsTable(summary GameSummary, table *tview.Table) int {\n\tvar rowCounter = 0\n\n\ttable.SetCell(rowCounter, 0,\n\t\ttview.NewTableCell(summary.Statistics.Away.Name).\n\t\t\tSetTextColor(tcell.ColorWhite).\n\t\t\tSetAlign(tview.AlignLeft))\n\ttable.SetCell(rowCounter, 1,\n\t\ttview.NewTableCell(\"Stats\").\n\t\t\tSetTextColor(tcell.ColorWhite).\n\t\t\tSetAlign(tview.AlignLeft))\n\n\ttable.SetCell(rowCounter, 2,\n\t\ttview.NewTableCell(\"\").\n\t\t\tSetTextColor(tcell.ColorWhite).\n\t\t\tSetAlign(tview.AlignLeft))\n\n\ttable.SetCell(rowCounter, 3,\n\t\ttview.NewTableCell(\"\").\n\t\t\tSetTextColor(tcell.ColorWhite).\n\t\t\tSetAlign(tview.AlignLeft))\n\n\ttable.SetCell(rowCounter, 4,\n\t\ttview.NewTableCell(\"\").\n\t\t\tSetTextColor(tcell.ColorWhite).\n\t\t\tSetAlign(tview.AlignLeft))\n\n\ttable.SetCell(rowCounter, 5,\n\t\ttview.NewTableCell(summary.Statistics.Home.Name).\n\t\t\tSetTextColor(tcell.ColorWhite).\n\t\t\tSetAlign(tview.AlignLeft))\n\ttable.SetCell(rowCounter, 6,\n\t\ttview.NewTableCell(\"Stats\").\n\t\t\tSetTextColor(tcell.ColorWhite).\n\t\t\tSetAlign(tview.AlignLeft))\n\n\ttable.SetCell(rowCounter, 7,\n\t\ttview.NewTableCell(\"\").\n\t\t\tSetTextColor(tcell.ColorWhite).\n\t\t\tSetAlign(tview.AlignLeft))\n\n\ttable.SetCell(rowCounter, 8,\n\t\ttview.NewTableCell(\"\").\n\t\t\tSetTextColor(tcell.ColorWhite).\n\t\t\tSetAlign(tview.AlignLeft))\n\n\ttable.SetCell(rowCounter, 9,\n\t\ttview.NewTableCell(\"\").\n\t\t\tSetTextColor(tcell.ColorWhite).\n\t\t\tSetAlign(tview.AlignLeft))\n\n\trowCounter++\n\n\trowCounter = buildPassing(table, summary, rowCounter)\n\trowCounter = buildRushing(table, summary, rowCounter)\n\trowCounter = buildReceiving(table, summary, rowCounter)\n\trowCounter = buildFumbles(table, summary, rowCounter)\n\trowCounter = buildKicking(table, summary, rowCounter)\n\trowCounter = buildKickReturns(table, summary, rowCounter)\n\trowCounter = buildPunts(table, summary, rowCounter)\n\trowCounter = buildPuntReturns(table, summary, rowCounter)\n\trowCounter = buildDefense(table, summary, rowCounter)\n\treturn rowCounter\n}", "title": "" }, { "docid": "f71bccb83cddaa157507baf1a55353d6", "score": "0.42953402", "text": "func (lg *Logs) RunStats(stats ...string) {\n\tsk := etime.Scope(etime.Train, etime.Run)\n\tlt := lg.TableDetailsScope(sk)\n\tix, _ := lt.NamedIdxView(\"RunStats\")\n\n\tspl := split.GroupBy(ix, []string{\"RunName\"})\n\tfor _, st := range stats {\n\t\tsplit.Desc(spl, st)\n\t}\n\tlg.MiscTables[\"RunStats\"] = spl.AggsToTable(etable.AddAggName)\n}", "title": "" }, { "docid": "7a31bd74ce5e6c6de68cf5cd358173f7", "score": "0.429356", "text": "func (s *Searcher) Record(ops []Operation) {\n\tfor _, op := range ops {\n\t\tswitch op.(type) {\n\t\tcase Create:\n\t\t\ts.TrialsRequested++\n\t\tcase Shutdown:\n\t\t\ts.Shutdown = true\n\t\t}\n\t}\n}", "title": "" }, { "docid": "cb97906af58b3dff93b815317ad3cd06", "score": "0.42844322", "text": "func (d *Dota2) SendDetailedGameStats(\n\tminutes []*protocol.CMsgDetailedGameStats_MinuteEntry,\n\tplayerInfo []*protocol.CMsgDetailedGameStats_PlayerInfo,\n\tgameStats protocol.CMsgDetailedGameStats_GameStats,\n) {\n\treq := 
&protocol.CMsgDetailedGameStats{\n\t\tMinutes: minutes,\n\t\tPlayerInfo: playerInfo,\n\t\tGameStats: &gameStats,\n\t}\n\td.write(uint32(protocol.EDOTAGCMsg_k_EMsgDetailedGameStats), req)\n}", "title": "" }, { "docid": "7d7693c0fa4e9b55c1e942e17955c50b", "score": "0.42746726", "text": "func Tally(in io.Reader, out io.Writer) error {\n\tleague := make(League)\n\tscanner := bufio.NewScanner(in)\n\n\tfor scanner.Scan() {\n\t\tline := strings.TrimSpace(scanner.Text())\n\n\t\tif line == \"\" || strings.HasPrefix(line, \"#\") {\n\t\t\tcontinue\n\t\t}\n\n\t\tparts := strings.Split(line, \";\")\n\n\t\tif len(parts) != 3 {\n\t\t\treturn errors.New(fmt.Sprintf(\"Invalid input: %s\", line))\n\t\t}\n\n\t\tteam1 := getTeam(league, parts[0])\n\t\tteam2 := getTeam(league, parts[1])\n\n\t\terr := processOutcome(team1, team2, parts[2])\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\twriteTable(league, out)\n\treturn nil\n}", "title": "" }, { "docid": "090f03a53d59b3bb382f39b1120ed9bc", "score": "0.42733794", "text": "func UndoBustAndWin(action undo.Action, base *BaseGame) {\n\tbase.GameState = action.PreviousGameState\n\tbase.Message = action.PreviousMessage\n\taction.Player.Score.Score = action.PreviousScore\n\taction.Player.Score.ParkScore = action.PreviousParkScore\n\taction.Player.ThrowRounds[len(action.Player.ThrowRounds)-1].Done = false\n}", "title": "" }, { "docid": "71d33459bc3acbddb26f307fa9b26adb", "score": "0.42680123", "text": "func (nsc *NilProducerStatsCollector) UpdatePutRecordsDuration(time.Duration) {}", "title": "" }, { "docid": "f78a13422a757d41a29280e9ea12dddb", "score": "0.42569533", "text": "func (r *SpanRecorder) RecordSpan(span tracer.RawSpan) {\n\tif !r.t.Alive() {\n\t\tatomic.AddInt64(&r.stats.totalSpans, 1)\n\t\tatomic.AddInt64(&r.stats.spansRejected, 1)\n\t\tif isTestSpan(span.Tags) {\n\t\t\tatomic.AddInt64(&r.stats.totalTestSpans, 1)\n\t\t\tatomic.AddInt64(&r.stats.testSpansRejected, 1)\n\t\t}\n\t\tr.logger.Printf(\"a span has been received but the recorder is not running\")\n\t\treturn\n\t}\n\tr.addSpan(span)\n}", "title": "" }, { "docid": "12fa035bf5406f46fa19dbd3eb83487f", "score": "0.42494524", "text": "func (s *Service) UpdateStatData(c context.Context, m *model.SpyStatMessage) (err error) {\n\t//TODO check event resaon\n\tif s.allEventName[m.EventName] == 0 {\n\t\tlog.Error(\"event name not found %+v\", err)\n\t\terr = ecode.SpyEventNotExist\n\t\treturn\n\t}\n\tstat := &model.Statistics{\n\t\tTargetMid: m.TargetMid,\n\t\tTargetID: m.TargetID,\n\t\tEventID: s.allEventName[m.EventName],\n\t\tState: model.WaiteCheck,\n\t\tQuantity: m.Quantity,\n\t\tCtime: time.Now(),\n\t}\n\tif stat.TargetID != 0 {\n\t\t_, ok := s.activityEvents[m.EventName]\n\t\tif ok {\n\t\t\tstat.Type = model.ActivityType\n\t\t} else {\n\t\t\tstat.Type = model.ArchiveType\n\t\t}\n\t}\n\t// add stat\n\tif model.ResetStat == m.Type {\n\t\tif _, err = s.dao.AddStatistics(c, stat); err != nil {\n\t\t\tlog.Error(\"%+v\", err)\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tif _, err = s.dao.AddIncrStatistics(c, stat); err != nil {\n\t\t\tlog.Error(\"%+v\", err)\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "11e5e3dd12c23d0202da94d2564d373a", "score": "0.42424837", "text": "func RecordMetrics(theState *state.State, logger log.Logger) {\n\tkey := ambient.NewKey(theState.AppKey, theState.APIKey)\n\tgo func() {\n\t\tfor {\n\t\t\trecordLoop(key, theState, logger)\n\t\t\ttime.Sleep(60 * time.Second)\n\t\t}\n\t}()\n}", "title": "" }, { "docid": "ba16d3566cc40c5ac6ab8b545de38393", "score": 
"0.42357773", "text": "func (brd *board) draw(t pixel.Target) error {\n\tvar coinCount int\n\tworldMap := World.worldMap\n\tblkFrame := getFrame(24, 24, 0, 5)\n\tcoinFrame := getFrame(12, 12, 16, 19)\n\tfor i := 0; i < len(worldMap); i++ {\n\t\tfor j := 0; j < len(worldMap[0]); j++ {\n\t\t\tif worldMap[i][j] == 0 {\n\t\t\t\tblock{frame: blkFrame, gridX: i, gridY: j, sheet: brd.sheet}.draw(t)\n\t\t\t} else if worldMap[i][j] == 1 {\n\t\t\t\tcoin{frame: coinFrame, gridX: i, gridY: j, sheet: brd.sheet}.draw(t)\n\t\t\t\tcoinCount += 1\n\t\t\t}\n\t\t}\n\t}\n\tif World.worldCoinCount == -1 {\n\t\tWorld.worldCoinCount = coinCount\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "1c6de09d252f9439a8b002d5dbdc0557", "score": "0.42283145", "text": "func (d *Dota2) ApplyTeamToLobby(\n\tteamID uint32,\n) {\n\treq := &protocol.CMsgApplyTeamToPracticeLobby{\n\t\tTeamId: &teamID,\n\t}\n\td.write(uint32(protocol.EDOTAGCMsg_k_EMsgGCApplyTeamToPracticeLobby), req)\n}", "title": "" }, { "docid": "29332987a5547fb918a88c6e5c6034f7", "score": "0.4227119", "text": "func (s *statsCounter) RecordHits(count uint64) {\n\tatomic.AddUint64(&s.Stats.HitCount, count)\n}", "title": "" }, { "docid": "cecbe5e00b7d322370283dcc24409a57", "score": "0.4219145", "text": "func (ss *StreamState) updateHWT(streamId common.StreamId,\n\tbucket string, hwt *common.TsVbuuid, prevSnap *common.TsVbuuid) {\n\n\tts := ss.streamBucketHWTMap[streamId][bucket]\n\tpartialSnap := false\n\n\tfor i, seq := range hwt.Seqnos {\n\t\t//if seqno has incremented, update it\n\t\tif seq > ts.Seqnos[i] {\n\t\t\tts.Seqnos[i] = seq\n\t\t\tss.streamBucketNewTsReqdMap[streamId][bucket] = true\n\t\t}\n\t\t//if snapEnd is greater than current hwt snapEnd\n\t\tif hwt.Snapshots[i][1] > ts.Snapshots[i][1] {\n\t\t\tlastSnap := ss.streamBucketLastSnapMarker[streamId][bucket]\n\t\t\t//store the prev snap marker in the lastSnapMarker map\n\t\t\tlastSnap.Snapshots[i][0] = prevSnap.Snapshots[i][0]\n\t\t\tlastSnap.Snapshots[i][1] = prevSnap.Snapshots[i][1]\n\t\t\tlastSnap.Vbuuids[i] = prevSnap.Vbuuids[i]\n\t\t\tlastSnap.Seqnos[i] = prevSnap.Seqnos[i]\n\n\t\t\t//store the new snap marker in hwt\n\t\t\tts.Snapshots[i][0] = hwt.Snapshots[i][0]\n\t\t\tts.Snapshots[i][1] = hwt.Snapshots[i][1]\n\t\t\tss.streamBucketNewTsReqdMap[streamId][bucket] = true\n\t\t\tif prevSnap.Seqnos[i] != prevSnap.Snapshots[i][1] {\n\t\t\t\tlogging.Warnf(\"StreamState::updateHWT Received Partial Last Snapshot in HWT \"+\n\t\t\t\t\t\"Bucket %v StreamId %v vbucket %v Snapshot %v-%v Seqno %v Vbuuid %v lastSnap %v-%v lastSnapSeqno %v\",\n\t\t\t\t\tbucket, streamId, i, hwt.Snapshots[i][0], hwt.Snapshots[i][1], hwt.Seqnos[i], ts.Vbuuids[i],\n\t\t\t\t\tprevSnap.Snapshots[i][0], prevSnap.Snapshots[i][1], prevSnap.Seqnos[i])\n\t\t\t\tpartialSnap = true\n\n\t\t\t}\n\n\t\t} else if hwt.Snapshots[i][1] < ts.Snapshots[i][1] {\n\t\t\t// Catch any out of order Snapshot. StreamReader should make sure that Snapshot is monotonic increasing\n\t\t\tlogging.Debugf(\"StreamState::updateHWT. Recieved a snapshot marker older than current hwt snapshot. 
\"+\n\t\t\t\t\"Bucket %v StreamId %v vbucket %v Current Snapshot %v-%v New Snapshot %v-%v\",\n\t\t\t\tbucket, streamId, i, ts.Snapshots[i][0], ts.Snapshots[i][1], hwt.Snapshots[i][0], hwt.Snapshots[i][1])\n\t\t}\n\t}\n\n\tif partialSnap {\n\t\tss.disableSnapAlignForPendingTs(streamId, bucket)\n\t}\n\n\tlogging.LazyTrace(func() string {\n\t\treturn fmt.Sprintf(\"StreamState::updateHWT HWT Updated : %v\", ts)\n\t})\n}", "title": "" }, { "docid": "aaaec7686ed3b8d75633762ca1fc03a4", "score": "0.42153773", "text": "func updateStats(failurePlan []executor.FailurePlan) {\n\thasGPS, hasBaro, hasAccel, hasCompass, hasGyro := false, false, false, false, false\n\tfor _, plan := range failurePlan {\n\t\tswitch plan.SensorFailure.SensorType {\n\t\tcase hinj.GPS:\n\t\t\thasGPS = true\n\t\tcase hinj.Barometer:\n\t\t\thasBaro = true\n\t\tcase hinj.Accelerometer:\n\t\t\thasAccel = true\n\t\tcase hinj.Compass:\n\t\t\thasCompass = true\n\t\tcase hinj.Gyroscope:\n\t\t\thasGyro = true\n\t\t}\n\t}\n\tif hasGPS {\n\t\tstatistics.unsafeFromGPS++\n\t}\n\tif hasBaro {\n\t\tstatistics.unsafeFromAccel++\n\t}\n\tif hasCompass {\n\t\tstatistics.unsafeFromCompass++\n\t}\n\tif hasAccel {\n\t\tstatistics.unsafeFromAccel++\n\t}\n\tif hasGyro {\n\t\tstatistics.unsafeFromGyro++\n\t}\n\tstatistics.totalUnsafe++\n}", "title": "" }, { "docid": "71f22a22c2519b23967f2f57960bf633", "score": "0.42066035", "text": "func (b *Boomer) RecordFailure(requestType, name string, responseTime int64, exception string) {\n\tvar runnerStats *requestStats\n\tswitch b.mode {\n\tcase DistributedWorkerMode:\n\t\trunnerStats = b.workerRunner.stats\n\tcase DistributedMasterMode:\n\t\trunnerStats = b.masterRunner.stats\n\tcase StandaloneMode:\n\t\trunnerStats = b.localRunner.stats\n\t}\n\trunnerStats.requestFailureChan <- &requestFailure{\n\t\trequestType: requestType,\n\t\tname: name,\n\t\tresponseTime: responseTime,\n\t\terrMsg: exception,\n\t}\n}", "title": "" }, { "docid": "20b3e620898fd12e103dccbd864f86c8", "score": "0.42057228", "text": "func (dsc *DefaultProducerStatsCollector) AddPutRecordsCalled(count int) {\n\tdsc.PutRecordsCalled.Inc(int64(count))\n}", "title": "" }, { "docid": "ad67b192624d7cec99d5bc3aecd56b08", "score": "0.42028582", "text": "func (r *runsAggregator) report(ctx context.Context, projects []string) error {\n\teg, ectx := errgroup.WithContext(ctx)\n\tvar pendingRunKeys []*datastore.Key\n\tvar activeRunKeys []*datastore.Key\n\tvar runStats *runStats\n\teg.Go(func() (err error) {\n\t\tq := datastore.NewQuery(common.RunKind).Eq(\"Status\", run.Status_PENDING)\n\t\tswitch pendingRunKeys, err = loadRunKeys(ectx, q, maxRuns+1); {\n\t\tcase err != nil:\n\t\t\treturn err\n\t\tcase len(pendingRunKeys) == maxRuns+1:\n\t\t\t// Outright refuse sending incomplete data.\n\t\t\tlogging.Errorf(ctx, \"FIXME: too many pending runs (>%d) to report aggregated metrics for\", maxRuns)\n\t\t\treturn errors.New(\"too many pending Runs\")\n\t\tdefault:\n\t\t\treturn nil\n\t\t}\n\t})\n\teg.Go(func() (err error) {\n\t\tq := datastore.NewQuery(common.RunKind).\n\t\t\tLt(\"Status\", run.Status_ENDED_MASK).\n\t\t\tGt(\"Status\", run.Status_PENDING)\n\t\tswitch activeRunKeys, err = loadRunKeys(ectx, q, maxRuns+1); {\n\t\tcase err != nil:\n\t\t\treturn err\n\t\tcase len(activeRunKeys) == maxRuns+1:\n\t\t\t// Outright refuse sending incomplete data.\n\t\t\tlogging.Errorf(ctx, \"FIXME: too many active runs (>%d) to report aggregated metrics for\", maxRuns)\n\t\t\treturn errors.New(\"too many active Runs\")\n\t\tdefault:\n\t\t\treturn 
nil\n\t\t}\n\t})\n\teg.Go(func() (err error) {\n\t\trunStats, err = initRunStats(ectx, projects)\n\t\treturn err\n\t})\n\tif err := eg.Wait(); err != nil {\n\t\treturn err\n\t}\n\n\teg, ectx = errgroup.WithContext(ctx)\n\tnow := clock.Now(ctx)\n\teg.Go(func() error {\n\t\treturn iterRuns(ectx, pendingRunKeys, maxRunsWorkingSet, func(r *run.Run) {\n\t\t\trunStats.addPending(r, now)\n\t\t})\n\t})\n\teg.Go(func() error {\n\t\treturn iterRuns(ectx, activeRunKeys, maxRunsWorkingSet, func(r *run.Run) {\n\t\t\trunStats.addActive(r, now)\n\t\t})\n\t})\n\tif err := eg.Wait(); err != nil {\n\t\treturn err\n\t}\n\trunStats.report(ctx)\n\treturn nil\n}", "title": "" }, { "docid": "314f5b18630ae7663ea871bd8b7379b1", "score": "0.4199505", "text": "func recordMetrics(s *shard, txn *transaction) {\n\n\t// DEPRECATED metrics, to be removed:\n\ttxCountTotal.Inc()\n\ttxMessagesTotal.Add(float64(txn.consumedCount))\n\n\ttxSecondsTotal.Add(txn.committedAt.Sub(txn.beganAt).Seconds())\n\ttxConsumeSecondsTotal.Add(txn.stalledAt.Sub(txn.beganAt).Seconds())\n\ttxStalledSecondsTotal.Add(txn.prepareBeganAt.Sub(txn.stalledAt).Seconds())\n\ttxFlushSecondsTotal.Add(txn.prepareDoneAt.Sub(txn.prepareBeganAt).Seconds())\n\ttxSyncSecondsTotal.Add(txn.committedAt.Sub(txn.prepareDoneAt).Seconds())\n\t// End DEPRECATED metrics.\n\n\tshardTxnTotal.WithLabelValues(s.FQN()).Inc()\n\tshardReadMsgsTotal.WithLabelValues(s.FQN()).Add(float64(txn.consumedCount))\n\tshardReadBytesTotal.WithLabelValues(s.FQN()).Add(float64(txn.consumedBytes))\n\tfor journal, source := range txn.checkpoint.Sources {\n\t\tshardReadHeadGauge.\n\t\t\tWithLabelValues(s.FQN(), journal.String()).\n\t\t\tSet(float64(source.ReadThrough))\n\t}\n\n\tvar (\n\t\tdurNotRunning = txn.beganAt.Sub(txn.prevPrepareDoneAt)\n\t\tdurConsuming = txn.stalledAt.Sub(txn.beganAt)\n\t\tdurStalled = txn.prepareBeganAt.Sub(txn.stalledAt)\n\t\tdurPreparing = txn.prepareDoneAt.Sub(txn.prepareBeganAt)\n\t\tdurCommitting = txn.committedAt.Sub(txn.prepareDoneAt)\n\t\tdurAcknowledging = txn.ackedAt.Sub(txn.committedAt)\n\t)\n\n\tlog.WithFields(log.Fields{\n\t\t\"id\": s.Spec().Id,\n\t\t\"10NotRunning\": durNotRunning,\n\t\t\"20Consuming\": durConsuming,\n\t\t\"30Stalled\": durStalled,\n\t\t\"40Prepare\": durPreparing,\n\t\t\"50Committing\": durCommitting,\n\t\t\"60Acknowledging\": durAcknowledging,\n\t\t\"messages\": txn.consumedCount,\n\t\t\"bytes\": txn.consumedBytes,\n\t}).Debug(\"transaction metrics\")\n\n\t// Phases which run synchronously within the transaction loop.\n\tshardTxnPhaseSecondsTotal.WithLabelValues(s.FQN(), \"10-not-running\", \"sync\").Add(durNotRunning.Seconds())\n\tshardTxnPhaseSecondsTotal.WithLabelValues(s.FQN(), \"20-consuming\", \"sync\").Add(durConsuming.Seconds())\n\tshardTxnPhaseSecondsTotal.WithLabelValues(s.FQN(), \"30-stalled\", \"sync\").Add(durStalled.Seconds())\n\tshardTxnPhaseSecondsTotal.WithLabelValues(s.FQN(), \"40-preparing\", \"sync\").Add(durPreparing.Seconds())\n\t// Phases which run asynchronously, in parallel with later transaction.\n\tshardTxnPhaseSecondsTotal.WithLabelValues(s.FQN(), \"50-committing\", \"async\").Add(durCommitting.Seconds())\n\tshardTxnPhaseSecondsTotal.WithLabelValues(s.FQN(), \"60-acknowledging\", \"async\").Add(durAcknowledging.Seconds())\n}", "title": "" }, { "docid": "8f34f80c808ad032673acbede2b76ade", "score": "0.41882357", "text": "func (self *PerfTimer) Record() {\n\tself.Stop()\n\tself.Vals = append(self.Vals, (float64)(self.cur))\n\tself.cur = 0\n}", "title": "" }, { "docid": "7c5b52213c42d5ddd703890ae8935392", 
"score": "0.41864315", "text": "func (h *Handler) GetUserJoiningFightLeaderBoard(c echo.Context) error {\n\n\tstart := time.Now()\n\n\tdb := h.DB.Clone()\n\tdefer db.Close()\n\n\tfight_id := c.Param(\"fight_id\")\n\n\tfight_user := []*model.FightUser{}\n\n\t_ = db.DB(config.NameDb).C(\"fight_user\").Find(bson.M{\n\t\t\"fight_id\": fight_id,\n\t}).Sort(\"-point\").All(&fight_user)\n\n\tfight_minitasks := []*model.FightMiniTask{}\n\t_ = db.DB(config.NameDb).C(\"fight_minitask\").Find(bson.M{\n\t\t\"fight_id\": fight_id,\n\t}).All(&fight_minitasks)\n\n\tusers := []*model.FightUserRank{}\n\tfor i := range fight_user {\n\n\t\tuser := model.User{}\n\t\tdb.DB(config.NameDb).C(\"users\").Find(bson.M{\n\t\t\t\"_id\": bson.ObjectIdHex(fight_user[i].UserID),\n\t\t}).One(&user)\n\n\t\t// Fight User Mini task\n\t\tfight_user_minitasks := []*model.FightUserMinitask{}\n\t\t_ = db.DB(config.NameDb).C(\"fight_user_minitask\").Find(bson.M{\n\t\t\t\"fight_id\": fight_id,\n\t\t\t\"user_id\": fight_user[i].UserID,\n\t\t}).All(&fight_user_minitasks)\n\n\t\tminitasks := []*model.FightUserMinitask{}\n\t\ttotal_tried := 0\n\t\tfor j:=range fight_minitasks {\n\t\t\tisFound := false\n\t\t\tminitask := &model.FightUserMinitask{}\n\t\t\tfor a:=range fight_user_minitasks {\n\t\t\t\tif fight_minitasks[j].Minitask_id == fight_user_minitasks[a].Minitask_id {\n\t\t\t\t\tisFound = true\n\t\t\t\t\tminitask = fight_user_minitasks[a]\n\t\t\t\t\tminitask.Point = GetMinitaskPoint(fight_minitasks[j].Minitask_id, h)\n\t\t\t\t\ttotal_tried += fight_user_minitasks[a].Tried\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !isFound {\n\t\t\t\tminitask = &model.FightUserMinitask{\n\t\t\t\t\tID: \"\",\n\t\t\t\t\tFight_id: fight_id,\n\t\t\t\t\tUser_id: fight_user[i].UserID,\n\t\t\t\t\tMinitask_id: \"\",\n\t\t\t\t\tStatus: \"tried\",\n\t\t\t\t\tTried: 0,\n\t\t\t\t\tPoint:\t\tGetMinitaskPoint(fight_minitasks[j].Minitask_id, h),\n\t\t\t\t\tStart_time: \"\",\n\t\t\t\t\tEnd_time: \"\",\n\t\t\t\t}\n\t\t\t}\n\t\t\tminitasks = append(minitasks, minitask)\n\t\t}\n\n\t\tvar coding_time int64 = -1\n\t\tif fight_user[i].IsUserStart {\n\t\t\tcoding_time = time.Since(fight_user[i].StartTime).Milliseconds()\n\t\t}\n\n\t\t// check if user complete or not\n\t\t// Get fight user mini task\n\t\tfight_user_minitask := []*model.FightUserMinitask{}\n\n\t\t_ = db.DB(config.NameDb).C(\"fight_user_minitask\").Find(bson.M{\n\t\t\t\"status\": bson.RegEx{\"done\", \"i\"},\n\t\t\t\"fight_id\": fight_id,\n\t\t\t\"user_id\": fight_user[i].UserID,\n\t\t}).All(&fight_user_minitask)\n\n\t\t// Get fight mini task\n\t\tfight_minitask := []*model.FightMiniTask{}\n\t\t_ = db.DB(config.NameDb).C(\"fight_minitask\").Find(bson.M{\n\t\t\t\"fight_id\": fight_id,\n\t\t}).All(&fight_minitask)\n\n\t\t// check if user done all task or not\n\t\tisUserDoneTasks := len(fight_minitask) == len(fight_user_minitask)\n\n\t\tuser_rank := &model.FightUserRank{\n\t\t\tID: user.ID,\n\t\t\tRank: i,\n\t\t\tEmail: user.Email,\n\t\t\tUserInfo:\t user,\n\t\t\tMiniTasks:\t minitasks,\n\t\t\tPoint: fight_user[i].Point,\n\t\t\tTried: total_tried,\n\t\t\tCodingTime: coding_time,\n\t\t\tIsDone:\t\t isUserDoneTasks,\n\t\t\tFinishedTime: fight_user[i].FinishedTime,\n\t\t}\n\n\t\t//if fight_user[i].IsUserStart {\n\t\t//\tfmt.Println(\"\")\n\t\t//\tduration := time.Since(fight_user[i].StartTime)\n\t\t//\tfmt.Println(user_rank.UserInfo.ID)\n\t\t//\tfmt.Println(user_rank.UserInfo.Email)\n\t\t//\tfmt.Println(fight_user[i].StartTime)\n\t\t//\tfmt.Println(duration.Nanoseconds())\n\t\t//\tfmt.Println(\"\")\n\t\t//}\n\n\t\tusers = 
append(users, user_rank)\n\t}\n\n\telapsed := time.Since(start)\n\tfmt.Printf(\"FUNC TOOKS %s\", elapsed)\n\n\t//users = SortLeaderBoard(users, fight_id, h)\n\n\treturn c.JSON(http.StatusOK, users)\n}", "title": "" }, { "docid": "fc53d684ea79a53915b72de957949533", "score": "0.41829893", "text": "func (c *combat) takeTurn() int {\n\tif c.seen[c.hash()] {\n\t\treturn 1\n\t}\n\tif len(c.d1) == 0 {\n\t\treturn 2\n\t}\n\tif len(c.d2) == 0 {\n\t\treturn 1\n\t}\n\tc.seen[c.hash()] = true\n\ta, b := c.d1[0], c.d2[0]\n\tc.d1 = c.d1[1:]\n\tc.d2 = c.d2[1:]\n\troundWinner := 0\n\tif a <= len(c.d1) && b <= len(c.d2) {\n\t\t//build and use the result of the new game.\n\t\tsubC := combat{seen: make(map[string]bool)}\n\t\tfor _, card := range c.d1[:a] {\n\t\t\tsubC.d1 = append(subC.d1, card)\n\t\t}\n\t\tfor _, card := range c.d2[:b] {\n\t\t\tsubC.d2 = append(subC.d2, card)\n\t\t}\n\t\troundWinner = subC.winner()\n\t} else if a > b {\n\t\troundWinner = 1\n\t} else if b > a {\n\t\troundWinner = 2\n\t} else {\n\t\tlog.Fatalf(\"there was a tie\")\n\t}\n\tif roundWinner == 1 {\n\t\tc.d1 = append(c.d1, a, b)\n\t} else if roundWinner == 2 {\n\t\tc.d2 = append(c.d2, b, a)\n\t} else {\n\t\tlog.Fatal(\"each round must have a winner\")\n\t}\n\treturn 0\n}", "title": "" }, { "docid": "00168a7bdd076c61fdd7e27589070f6c", "score": "0.41768366", "text": "func (o *CMFTurntableWin) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no cmf_turntable_win provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(cmfTurntableWinColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLCMFTurntableWinUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tcmfTurntableWinUpsertCacheMut.RLock()\n\tcache, cached := cmfTurntableWinUpsertCache[key]\n\tcmfTurntableWinUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tcmfTurntableWinAllColumns,\n\t\t\tcmfTurntableWinColumnsWithDefault,\n\t\t\tcmfTurntableWinColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tcmfTurntableWinAllColumns,\n\t\t\tcmfTurntableWinPrimaryKeyColumns,\n\t\t)\n\n\t\tif !updateColumns.IsNone() && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert cmf_turntable_win, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"`cmf_turntable_win`\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `cmf_turntable_win` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), 
\",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(cmfTurntableWinType, cmfTurntableWinMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(cmfTurntableWinType, cmfTurntableWinMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for cmf_turntable_win\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = uint64(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == cmfTurntableWinMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(cmfTurntableWinType, cmfTurntableWinMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for cmf_turntable_win\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, nzUniqueCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for cmf_turntable_win\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tcmfTurntableWinUpsertCacheMut.Lock()\n\t\tcmfTurntableWinUpsertCache[key] = cache\n\t\tcmfTurntableWinUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "title": "" }, { "docid": "ed773a19911ba562ba59b6b803a5ed6e", "score": "0.41727957", "text": "func (sm *SelfMon) AddMetrics(t string, id string, fields map[string]interface{}, devtags, statustags map[string]string) {\n\tif !sm.IsInitialized() {\n\t\treturn\n\t}\n\tsm.mutex.Lock()\n\tdefer sm.mutex.Unlock()\n\t// Selfmon tags\n\ttagMap := make(map[string]string)\n\tfor k, v := range sm.TagMap {\n\t\ttagMap[k] = v\n\t}\n\t// device user configured extra tags (only if inherited)\n\tif sm.cfg.InheritDeviceTags {\n\t\tfor k, v := range devtags {\n\t\t\ttagMap[k] = v\n\t\t}\n\t}\n\t// status tags for device\n\tfor k, v := range statustags {\n\t\ttagMap[k] = v\n\t}\n\n\tswitch t {\n\tcase \"measurement\":\n\t\ttagMap[\"meas_name\"] = id\n\tcase \"device\":\n\t\ttagMap[\"device\"] = id\n\t}\n\n\ttagMap[\"type\"] = t\n\n\tnow := time.Now()\n\tpt, err := client.NewPoint(\n\t\tsm.RtMeasName,\n\t\ttagMap,\n\t\tfields,\n\t\tnow)\n\tif err != nil {\n\t\tlog.Warnf(\"Error on compute Stats data Point %+v for %s : %s: Error:%s\", fields, t, id, err)\n\t\treturn\n\t}\n\n\t(*sm.bps).AddPoint(pt)\n}", "title": "" }, { "docid": "edea991c6ea0f6f82fa2d51a4a64b689", "score": "0.41592747", "text": "func applyMove(s *State, race Race, target Coordinates, count uint8, winThreshold float64) 
[2]PotentialState {\n\n\tendCell := s.Grid[target]\n\n\tif endCell.IsEmpty() || race == endCell.Race {\n\t\t// nobody on there, or same race as ours, no battle and we can just increase the count\n\n\t\t// Update the cells\n\t\ts.SetCell(target, race, endCell.Count+count)\n\t\treturn [2]PotentialState{{State: s, P: 1}}\n\t}\n\n\t// Fight with the enemy or neutral\n\t// We use a float here for later computation\n\tvar isNeutral float64 = 0\n\tif endCell.Race == Neutral {\n\t\tisNeutral = 1\n\t}\n\n\tP := WinProbability(count, endCell.Count, isNeutral == 1)\n\n\tif P >= winThreshold {\n\t\t// Consider it a win situation given the probability\n\t\tendCount := uint8(P*float64(count) + isNeutral*float64(endCell.Count)*P)\n\t\ts.SetCell(target, race, endCount)\n\t\treturn [2]PotentialState{{State: s, P: 1}}\n\t} else if P < 1-winThreshold {\n\t\t// Consider it a lose situation given the probability\n\t\tendCount := uint8((1 - P) * float64(endCell.Count))\n\t\ts.SetCell(target, endCell.Race, endCount)\n\t\treturn [2]PotentialState{{State: s, P: 1}}\n\t}\n\n\twinState := s\n\n\twinState.SetCell(\n\t\ttarget,\n\t\trace,\n\t\t// each ally has probability P to survive. Against neutral, we have a probability P to convert them\n\t\tuint8(P*float64(count)+isNeutral*float64(endCell.Count)*P),\n\t)\n\n\tlossState := s.Copy(false)\n\tlossState.SetCell(\n\t\ttarget,\n\t\tendCell.Race,\n\t\t// each enemy has probability 1-P to survive\n\t\tuint8((1-P)*float64(endCell.Count)),\n\t)\n\n\treturn [2]PotentialState{\n\t\t{State: winState, P: P},\n\t\t{State: lossState, P: 1 - P},\n\t}\n}", "title": "" } ]
5b69fa4019b933c1c1a6092b1dc78966
WriteBlock serializes a block into the database, header and body separately.
[ { "docid": "8ac1703cf5f5a8fcc8da14e4dcdf3a20", "score": "0.8202536", "text": "func WriteBlock(db database.KeyValueWriter, block *model.Block) {\n\tWriteBody(db, block.Hash(), block.NumberU64(), block.Body())\n\tWriteHeader(db, block.Header())\n\tlog.Debugf(\"DB WriteBlock. number:%d, hash:%x, txs:%d, uncles:%d\", block.NumberU64(), block.Hash(), len(block.Body().Transactions), len(block.Body().Uncles))\n}", "title": "" } ]
[ { "docid": "b027e846e2a53e877bff6b2f7d42cca7", "score": "0.75927466", "text": "func (blockStore *BlockStore) WriteBlock(block *types.Block) error {\n\tbatch := blockStore.store.NewBatch()\n\terr := blockStore.writeBlockByBatch(batch, block)\n\tif err != nil {\n\t\tbatch.Reset()\n\t\treturn err\n\t}\n\terr = batch.Write()\n\tif err != nil {\n\t\tlog.Error(\"failed to commit block %x to database, as: %v\", block.HeaderHash, err)\n\t\treturn err\n\t}\n\n\t// update current block\n\tblockStore.recordCurrentBlock(block)\n\treturn nil\n}", "title": "" }, { "docid": "f22cbbfeaf6d7b7688dda747a47e4fa3", "score": "0.75409096", "text": "func WriteBlock(b world.Block) map[string]interface{} {\n\tname, properties := b.EncodeBlock()\n\treturn map[string]interface{}{\n\t\t\"name\": name,\n\t\t\"states\": properties,\n\t\t\"version\": chunk.CurrentBlockVersion,\n\t}\n}", "title": "" }, { "docid": "b9490cae50269849df7939d1ed32e7d0", "score": "0.7442108", "text": "func (w *Writer) writeBlock() {\n\t_, w.err = w.w.Write(w.buf[w.written:])\n\tw.i = 0\n\tw.j = headerSize\n\tw.written = 0\n}", "title": "" }, { "docid": "2a37b3df4059ddaa7890f7be9fd21906", "score": "0.705803", "text": "func writeBlock(m map[string]interface{}, b world.Block) {\n\tm[\"name\"], m[\"states\"] = b.EncodeBlock()\n\tm[\"version\"] = chunk.CurrentBlockVersion\n}", "title": "" }, { "docid": "308d38d35d30153e2adb836fa7cc43ee", "score": "0.7048143", "text": "func (bb *BytesBuffer) WriteBlock(key, val []byte, md *meta.Meta) (*meta.Meta, error) {\n\t_, err := bb.Buffer.Write(val)\n\treturn md, err\n}", "title": "" }, { "docid": "3531f63fe01e41a198c0e3548820da75", "score": "0.7027761", "text": "func (t *tsmWriter) WriteBlock(key []byte, minTime, maxTime int64, block []byte) error {\n\tif len(key) > maxKeyLength {\n\t\treturn ErrMaxKeyLengthExceeded\n\t}\n\n\t// Nothing to write\n\tif len(block) == 0 {\n\t\treturn nil\n\t}\n\n\tblockType, err := BlockType(block)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Write header only after we have some data to write.\n\tif t.n == 0 {\n\t\tif err := t.writeHeader(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tvar checksum [crc32.Size]byte\n\tbinary.BigEndian.PutUint32(checksum[:], crc32.ChecksumIEEE(block))\n\n\t_, err = t.w.Write(checksum[:])\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tn, err := t.w.Write(block)\n\tif err != nil {\n\t\treturn err\n\t}\n\tn += len(checksum)\n\n\t// Record this block in index\n\tt.index.Add(key, blockType, minTime, maxTime, t.n, uint32(n))\n\n\t// Increment file position pointer (checksum + block len)\n\tt.n += int64(n)\n\n\tif len(t.index.Entries(key)) >= maxIndexEntries {\n\t\treturn ErrMaxBlocksExceeded\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "857e90d306c85f1e1bf14cd1af11eacb", "score": "0.69470906", "text": "func (this *BlockStore) SaveBlock(block *types.Block) error {\n\tif this.enableCache {\n\t\tthis.cache.AddBlock(block)\n\t}\n\tblockHeight := block.Header.Height\n\terr := this.SaveHeader(block) // head+txhash set\n\tif err != nil {\n\t\treturn fmt.Errorf(\"SaveHeader error %s\", err)\n\t}\n\tpubStrs := make([]string, 0, 3)\n\n\tfor _, v := range block.Transactions {\n\t\tif v.TxType == types.AuthX { // The current star nodes defined by Genesis will generate transactions.\n\t\t\tpubStrs = append(pubStrs, hex.EncodeToString(v.TxData.NodePub))\n\t\t}\n\t\terr = this.SaveTransaction(v, blockHeight)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"SaveTransaction block height %d tx %s err %s\", blockHeight, v.Hash().String(), 
err)\n\t\t}\n\t}\n\tnode.PushStars(pubStrs)\n\tthis.saveSigData(block.Sigs, blockHeight)\n\treturn nil\n}", "title": "" }, { "docid": "ebbca5592386de6d2826d68e2d1128b8", "score": "0.68366987", "text": "func (g *generator) WriteBlock(output io.Writer, round uint64) {\n\tif round != g.round {\n\t\tfmt.Printf(\"Generator only supports sequential block access. Expected %d but received request for %d.\", g.round, round)\n\t}\n\n\t// Generate the transactions\n\ttransactions := make([]types.SignedTxnInBlock, 0, g.config.TxnPerBlock)\n\tfor i := uint64(0); i < g.config.TxnPerBlock; i++ {\n\t\ttxn, err := g.generateTransaction(g.round, i)\n\t\tstxnib := types.SignedTxnInBlock{\n\t\t\tSignedTxnWithAD: txn,\n\t\t\tHasGenesisID: true,\n\t\t\tHasGenesisHash: true,\n\t\t}\n\t\tif err != nil {\n\t\t\tpanic(fmt.Sprintf(\"failed to generate transaction: %v\\n\", err))\n\t\t}\n\t\ttransactions = append(transactions, stxnib)\n\t}\n\n\tblock := types.Block{\n\t\tBlockHeader: types.BlockHeader{\n\t\t\tRound: types.Round(g.round),\n\t\t\tBranch: types.BlockHash{},\n\t\t\tSeed: types.Seed{},\n\t\t\tTxnRoot: types.Digest{},\n\t\t\tTimeStamp: g.timestamp,\n\t\t\tGenesisID: g.genesisID,\n\t\t\tGenesisHash: g.genesisHash,\n\t\t\tRewardsState: types.RewardsState{\n\t\t\t\tFeeSink: g.feeSink,\n\t\t\t\tRewardsPool: g.rewardsPool,\n\t\t\t\tRewardsLevel: 0,\n\t\t\t\tRewardsRate: 0,\n\t\t\t\tRewardsResidue: 0,\n\t\t\t\tRewardsRecalculationRound: 0,\n\t\t\t},\n\t\t\tUpgradeState: types.UpgradeState{\n\t\t\t\tCurrentProtocol: g.protocol,\n\t\t\t},\n\t\t\tUpgradeVote: types.UpgradeVote{},\n\t\t\tTxnCounter: g.txnCounter,\n\t\t\tCompactCert: nil,\n\t\t},\n\t\tPayset: types.Payset(transactions),\n\t}\n\n\tcert := types.EncodedBlockCert{\n\t\tBlock: block,\n\t\tCertificate: types.Certificate{},\n\t}\n\n\tg.txnCounter += g.config.TxnPerBlock\n\tg.timestamp += consensusTimeMilli\n\tg.round++\n\n\tfmt.Println(g.txnCounter)\n\toutput.Write(msgpack.Encode(cert))\n}", "title": "" }, { "docid": "3258b6f58da8cb40a813c83ff5096691", "score": "0.6667316", "text": "func (rasterBand RasterBand) WriteBlock(xOff, yOff int, dataPtr unsafe.Pointer) error {\n\treturn C.GDALWriteBlock(rasterBand.cval, C.int(xOff), C.int(yOff), dataPtr).Err()\n}", "title": "" }, { "docid": "b00a53c56d36339e15be9f10f3f02b34", "score": "0.66576725", "text": "func (w *EncWriter) writeBlock() error {\n\tvar nonce [24]byte\n\t_, err := io.ReadFull(rand.Reader, nonce[:])\n\tif err != nil {\n\t\tpanic(\"could not read entropy for encryption\")\n\t}\n\n\tencryptedData := box.Seal(nil, w.buf, &nonce, &w.peersPublicKey, &w.secretKey)\n\tw.buf = nil\n\n\t_, err = w.out.Write(nonce[:])\n\tif err != nil {\n\t\treturn err\n\t}\n\tblockSize := uint64(len(encryptedData))\n\terr = binary.Write(w.out, binary.LittleEndian, blockSize)\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = w.out.Write(encryptedData)\n\treturn err\n}", "title": "" }, { "docid": "ddc9d3b532d096c0326f6453b83e8a67", "score": "0.6632464", "text": "func handleWriteBlock(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tvar msg Document\n\n\tdecoder := json.NewDecoder(r.Body)\n\tif err := decoder.Decode(&msg); err != nil {\n\t\trespondWithJSON(w, r, http.StatusBadRequest, r.Body)\n\t\treturn\n\t}\n\tdefer r.Body.Close()\n\n\tmutex.Lock()\n\tvar prevBlock *Block\n\tif len(Blockchain) > 0 {\n\t\tprevBlock = &Blockchain[len(Blockchain)-1]\n\n\t}\n\tnewBlock, err := generateBlock(prevBlock, msg)\n\tif err != nil {\n\t\trespondWithJSON(w, r, 
http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\n\tif len(Blockchain) == 0 || isBlockValid(newBlock, *prevBlock) {\n\t\tBlockchain = append(Blockchain, newBlock)\n\t}\n\tmutex.Unlock()\n\n\trespondWithJSON(w, r, http.StatusCreated, newBlock)\n\tgo publishBlockchain()\n}", "title": "" }, { "docid": "bdd7ae9e5875c6ef5ccb9f4884854116", "score": "0.66184896", "text": "func (b *InMemBlockStorage) WriteBlock(index uint, buffer *bytes.Buffer) (n int, err error) {\n\tif b.blockLen <= int(index) {\n\t\treturn 0, io.EOF\n\t}\n\n\tblock := &b.blocks[int(index)]\n\tminSize := buffer.Len()\n\tif minSize > b.blockSize {\n\t\tminSize = b.blockSize\n\t}\n\n\tnCopied := copy(block.data, buffer.Bytes()[0:minSize])\n\n\treturn nCopied, nil\n}", "title": "" }, { "docid": "66024238820f4356c9a1500ff20bc959", "score": "0.653599", "text": "func (bs *BlockStore) SaveBlock(block *types.Block, blockParts *types.PartSet, seenCommit *types.Commit, receipts *types.Receipts, txsResult *types.TxsResult) {\n\tif block == nil {\n\t\tcmn.PanicSanity(\"BlockStore can only save a non-nil block\")\n\t}\n\n\theight := block.Height\n\tif height > types.BlockHeightZero {\n\t\tif g, w := height, bs.Height()+1; g != w {\n\t\t\tcmn.PanicSanity(cmn.Fmt(\"BlockStore can only save contiguous blocks. Wanted %v, got %v\", w, g))\n\t\t}\n\t}\n\n\tif !blockParts.IsComplete() {\n\t\tcmn.PanicSanity(cmn.Fmt(\"BlockStore can only save complete block part sets\"))\n\t}\n\n\tbsBatch := bs.db.NewBatch()\n\t// Save block parts\n\tfor i := 0; i < blockParts.Total(); i++ {\n\t\tpart := blockParts.GetPart(i)\n\t\tbs.saveBlockPart(height, i, part, bsBatch)\n\t}\n\n\t// Save block commit (duplicate and separate from the Block)\n\tif height > types.BlockHeightZero {\n\t\tblockCommitBytes := ser.MustEncodeToBytes(block.LastCommit)\n\t\tbsBatch.Set(calcBlockCommitKey(height-1), blockCommitBytes)\n\t}\n\n\t// Save seen commit (seen +2/3 precommits for block)\n\t// NOTE: we can delete this at a later height\n\tif seenCommit != nil || height > types.BlockHeightZero {\n\t\tseenCommitBytes := ser.MustEncodeToBytes(seenCommit)\n\t\tbsBatch.Set(calcSeenCommitKey(height), seenCommitBytes)\n\t}\n\n\tvar wg sync.WaitGroup\n\twg.Add(3)\n\tgo func() {\n\t\t// Save block Receipts\n\t\tbs.saveReceipts(height, receipts)\n\t\twg.Done()\n\t}()\n\n\tgo func() {\n\t\t// Save process result of transactions\n\t\tbs.saveTxsResult(height, txsResult)\n\t\twg.Done()\n\t}()\n\n\tgo func() {\n\t\tif bs.crossState != nil {\n\t\t\t// Save index\n\t\t\tbs.crossState.SaveTxEntry(block, txsResult)\n\t\t\t// Save specialtx\n\t\t\tbs.crossState.AddSpecialTx(txsResult.SpecialTxs())\n\t\t\t// flush\n\t\t\tbs.crossState.Sync()\n\t\t}\n\t\twg.Done()\n\t}()\n\n\twg.Wait()\n\n\t// Save block meta\n\tblockMeta := types.NewBlockMeta(block, blockParts)\n\tmetaBytes := ser.MustEncodeToBytes(blockMeta)\n\tbsBatch.Set(calcBlockMetaKey(height), metaBytes)\n\tbsBatch.Set(calcBlockHashKey(block.Hash()), metaBytes)\n\n\t// Commit block store db batch\n\tbsBatch.Commit()\n\n\t// Save new BlockStoreStateJSON descriptor\n\tBlockStoreStateJSON{Height: height}.Save(bs.db)\n\n\t// Done!\n\tbs.mtx.Lock()\n\tbs.height = height\n\tbs.mtx.Unlock()\n\n\t// Flush\n\tbs.db.SetSync(nil, nil)\n}", "title": "" }, { "docid": "94a63bccd8063ed73144a22964eb9a9e", "score": "0.6466647", "text": "func (bs *BlockStore) SaveBlock(block *Block) {\n\tif block == nil {\n\t\tpanic(\"BlockStore can only save a non-nil block\")\n\t}\n\n\tbz, err := vbs.Marshal(block)\n\tif err != nil {\n\t\tpanic(\"vbs.Marshal() 
failed\")\n\t}\n\n\tkey := blockKey(block.Height, block.Hash())\n\tbs.kvdb.Put(key, bz)\n\n\tif block.Height == atomic.LoadInt64(&bs.height) + 1 {\n\t\theight := atomic.AddInt64(&bs.height, 1)\n\t\t_BlockState{Height:height}.Save(bs.kvdb)\n\t}\n}", "title": "" }, { "docid": "0327babbaafd8dd6d26f4967e56f77be", "score": "0.6451328", "text": "func (dao *blockDAO) putBlock(blk *Block) error {\n\theight := utils.Uint64ToBytes(blk.Height())\n\tserialized, err := blk.Serialize()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to serialize block\")\n\t}\n\thash := blk.HashBlock()\n\tif err = dao.kvstore.PutIfNotExists(blockNS, hash[:], serialized); err != nil {\n\t\treturn errors.Wrap(err, \"failed to put block\")\n\t}\n\thashKey := append(hashPrefix, hash[:]...)\n\tif err = dao.kvstore.Put(blockHashHeightMappingNS, hashKey, height); err != nil {\n\t\treturn errors.Wrap(err, \"failed to put hash -> height mapping\")\n\t}\n\theightKey := append(heightPrefix, height...)\n\tif err = dao.kvstore.Put(blockHashHeightMappingNS, heightKey, hash[:]); err != nil {\n\t\treturn errors.Wrap(err, \"failed to put height -> hash mapping\")\n\t}\n\tvalue, err := dao.kvstore.Get(blockNS, topHeightKey)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to get top height\")\n\t}\n\ttopHeight := common.MachineEndian.Uint64(value)\n\tif blk.Height() > topHeight {\n\t\tif err = dao.kvstore.Put(blockNS, topHeightKey, height); err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to put top height\")\n\t\t}\n\t}\n\t// map Tx hash to block hash\n\tfor _, tx := range blk.Tranxs {\n\t\ttxHash := tx.Hash()\n\t\thashKey := append(txPrefix, txHash[:]...)\n\t\tif err = dao.kvstore.Put(blockTxBlockMappingNS, hashKey, hash[:]); err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to put tx hash %x\", txHash)\n\t\t}\n\t}\n\n\tfor _, transfer := range blk.Transfers {\n\t\ttransferHash := transfer.Hash()\n\n\t\t// get transfers count for sender\n\t\tsenderTransferCount, err := dao.getTransferCountBySenderAddress(transfer.Sender)\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"for sender %x\", transfer.Sender)\n\t\t}\n\n\t\t// put new transfer to sender\n\t\tsenderKey := append(transferFromPrefix, transfer.Sender...)\n\t\tsenderKey = append(senderKey, utils.Uint64ToBytes(senderTransferCount)...)\n\t\tif err = dao.kvstore.PutIfNotExists(blockAddressTransferMappingNS, senderKey, transferHash[:]); err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to put transfer hash %x for sender %x\",\n\t\t\t\ttransfer.Hash(), transfer.Sender)\n\t\t}\n\n\t\t// update sender transfers count\n\t\tsenderTransferCountKey := append(transferFromPrefix, transfer.Sender...)\n\t\tif err = dao.kvstore.Put(blockAddressTransferCountMappingNS, senderTransferCountKey,\n\t\t\tutils.Uint64ToBytes(senderTransferCount+1)); err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to bump transfer count %x for sender %x\",\n\t\t\t\ttransfer.Hash(), transfer.Sender)\n\t\t}\n\n\t\t// get transfers count for recipient\n\t\trecipientTransferCount, err := dao.getTransferCountByRecipientAddress(transfer.Recipient)\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"for recipient %x\", transfer.Recipient)\n\t\t}\n\n\t\t// put new transfer to recipient\n\t\trecipientKey := append(transferToPrefix, transfer.Recipient...)\n\t\trecipientKey = append(recipientKey, utils.Uint64ToBytes(recipientTransferCount)...)\n\t\tif err = dao.kvstore.PutIfNotExists(blockAddressTransferMappingNS, recipientKey, transferHash[:]); err != nil {\n\t\t\treturn errors.Wrapf(err, 
\"failed to put transfer hash %x for recipient %x\",\n\t\t\t\ttransfer.Hash(), transfer.Recipient)\n\t\t}\n\n\t\t// update recipient transfers count\n\t\trecipientTransferCountKey := append(transferToPrefix, transfer.Recipient...)\n\t\tif err = dao.kvstore.Put(blockAddressTransferCountMappingNS, recipientTransferCountKey,\n\t\t\tutils.Uint64ToBytes(recipientTransferCount+1)); err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to bump transfer count %x for recipient %x\",\n\t\t\t\ttransfer.Hash(), transfer.Recipient)\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "cb44411247a66380bc865b51091f17b4", "score": "0.64349633", "text": "func SerializeBlock(b *Block) []byte{\n\tvar result bytes.Buffer\n\tencoder := gob.NewEncoder(&result)\n\n\terr := encoder.Encode(b)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\treturn result.Bytes()\n}", "title": "" }, { "docid": "f43571528204f5982cc1ac619466f6b6", "score": "0.64292717", "text": "func (blockStore *BlockStore) writeBlockByBatch(batch dbstore.Batch, block *types.Block) error {\n\t// write block\n\tlog.Info(\"Start writing block %x to database.\", block.HeaderHash)\n\tblockByte, err := encodeEntity(block)\n\tif err != nil {\n\t\tlog.Error(\"Failed to encode block %v to byte, as: %v \", block, err)\n\t\treturn fmt.Errorf(\"Failed to encode block %v to byte, as: %v \", block, err)\n\t}\n\n\tblockHash := common.HeaderHash(block)\n\tif !bytes.Equal(blockHash[:], block.HeaderHash[:]) {\n\t\tlog.Error(\"Invalid block, as block's hash %x is not same to expected %x \", blockHash, block.HeaderHash)\n\t\treturn fmt.Errorf(\"Invalid block, as block's hash %x is not same to expected %x \", blockHash, block.HeaderHash)\n\t}\n\terr = batch.Put(append(blockPrefix, common.HashToBytes(blockHash)...), blockByte)\n\tif err != nil {\n\t\tlog.Error(\"Failed to write block %x to database, as: %v \", blockHash, err)\n\t\treturn fmt.Errorf(\"Failed to write block %x to database, as: %v \", blockHash, err)\n\t}\n\n\t// write block height and hash mapping\n\terr = batch.Put(append(blockHeightPrefix, encodeBlockHeight(block.Header.Height)...), common.HashToBytes(blockHash))\n\tif err != nil {\n\t\tlog.Error(\"Failed to record the mapping between block and height\")\n\t\treturn fmt.Errorf(\"Failed to record the mapping between block and height \")\n\t}\n\n\t// write tx lookup index\n\terr = blockStore.writeTxLookUpIndex(batch, blockHash, block.Header.Height, block.Transactions)\n\tif err != nil {\n\t\tlog.Error(\"Failed to record the tx lookup index from block %x\", blockHash)\n\t\treturn fmt.Errorf(\"Failed to record the tx lookup index from block %x \", blockHash)\n\t}\n\n\t// update latest block\n\terr = batch.Put([]byte(latestBlockKey), common.HashToBytes(blockHash))\n\tif err != nil {\n\t\tlog.Warn(\"Failed to record latest block, as: %v. 
we will still use the previous latest block as current latest block \", err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "5c29f468751fade5ef1ccaf5bef59485", "score": "0.636466", "text": "func (a *ApiDB) CreateBlock(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Add(\"Content-Type\", \"application/json\")\n\tp := MODELS.BLOCKS{}\n\terr := json.NewDecoder(r.Body).Decode(&p)\n\tif err != nil {\n\t\tio.WriteString(w, `{\"message\": \"wrong format!\"}`+err.Error())\n\t\treturn\n\t}\n\n\tresult, _ := BUSINESS.CreateBlock(a.Db, p)\n\tif result {\n\t\tio.WriteString(w, `{ \"status\": 200,\n \t\t\t\t\t\t\"message\": \"Create block success\",\n \t\t\t\t\t\t\t\"data\": {\n \t\t\t\t\t\t\"status\": 1\n \t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}`)\n\t} else {\n\t\tio.WriteString(w, `{ \"message\": \"Can’t create block \"}`)\n\t}\n}", "title": "" }, { "docid": "0357460f4204056b96dd14c676bbce7a", "score": "0.63646406", "text": "func writeBlock(b wire.MsgBlock, tipnum int, f *os.File,\n\tbatchan chan *leveldb.Batch, wg *sync.WaitGroup) error {\n\n\t//s is the string that gets written to .txos\n\t//var s string\n\n\tblockBatch := new(leveldb.Batch)\n\n\tfor blockindex, tx := range b.Transactions {\n\t\tfor _, in := range tx.TxIn {\n\t\t\tif blockindex > 0 { // skip coinbase \"spend\"\n\t\t\t\t//hashing because blockbatch wants a byte slice\n\t\t\t\topString := in.PreviousOutPoint.String()\n\t\t\t\th := simutil.HashFromString(opString)\n\t\t\t\t//s += \"-\" + opString + \"\\n\"\n\t\t\t\t//fmt.Println(\"just txin:\", in.PreviousOutPoint)\n\t\t\t\t//fmt.Println(\"hight:\", tipnum)\n\t\t\t\tblockBatch.Put(h[:], simutil.U32tB(uint32(tipnum)))\n\t\t\t}\n\t\t}\n\n\t\t// creates all txos up to index indicated\n\t\t//s += \"+\" + wire.OutPoint{tx.TxHash(), uint32(len(tx.TxOut))}.String()\n\n\t\t//for i, out := range tx.TxOut {\n\t\t//\tif isUnspendable(out) {\n\t\t//\t\ts += \"z\" + fmt.Sprintf(\"%d\", i)\n\t\t//\t}\n\t\t//}\n\t\t//s += \"\\n\"\n\t}\n\n\t//fmt.Printf(\"--- sending off %d dels at tipnum %d\\n\", batch.Len(), tipnum)\n\twg.Add(1)\n\t//sent to dbworker to be written to ttldb asynchronously\n\tbatchan <- blockBatch\n\n\t//s += fmt.Sprintf(\"h: %d\\n\", tipnum)\n\t//write to the .txos file\n\t//_, err := f.WriteAt(simutil.U32tB(uint32(tipnum))[:], 0)\n\t_, err := f.WriteAt(simutil.U32tB(uint32(tipnum)), 0)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "4b1f1d1a86e5f71d8d009b4ed1d77340", "score": "0.6339767", "text": "func (r *Receiver) WriteFileBlock(ctx context.Context, req *WriteFileBlockRequest) (*EmptyResponse, error) {\n\t// TODO: We should cache the filedescriptor and don't reopen it between each call.\n\tfh, err := os.OpenFile(req.GetFilePath(), os.O_WRONLY|os.O_CREATE, 0644)\n\tif err != nil {\n\t\tfmt.Fprintf(os.Stderr, \"Error opening %s: %s\\n\", req.GetFilePath(), err.Error())\n\t\treturn &EmptyResponse{}, err\n\t}\n\n\tdefer fh.Close()\n\n\t_, err = fh.WriteAt(req.GetData(), req.GetOffset())\n\tif err != nil {\n\t\tfmt.Fprintf(os.Stderr, \"Error writing %d bytes to %s @ offset %d: %s\\n\", req.GetSize(), req.GetFilePath(), req.GetOffset(), err.Error())\n\t\treturn &EmptyResponse{}, err\n\t}\n\n\treturn &EmptyResponse{}, nil\n}", "title": "" }, { "docid": "de44476e141ed9029b32682e88c219ff", "score": "0.63388", "text": "func (r *Renderer) renderBlock(node *ast.Block, w *render.BufferedWriter) error {\n\tr.writeCommentNode(w, false, \"\", node.ObjComment)\n\tw.Printf(\"{\\n\")\n\tfor _, n := range node.Nodes {\n\t\tif err := r.renderNode(n, 
w); err != nil {\n\t\t\treturn fmt.Errorf(\"unable to render node in block: %w\", err)\n\t\t}\n\t}\n\n\tw.Printf(\"}\\n\")\n\n\treturn nil\n}", "title": "" }, { "docid": "e93b53dd477b6ffe4051d511cf42400d", "score": "0.63376784", "text": "func (chain *BlockChain) StoreBlock(block *types.Block) error {\n\n\thash := block.BlockHash()\n\tdata, err := block.Marshal()\n\tif err != nil {\n\t\treturn err\n\t}\n\tchain.db.Put(BlockKey(hash), data)\n\treturn nil\n}", "title": "" }, { "docid": "7b501605a90164adf811e1f6238e81a0", "score": "0.63052905", "text": "func (tt *Block) WriteHeader(out io.Writer) (int, error) {\n outCount := 0\n\n // version bytes\n err := binary.Write(out, binary.LittleEndian, uint32(tt.version))\n if err != nil {\n return outCount, err\n }\n outCount += 4\n // previous block\n n, err := out.Write(tt.prevBlock.Bytes())\n outCount += n\n if err != nil {\n return outCount, err\n }\n // merkle root: from cache or computed\n var merkleRoot Hash\n if tt.merkleRoot != nil {\n merkleRoot = *tt.merkleRoot\n } else {\n merkleRoot = tt.MerkleRoot()\n }\n n, err = out.Write(merkleRoot.Bytes())\n outCount += n\n if err != nil {\n return outCount, err\n }\n // timestamp\n err = binary.Write(out, binary.LittleEndian, uint32(tt.timestamp.Unix()))\n if err != nil {\n return outCount, err\n }\n outCount += 4\n // target bits\n err = binary.Write(out, binary.LittleEndian, uint32(tt.targetBits))\n if err != nil {\n return outCount, err\n }\n outCount += 4\n // nonce\n err = binary.Write(out, binary.LittleEndian, uint32(tt.nonce))\n if err != nil {\n return outCount, err\n }\n outCount += 4\n\n return outCount, nil\n}", "title": "" }, { "docid": "1671b60f37226cfb73192cfb8419b937", "score": "0.62373894", "text": "func (bc *BlockChain) writeBlockInfo(block *Block) {\n\tbc.LastBlockNumber++\n\tbi := BlockInfo{Number: bc.LastBlockNumber, Hash: block.Hash(), Parent: block.PrevHash}\n\n\t// For now we use the block hash with the words \"info\" appended as key\n\tethutil.Config.Db.Put(append(block.Hash(), []byte(\"Info\")...), bi.RlpEncode())\n}", "title": "" }, { "docid": "1050e330c4a4c389484aedf8aa2a5ebe", "score": "0.6236485", "text": "func (s *Server) handleWriteBlock(w http.ResponseWriter, r *http.Request) {\n\tvar m Message\n\n\tdecoder := json.NewDecoder(r.Body)\n\tif err := decoder.Decode(&m); err != nil {\n\t\trespondWithJSON(w, r, http.StatusBadRequest, r.Body)\n\t\treturn\n\t}\n\tdefer r.Body.Close()\n\n\tnewBlock, err := s.blockchainService.Add(m.Payload)\n\tif err != nil {\n\t\trespondWithJSON(w, r, http.StatusInternalServerError, m)\n\t\treturn\n\t}\n\n\trespondWithJSON(w, r, http.StatusCreated, newBlock)\n}", "title": "" }, { "docid": "7fe365737d4e4f0535c72c6154873c94", "score": "0.6219907", "text": "func (block *Block) SerializeBlock() []byte {\n\tvar result bytes.Buffer\n\n\tencoder := gob.NewEncoder(&result)\n\n\t_ = encoder.Encode(block)\n\n\treturn result.Bytes()\n}", "title": "" }, { "docid": "be577fd135ee80813325320af778cc9a", "score": "0.6217823", "text": "func (s *Store) SetBlock(b *Block) {\n\ts.set(s.blocks, intToBytes(b.Index), b.ToWire())\n}", "title": "" }, { "docid": "cb22224d49b30fde10f1007e1e870485", "score": "0.621197", "text": "func WriteBadBlock(db database.KeyValueStore, block *model.Block) {\n\tblob, err := db.Get(badBlockKey, \"invalidBlock\")\n\tif err != nil {\n\t\tlog.Warning(\"Failed to load old bad blocks\", \"error\", err)\n\t}\n\tvar badBlocks badBlockList\n\tif len(blob) > 0 {\n\t\tif err := rlp.DecodeBytes(blob, &badBlocks); err != nil 
{\n\t\t\tlog.Critical(\"Failed to decode old bad blocks\", \"error\", err)\n\t\t}\n\t}\n\tfor _, b := range badBlocks {\n\t\tif b.Header.Number.Uint64() == block.NumberU64() && b.Header.Hash() == block.Hash() {\n\t\t\tlog.Info(\"Skip duplicated bad block\", \"number\", block.NumberU64(), \"hash\", block.Hash())\n\t\t\treturn\n\t\t}\n\t}\n\tbadBlocks = append(badBlocks, &badBlock{\n\t\tHeader: block.Header(),\n\t\tBody: block.Body(),\n\t})\n\tsort.Sort(sort.Reverse(badBlocks))\n\tif len(badBlocks) > badBlockToKeep {\n\t\tbadBlocks = badBlocks[:badBlockToKeep]\n\t}\n\tdata, err := rlp.EncodeToBytes(badBlocks)\n\tif err != nil {\n\t\tlog.Critical(\"Failed to encode bad blocks\", \"err\", err)\n\t}\n\tif err := db.Put(badBlockKey, data, \"invalidBlock\"); err != nil {\n\t\tlog.Critical(\"Failed to write bad blocks\", \"err\", err)\n\t}\n\n\tlog.Debugf(\"DB WriteBadBlock. key:InvalidBlock, blocks:%d, vSize:%d\", len(badBlocks), len(data))\n}", "title": "" }, { "docid": "01e940c02d4f25b02260f0fbdee24f11", "score": "0.6211862", "text": "func (this *BlockStore) SaveHeader(block *types.Block) error {\n\tblockHash := block.Hash()\n\n\tkey := this.getHeaderKey(blockHash)\n\tvalue := bytes.NewBuffer(nil)\n\terr := block.Header.Serialize(value)\n\tif err != nil {\n\t\treturn err\n\t}\n\tserialization.WriteUint32(value, uint32(block.Transactions.Len()))\n\tfor _, v := range block.Transactions {\n\t\ttxHash := v.Hash()\n\t\terr := txHash.Serialize(value)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tthis.store.BatchPut(key, value.Bytes())\n\treturn nil\n}", "title": "" }, { "docid": "31d4dbce74e54694b7047be1d7770ab4", "score": "0.62076163", "text": "func (blockChain *BlockChain) SaveBlock(block *Block) {\n\tif len(block.blockHash) <= 0 {\n\t\tblockHash := block.ComputeBlockHash()\n\t\tblock.SetBlockHash(blockHash)\n\t}\n\n\tif block.digitalSignature == nil {\n\t\tblock.Sign(blockChain.keyStore)\n\t}\n\n\tblock.LinkToPreviousBlock()\n\tblockChain.blocks = append(blockChain.blocks, block)\n}", "title": "" }, { "docid": "2047f248d35cba32071ea0e88ddaee37", "score": "0.6207181", "text": "func (dao *blockDAO) putBlock(blk *Block) error {\n\theight := utils.Uint64ToBytes(blk.Height())\n\tserialized, err := blk.Serialize()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to serialize block\")\n\t}\n\thash := blk.HashBlock()\n\tif err = dao.kvstore.PutIfNotExists(blockNS, hash[:], serialized); err != nil {\n\t\treturn errors.Wrap(err, \"failed to put block\")\n\t}\n\thashKey := append(hashPrefix, hash[:]...)\n\tif err = dao.kvstore.Put(blockHashHeightMappingNS, hashKey, height); err != nil {\n\t\treturn errors.Wrap(err, \"failed to put hash -> height mapping\")\n\t}\n\theightKey := append(heightPrefix, height...)\n\tif err = dao.kvstore.Put(blockHashHeightMappingNS, heightKey, hash[:]); err != nil {\n\t\treturn errors.Wrap(err, \"failed to put height -> hash mapping\")\n\t}\n\tvalue, err := dao.kvstore.Get(blockNS, topHeightKey)\n\ttopHeight := common.MachineEndian.Uint64(value)\n\tif blk.Height() > topHeight {\n\t\tdao.kvstore.Put(blockNS, topHeightKey, height)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to get top height\")\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "3f3e183e1fc1251cc6e5848830b5d455", "score": "0.619816", "text": "func (a *ApiDB) UpdateBlock(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Add(\"Content-Type\", \"application/json\")\n\tvars := mux.Vars(r)\n\tidblock, err := strconv.Atoi(vars[\"id\"])\n\tif err != nil 
{\n\t\t//w.WriteHeader(http.StatusBadRequest)\n\t\tio.WriteString(w, `{\"message\":\"can not convert id as int\"}`)\n\t\treturn\n\t}\n\tp := MODELS.BLOCKS{}\n\terr1 := json.NewDecoder(r.Body).Decode(&p)\n\tif err1 != nil {\n\t\tio.WriteString(w, `{\"message\": \"wrong format!\"}`)\n\t\treturn\n\t}\n\tp.Id = idblock\n\thasroweffected, _ := BUSINESS.UpdateBlock(a.Db, p)\n\tif hasroweffected == false {\n\t\tio.WriteString(w, `{ \"message\": \"Can’t update block\" }`)\n\t\treturn\n\t}\n\tstringresult := `{ \"status\": 200,\n \t\t\t\t\t\"message\": \"Update Success\",\n\t\t\t\t\t\t\"data\": {\n \t\t\t\t\t\t\"status\": 1\n \t\t\t\t\t\t\t}\n\t\t\t\t\t\t}`\n\tio.WriteString(w, stringresult)\n\treturn\n}", "title": "" }, { "docid": "076c6cdb34783ae9ce454e3a02b3006d", "score": "0.61966914", "text": "func (bs *MsgBlockSign) Serialize(w io.Writer) error {\n\tif err := serialization.WriteUint32(w, uint32(bs.BlockHeight)); err != nil {\n\t\treturn err\n\t}\n\tif err := serialization.WriteNBytes(w, bs.BlockHash[:]); err != nil {\n\t\treturn err\n\t}\n\tif err := serialization.WriteNBytes(w, bs.Signer[:]); err != nil {\n\t\treturn err\n\t}\n\tif err := serialization.WriteNBytes(w, bs.Signature[:]); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "52d609539a6a6c1d6cf34f81e41539b1", "score": "0.6185851", "text": "func UploadBlock(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\tubHeight, err := strconv.Atoi(vars[\"height\"])\n\tif err != nil {\n\t\treturnCode500(w, r)\n\t} else {\n\t\tubHash := vars[\"hash\"]\n\t\t//fmt.Println(\"\\nuploading block of -\\nubHeight : \", ubHeight)\n\t\t//fmt.Println(\"ubHash : \", ubHash, \"\\n\\n\")\n\n\t\tuBlock, found := SBC.GetBlock(int32(ubHeight), ubHash)\n\t\tif found == false {\n\t\t\tfmt.Println(\"Err : in Handlers - UploadBlock - found = false - 204\")\n\t\t\treturnCode204(w, r)\n\t\t} else {\n\t\t\tfmt.Println(\"in Handlers - UploadBlock - found = true\")\n\t\t\tblockJson := b.EncodeToJSON(&uBlock)\n\t\t\t_, err = fmt.Fprint(w, blockJson)\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(\"Err : in Handlers - UploadBlock - during writing response\")\n\t\t\t}\n\t\t}\n\t}\n\n}", "title": "" }, { "docid": "73a632adab3a5bca5ef050293c936f7a", "score": "0.61625725", "text": "func (w *Writer) AddBlock(block *bookkeeping.Block, modifiedTxns []transactions.SignedTxnInBlock, delta ledgercore.StateDelta) error {\n\tvar batch pgx.Batch\n\n\tspecialAddresses := transactions.SpecialAddresses{\n\t\tFeeSink: block.FeeSink,\n\t\tRewardsPool: block.RewardsPool,\n\t}\n\n\taddBlockHeader(&block.BlockHeader, &batch)\n\tsetSpecialAccounts(specialAddresses, &batch)\n\terr := addTransactions(block, modifiedTxns, &batch)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"AddBlock() err: %w\", err)\n\t}\n\terr = addTransactionParticipation(block, &batch)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"AddBlock() err: %w\", err)\n\t}\n\twriteStateDelta(block.Round(), delta, specialAddresses, &batch)\n\terr = updateAccountSigType(block.Payset, &batch)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"AddBlock() err: %w\", err)\n\t}\n\n\tresults := w.tx.SendBatch(context.Background(), &batch)\n\t// Clean the results off the connection's queue. 
Without this, weird things happen.\n\tfor i := 0; i < batch.Len(); i++ {\n\t\t_, err := results.Exec()\n\t\tif err != nil {\n\t\t\tresults.Close()\n\t\t\treturn fmt.Errorf(\"AddBlock() exec err: %w\", err)\n\t\t}\n\t}\n\terr = results.Close()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"AddBlock() close results err: %w\", err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "4a7173832670789402db93b797de04c7", "score": "0.6098956", "text": "func dispatchBlock(ctx *cli.Context, baseDir string, block *model.Block) error {\n\traw, _ := rlp.EncodeToBytes(block)\n\n\ttype blockInfo struct {\n\t\tRlp hexutil.Bytes `json:\"rlp\"`\n\t\tHash common.Hash `json:\"hash\"`\n\t}\n\tvar enc blockInfo\n\tenc.Rlp = raw\n\tenc.Hash = block.Hash()\n\n\tb, err := json.MarshalIndent(enc, \"\", \" \")\n\tif err != nil {\n\t\treturn NewError(ErrorJson, fmt.Errorf(\"failed marshalling output: %v\", err))\n\t}\n\tswitch dest := ctx.String(OutputBlockFlag.Name); dest {\n\tcase \"stdout\":\n\t\tos.Stdout.Write(b)\n\t\tos.Stdout.WriteString(\"\\n\")\n\tcase \"stderr\":\n\t\tos.Stderr.Write(b)\n\t\tos.Stderr.WriteString(\"\\n\")\n\tdefault:\n\t\tif err := saveFile(baseDir, dest, enc); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8140d007f94c98e74ef47bfbb94fd4a3", "score": "0.6076772", "text": "func handleWriteBlock(w http.ResponseWriter, r *http.Request) {\n\tvar m Message\n\tvar newBlock Block\n\tdecoder := json.NewDecoder(r.Body)\n\tif err := decoder.Decode(&m); err != nil {\n\n\t\trespondWithJSON(w, r, http.StatusBadRequest, r.Body)\n\t\treturn\n\t}\n\n\tdefer r.Body.Close()\n\t//log.Printf(\"%+v\", m.Version)\n\n\tswitch m.Type {\n\tcase \"Client\":\n\t\tnewBlock, err := generateBlock(Blockchain[len(Blockchain)-1], m.Data, m.BlockData.PubKey)\n\t\tif err != nil {\n\t\t\trespondWithJSON(w, r, http.StatusInternalServerError, m)\n\t\t\treturn\n\t\t}\n\t\taddNode(r.RemoteAddr)\n\t\taddBlock(newBlock)\n\t\trespondWithJSON(w, r, http.StatusCreated, newBlock)\n\tcase \"Noda\":\n\t\tnewBlock.Index = m.BlockData.Index\n\t\tnewBlock.Timestamp = m.BlockData.Timestamp\n\t\tnewBlock.PubKey = m.BlockData.PubKey\n\t\tnewBlock.Data = m.BlockData.Data\n\t\tnewBlock.Hash = m.BlockData.Hash\n\t\tnewBlock.PrevHash = m.BlockData.PrevHash\n\t\taddNode(r.RemoteAddr)\n\n\t\tif addBlock(newBlock) != false {\n\t\t\tannonceBlock(newBlock)\n\t\t}\n\n\t\trespondWithJSON(w, r, http.StatusCreated, newBlock)\n\n\tcase \"Bootstrap\":\n\n\t\tlog.Println(\"Responding to: \" + r.RemoteAddr)\n\t\tvar respMsg Message\n\t\trespMsg.NodeAddr = nodeList // send only alive nodes?\n\n\t\tnodeList = addListNodes(getAliveNodes(m.NodeAddr))\n\t\tlog.Println(\"Nodes has been recieved : \", m.NodeAddr)\n\t\trespondWithJSON(w, r, http.StatusCreated, respMsg)\n\n\tcase \"Alive\":\n\n\t\tlog.Println(\"Responding Alive to: \" + r.RemoteAddr)\n\t\tvar respMsg Message\n\t\trespMsg.Data = \"True\"\n\t\trespondWithJSON(w, r, http.StatusCreated, respMsg)\n\n\t}\n\t//log.Printf(\"%+v\", m)\n}", "title": "" }, { "docid": "f9533bfdc8f3cad7392406c4bc035471", "score": "0.60076416", "text": "func addBlock(db *sql.DB, block wire.MsgBlock) {\n\taddBlockHeader(db, block.Header)\n\tblockHash := block.BlockHash()\n\tfor idx, val := range block.Transactions {\n\t\taddTX(db, blockHash[:], idx, val)\n\t\t//\tt.Log(\"TX\", idx)\n\t\t//\tt.Log(val)\n\t\t//\tt.Log(\"TX ins\")\n\t\t//\tfor _, txval := range val.TxIn {\n\t\t//\t\tt.Log(txval)\n\t\t//\t}\n\t\t//\tt.Log(\"TX outs\")\n\t\t//\tfor _, txval := range val.TxOut 
{\n\t\t//\t\tt.Log(txval)\n\t\t//\t}\n\t}\n}", "title": "" }, { "docid": "53e3b55d0a43c3e074f7db4c64aba1bc", "score": "0.5996439", "text": "func (sc *Chain) StoreBlockSummary(ctx context.Context, bs *block.BlockSummary) error {\n\tbSummaryEntityMetadata := bs.GetEntityMetadata()\n\tbctx := ememorystore.WithEntityConnection(ctx, bSummaryEntityMetadata)\n\tdefer ememorystore.Close(bctx)\n\tif len(bs.Hash) < 64 {\n\t\tLogger.Error(\"Writing block summary - block hash less than 64\", zap.Any(\"hash\", bs.Hash))\n\t}\n\terr := bs.Write(bctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tcon := ememorystore.GetEntityCon(bctx, bSummaryEntityMetadata)\n\terr = con.Commit()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "771f8b4685ab8437e90cfb9efdbf0f37", "score": "0.59890574", "text": "func postBlock(w http.ResponseWriter, r *http.Request) {\n\n\tvar m Message //Message of the Block- block No\n\n\tdecoder := json.NewDecoder(r.Body)\n\n\t// Decode the message\n\tif err := decoder.Decode(&m); err != nil {\n\t\trespondWithJSON(w, r, http.StatusBadRequest, r.Body)\n\t\treturn\n\t}\n\tdefer r.Body.Close()\n\n\t// Generate a block according to the message(Block No)\n\tnewBlock, err := generateBlock(Blockchain[len(Blockchain)-1], m.BlockNo)\n\n\t// If there is an error with creating the block return the json error response\n\tif err != nil {\n\t\trespondWithJSON(w, r, http.StatusInternalServerError, m)\n\t\treturn\n\t}\n\n\t// Check blocks validity\n\tif isValidBlock(newBlock, Blockchain[len(Blockchain)-1]) {\n\n\t\t// Append new Block to Master Blockchain and assign it to a local block chain\n\t\tnewBlockchain := append(Blockchain,newBlock)\n\n\t\t// Relace Master Block chain with the Local one\n\t\treplaceChain(newBlockchain)\n\t\tspew.Dump(Blockchain)\n\t}\n\n\t// Return the Json Output of the newBlock\n\trespondWithJSON(w, r, http.StatusCreated, newBlock)\n\n}", "title": "" }, { "docid": "0c8910bb58ac7a2b3d80a6759eca620f", "score": "0.59735125", "text": "func (db *SqlBlockDB) AddBlock(block wire.MsgBlock) {\n\tdb.AddBlockHeader(block.Header)\n\tblockHash := block.BlockHash()\n\tfor idx, val := range block.Transactions {\n\t\tdb.AddTX(blockHash[:], int32(idx), val)\n\t}\n}", "title": "" }, { "docid": "0d44fb7802ebfdad34007a8e564a8879", "score": "0.59711754", "text": "func (db *IndexerDb) AddBlock(block *bookkeeping.Block) error {\n\tdb.log.Printf(\"adding block %d\", block.Round())\n\n\tdb.accountingLock.Lock()\n\tdefer db.accountingLock.Unlock()\n\n\tf := func(tx pgx.Tx) error {\n\t\t// Check and increment next round counter.\n\t\timportstate, err := db.getImportState(context.Background(), tx)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"AddBlock() err: %w\", err)\n\t\t}\n\t\tif block.Round() != basics.Round(importstate.NextRoundToAccount) {\n\t\t\treturn fmt.Errorf(\n\t\t\t\t\"AddBlock() adding block round %d but next round to account is %d\",\n\t\t\t\tblock.Round(), importstate.NextRoundToAccount)\n\t\t}\n\t\timportstate.NextRoundToAccount++\n\t\terr = db.setImportState(tx, &importstate)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"AddBlock() err: %w\", err)\n\t\t}\n\n\t\twriter, err := writer.MakeWriter(tx)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"AddBlock() err: %w\", err)\n\t\t}\n\t\tdefer writer.Close()\n\n\t\tif block.Round() == basics.Round(0) {\n\t\t\t// Block 0 is special, we cannot run the evaluator on it.\n\t\t\terr := writer.AddBlock0(block)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"AddBlock() err: %w\", err)\n\t\t\t}\n\t\t} else 
{\n\t\t\tledgerForEval, err := ledger_for_evaluator.MakeLedgerForEvaluator(tx, block.Round()-1)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"AddBlock() err: %w\", err)\n\t\t\t}\n\t\t\tdefer ledgerForEval.Close()\n\n\t\t\tproto, ok := config.Consensus[block.BlockHeader.CurrentProtocol]\n\t\t\tif !ok {\n\t\t\t\treturn fmt.Errorf(\n\t\t\t\t\t\"AddBlock() cannot find proto version %s\", block.BlockHeader.CurrentProtocol)\n\t\t\t}\n\t\t\tproto.EnableAssetCloseAmount = true\n\n\t\t\tresources, err := prepareEvalResources(&ledgerForEval, block)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"AddBlock() eval err: %w\", err)\n\t\t\t}\n\n\t\t\tstart := time.Now()\n\t\t\tdelta, modifiedTxns, err :=\n\t\t\t\tledger.EvalForIndexer(ledgerForEval, block, proto, resources)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"AddBlock() eval err: %w\", err)\n\t\t\t}\n\t\t\tmetrics.PostgresEvalTimeSeconds.Observe(time.Since(start).Seconds())\n\n\t\t\terr = writer.AddBlock(block, modifiedTxns, delta)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"AddBlock() err: %w\", err)\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n\terr := db.txWithRetry(serializable, f)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"AddBlock() err: %w\", err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "8fc39036e6902b177ba19b89b1196eed", "score": "0.5954721", "text": "func (ws *writeState) AddBlock(be *exec.BlockExecution) error {\n\tif ws.state.height > 0 && be.Height != ws.state.height+1 {\n\t\treturn fmt.Errorf(\"AddBlock received block for height %v but last block height was %v\",\n\t\t\tbe.Height, ws.state.height)\n\t}\n\tws.state.height = be.Height\n\t// Index transactions so they can be retrieved by their TxHash\n\tfor i, txe := range be.TxExecutions {\n\t\tws.addTx(txe.TxHash, be.Height, uint64(i))\n\t}\n\tbs, err := be.Encode()\n\tif err != nil {\n\t\treturn err\n\t}\n\tws.state.refs.Set(blockRefKeyFormat.Key(be.Height), bs)\n\treturn nil\n}", "title": "" }, { "docid": "3ce16fada81cb5523069b5318fb4337d", "score": "0.5925616", "text": "func (pb *PositionedBlock) WriteBinaryBlock(indices map[uint32]struct{}, hasBackground bool, op *OutputOp, bounds dvid.Bounds) error {\n\toffset, err := pb.OffsetDVID()\n\tif err != nil {\n\t\treturn err\n\t}\n\tbuf := make([]byte, 13)\n\tbinary.LittleEndian.PutUint32(buf[0:4], uint32(offset[0]))\n\tbinary.LittleEndian.PutUint32(buf[4:8], uint32(offset[1]))\n\tbinary.LittleEndian.PutUint32(buf[8:12], uint32(offset[2]))\n\n\tvar mixedData bool\n\tif len(indices) == 0 {\n\t\tbuf[12] = 0 // background only\n\t} else if hasBackground {\n\t\tbuf[12] = 2 // background + foreground\n\t\tmixedData = true\n\t} else {\n\t\tbuf[12] = 1 // foreground only\n\t}\n\tif n, err := op.w.Write(buf); err != nil {\n\t\treturn err\n\t} else if n != 13 {\n\t\treturn fmt.Errorf(\"couldn't write header for block %s, only %d bytes written\", pb.BCoord, n)\n\t}\n\tif !mixedData {\n\t\treturn nil\n\t}\n\n\tvar multiForeground bool\n\tvar labelIndex uint32\n\tif len(indices) > 1 {\n\t\tmultiForeground = true\n\t} else {\n\t\tfor labelIndex = range indices {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tgx, gy, gz := pb.Size[0]/SubBlockSize, pb.Size[1]/SubBlockSize, pb.Size[2]/SubBlockSize\n\n\tsubBlockNumVoxels := SubBlockSize * SubBlockSize * SubBlockSize\n\tcurIndices := make([]uint32, subBlockNumVoxels) // preallocate max # of indices for sub-block\n\n\tdata := make([]byte, 65) // sub-block data will at most be status byte + 64 bytes (8x8x8 bits).\n\n\tvar indexPos, bitpos uint32\n\tvar subBlockNum int\n\tvar sx, sy, sz 
int32\n\tfor sz = 0; sz < gz; sz++ {\n\t\tfor sy = 0; sy < gy; sy++ {\n\t\t\tfor sx = 0; sx < gx; sx, subBlockNum = sx+1, subBlockNum+1 {\n\n\t\t\t\tnumSBLabels := pb.NumSBLabels[subBlockNum]\n\t\t\t\tbits := bitsFor(numSBLabels)\n\n\t\t\t\tfor i := uint16(0); i < numSBLabels; i++ {\n\t\t\t\t\tcurIndices[i] = pb.SBIndices[indexPos]\n\t\t\t\t\tindexPos++\n\t\t\t\t}\n\n\t\t\t\tswitch numSBLabels {\n\t\t\t\tcase 0:\n\t\t\t\t\tdata[0] = 0\n\t\t\t\t\tif _, err := op.w.Write(data[:1]); err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t\tcontinue\n\t\t\t\tcase 1:\n\t\t\t\t\tvar foreground bool\n\t\t\t\t\tif multiForeground {\n\t\t\t\t\t\t_, foreground = indices[curIndices[0]]\n\t\t\t\t\t} else {\n\t\t\t\t\t\tforeground = (curIndices[0] == labelIndex)\n\t\t\t\t\t}\n\t\t\t\t\tif foreground {\n\t\t\t\t\t\tdata[0] = 1\n\t\t\t\t\t} else {\n\t\t\t\t\t\tdata[0] = 0\n\t\t\t\t\t}\n\t\t\t\t\tif _, err := op.w.Write(data[:1]); err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t\tcontinue\n\t\t\t\tdefault:\n\t\t\t\t}\n\n\t\t\t\toutbytepos := int(1)\n\t\t\t\toutbitpos := int(8) // Start at 2nd byte for output\n\n\t\t\t\tvar background bool // true if a non-index voxel is in block\n\t\t\t\tvar foreground bool // true if index is in block\n\n\t\t\t\tvar x, y, z int32\n\t\t\t\tfor z = 0; z < SubBlockSize; z++ {\n\t\t\t\t\tfor y = 0; y < SubBlockSize; y++ {\n\t\t\t\t\t\tfor x = 0; x < SubBlockSize; x++ {\n\t\t\t\t\t\t\tindex := getPackedValue(pb.SBValues, bitpos, bits)\n\n\t\t\t\t\t\t\t// write binary sub-block data\n\t\t\t\t\t\t\tvar curForeground bool\n\t\t\t\t\t\t\tif multiForeground {\n\t\t\t\t\t\t\t\t_, curForeground = indices[curIndices[index]]\n\t\t\t\t\t\t\t} else if curIndices[index] == labelIndex {\n\t\t\t\t\t\t\t\tcurForeground = true\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif curForeground {\n\t\t\t\t\t\t\t\tdata[outbytepos] |= bitMask[outbitpos%8]\n\t\t\t\t\t\t\t\tforeground = true\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tdata[outbytepos] &^= bitMask[outbitpos%8]\n\t\t\t\t\t\t\t\tbackground = true\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tbitpos += bits\n\t\t\t\t\t\t\toutbitpos++\n\t\t\t\t\t\t\tif outbitpos%8 == 0 {\n\t\t\t\t\t\t\t\toutbytepos++\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif background && foreground {\n\t\t\t\t\tdata[0] = 2\n\t\t\t\t\tif _, err := op.w.Write(data); err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t} else if foreground {\n\t\t\t\t\tdata[0] = 1\n\t\t\t\t\tif _, err := op.w.Write(data[:1]); err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tdata[0] = 0\n\t\t\t\t\tif _, err := op.w.Write(data[:1]); err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\trem := bitpos % 8\n\t\t\t\tif rem != 0 {\n\t\t\t\t\tbitpos += 8 - rem\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "f846ed3f7858d71a92811845d5ef050b", "score": "0.58796054", "text": "func (b *Block) Serialize() []byte {\n\t// set up buffer, and encoder\n\tvar res bytes.Buffer\n\tencoder := gob.NewEncoder(&res)\n\n\t// encode our block struct\n\terr := encoder.Encode(b)\n\n\tHandle(err)\n\n\treturn res.Bytes()\n}", "title": "" }, { "docid": "7b83f95629c35212b2197ee795f9a4b4", "score": "0.5864903", "text": "func DeleteBlock(db database.KeyValueWriter, hash common.Hash, number uint64) {\n\tDeleteReceipts(db, hash, number)\n\tDeleteHeader(db, hash, number)\n\tDeleteBody(db, hash, number)\n\tDeleteTd(db, hash, number)\n\tlog.Debugf(\"DB DeleteBlock. 
number:%d, hash:%x\", number, hash)\n}", "title": "" }, { "docid": "c4c4613088a5d3559890711a467db53d", "score": "0.58471376", "text": "func (blockStore *BlockStore) WriteBlockWithReceipts(block *types.Block, receipts []*types.Receipt) error {\n\tbatch := blockStore.store.NewBatch()\n\treceiptsByte, err := encodeEntity(receipts)\n\tif err != nil {\n\t\tlog.Error(\"Failed to encode receipts %v to byte, as: %v \", receipts, err)\n\t\treturn fmt.Errorf(\"Failed to encode receipts %v to byte, as: %v \", receipts, err)\n\t}\n\tblockHash := common.HeaderHash(block)\n\tbatch.Put(append(receiptPrefix, common.HashToBytes(blockHash)...), receiptsByte)\n\tblockStore.writeBlockByBatch(batch, block)\n\tif err != nil {\n\t\tbatch.Reset()\n\t\treturn err\n\t}\n\terr = batch.Write()\n\tif err != nil {\n\t\tlog.Error(\"failed to commit block %x to database, as: %v\", block.HeaderHash, err)\n\t\treturn err\n\t}\n\n\t// update current block\n\tblockStore.recordCurrentBlock(block)\n\treturn nil\n}", "title": "" }, { "docid": "db29ab93c2e3141731c1267e16085bdc", "score": "0.58380157", "text": "func (f *File) WriteBlocksFile(outfile string) error {\n fd, err := os.Create(outfile)\n if err != nil {\n return err\n }\n defer fd.Close()\n for _, block := range f.blocks {\n fd.WriteString(block.ID + \"\\n\")\n }\n return nil\n}", "title": "" }, { "docid": "3fc5f519ef7e4a28d575d4b3c72d9b20", "score": "0.5833858", "text": "func (chain *BlockChain) StoreTailBlock(block *types.Block, db storage.Writer) error {\n\tdata, err := block.Marshal()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn db.Put(TailKey, data)\n}", "title": "" }, { "docid": "1a56f51446d7061b42ef209c46d98313", "score": "0.5832739", "text": "func (manager *Manager) storeBlock(block *core.Block) error {\n\tpath := manager.getJSONStorePath(block.Header.Number)\n\tif util.FileExists(path) {\n\t\treturn errors.New(\"The block file is aleardy exists\")\n\t}\n\tfile, err := os.Create(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\tenc := json.NewEncoder(file)\n\n\terr = enc.Encode(block)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "030bac72a45c466934a190e1ce0ad3cb", "score": "0.5831923", "text": "func (d *Data) writeBlocks(v dvid.VersionID, b storage.TKeyValues, wg1, wg2 *sync.WaitGroup) error {\n\tbatcher, err := datastore.GetKeyValueBatcher(d)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tpreCompress, postCompress := 0, 0\n\tblockSize := d.BlockSize().(dvid.Point3d)\n\n\tctx := datastore.NewVersionedCtx(d, v)\n\tevt := datastore.SyncEvent{d.DataUUID(), labels.IngestBlockEvent}\n\n\tserver.CheckChunkThrottling()\n\tblockCh := make(chan blockChange, 100)\n\tsvmap, err := getMapping(d, v)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"writeBlocks couldn't get mapping for data %q, version %d: %v\", d.DataName(), v, err)\n\t}\n\tgo d.aggregateBlockChanges(v, svmap, blockCh)\n\tgo func() {\n\t\tdefer func() {\n\t\t\twg1.Done()\n\t\t\twg2.Done()\n\t\t\tdvid.Debugf(\"Wrote voxel blocks. Before %s: %d bytes. 
After: %d bytes\\n\", d.Compression(), preCompress, postCompress)\n\t\t\tclose(blockCh)\n\t\t\tserver.HandlerToken <- 1\n\t\t}()\n\n\t\tmutID := d.NewMutationID()\n\t\tbatch := batcher.NewBatch(ctx)\n\t\tfor i, block := range b {\n\t\t\tpreCompress += len(block.V)\n\t\t\tlblBlock, err := labels.MakeBlock(block.V, blockSize)\n\t\t\tif err != nil {\n\t\t\t\tdvid.Errorf(\"unable to compute dvid block compression in %q: %v\\n\", d.DataName(), err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tgo d.updateBlockMaxLabel(v, lblBlock)\n\n\t\t\tcompressed, _ := lblBlock.MarshalBinary()\n\t\t\tserialization, err := dvid.SerializeData(compressed, d.Compression(), d.Checksum())\n\t\t\tif err != nil {\n\t\t\t\tdvid.Errorf(\"Unable to serialize block in %q: %v\\n\", d.DataName(), err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tpostCompress += len(serialization)\n\t\t\tbatch.Put(block.K, serialization)\n\n\t\t\t_, indexZYX, err := DecodeBlockTKey(block.K)\n\t\t\tif err != nil {\n\t\t\t\tdvid.Errorf(\"Unable to recover index from block key: %v\\n\", block.K)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tblock := IngestedBlock{mutID, indexZYX.ToIZYXString(), lblBlock}\n\t\t\td.handleBlockIndexing(v, blockCh, block)\n\n\t\t\tmsg := datastore.SyncMessage{labels.IngestBlockEvent, v, block}\n\t\t\tif err := datastore.NotifySubscribers(evt, msg); err != nil {\n\t\t\t\tdvid.Errorf(\"Unable to notify subscribers of ChangeBlockEvent in %s\\n\", d.DataName())\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\t// Check if we should commit\n\t\t\tif i%KVWriteSize == KVWriteSize-1 {\n\t\t\t\tif err := batch.Commit(); err != nil {\n\t\t\t\t\tdvid.Errorf(\"Error on trying to write batch: %v\\n\", err)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tbatch = batcher.NewBatch(ctx)\n\t\t\t}\n\t\t}\n\t\tif err := batch.Commit(); err != nil {\n\t\t\tdvid.Errorf(\"Error on trying to write batch: %v\\n\", err)\n\t\t\treturn\n\t\t}\n\t}()\n\treturn nil\n}", "title": "" }, { "docid": "2d747669bedf9c6c653a74291e1ca8da", "score": "0.5825952", "text": "func exportBlock(block *types.Block, client *rpc.CodaClient) error {\n\tlogger.Infof(\"exporting block %v at height %v\", block.StateHash, block.Height)\n\texists, err := db.BlockExists(block.StateHash)\n\n\tif err == nil && exists {\n\t\tlogger.Infof(\"block %v already exported\", block.StateHash)\n\t\treturn nil\n\t}\n\n\tstart := time.Now()\n\n\taccountsInBlock := make(map[string]bool)\n\taccountsInBlock[block.Creator] = true\n\n\tfor _, uj := range block.UserJobs {\n\t\taccountsInBlock[uj.Sender] = true\n\t\taccountsInBlock[uj.Recipient] = true\n\t}\n\n\tfor _, ft := range block.FeeTransfers {\n\t\taccountsInBlock[ft.Recipient] = true\n\t}\n\n\tfor _, sj := range block.SnarkJobs {\n\t\taccountsInBlock[sj.Prover] = true\n\t}\n\tlogger.Infof(\"block mutated %v accounts\", len(accountsInBlock))\n\n\tfor pubKey := range accountsInBlock {\n\t\t//logger.Infof(\"exporting account %v\", pubKey)\n\t\taccount, err := client.GetAccount(pubKey)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error retrieving account data for account %v via rpc: %w\", pubKey, err)\n\t\t}\n\t\t//logger.Infof(\"account data retrieved\")\n\n\t\taccount.FirstSeen = block.Ts\n\t\taccount.LastSeen = block.Ts\n\n\t\terr = db.SaveAccount(account)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error saving account data for account %v: %w\", pubKey, err)\n\t\t}\n\t\t//logger.Infof(\"account data exported to db\")\n\t}\n\tlogger.Infof(\"accounts updated, saving block to db\")\n\n\terr = db.SaveBlock(block)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error saving block data for block 
%v: %w\", block.StateHash, err)\n\t}\n\tlogger.WithField(\"txs\", block.UserCommandsCount).WithField(\"snarks\", block.SnarkJobsCount).WithField(\"feeTransfers\", block.FeeTransferCount).Infof(\"block data exported to db, took %v\", time.Since(start))\n\n\treturn nil\n}", "title": "" }, { "docid": "844dfa719d7fa41ade9e6042b67ea238", "score": "0.5814539", "text": "func RollbackBlock(data []byte, deleteBlock bool) error {\n\tbuf := bytes.NewBuffer(data)\n\tif buf.Len() == 0 {\n\t\tlog.WithFields(log.Fields{\"type\": consts.EmptyObject}).Error(\"empty buffer\")\n\t\treturn fmt.Errorf(\"empty buffer\")\n\t}\n\n\tblock, err := block.UnmarshallBlock(buf, false, true)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdbTransaction, err := model.StartTransaction()\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\"type\": consts.DBError, \"error\": err}).Error(\"starting transaction\")\n\t\treturn err\n\t}\n\n\terr = rollbackBlock(dbTransaction, block)\n\n\tif err != nil {\n\t\tdbTransaction.Rollback()\n\t\treturn err\n\t}\n\n\tif deleteBlock {\n\t\tb := &model.Block{}\n\t\terr = b.DeleteById(dbTransaction, block.Header.BlockID)\n\t\tif err != nil {\n\t\t\tlog.WithFields(log.Fields{\"type\": consts.DBError, \"error\": err}).Error(\"deleting block by id\")\n\t\t\tdbTransaction.Rollback()\n\t\t\treturn err\n\t\t}\n\t}\n\n\terr = dbTransaction.Commit()\n\treturn err\n}", "title": "" }, { "docid": "b96dc8579b76c680166b50472d09ec8a", "score": "0.58062845", "text": "func (c *Carves) CreateBlock(block CarvedBlock, uuid, data string) error {\n\tswitch c.Carver {\n\tcase settings.CarverDB:\n\t\treturn c.DB.Create(&block).Error // can be nil or err\n\tcase settings.CarverS3:\n\t\tif c.S3 != nil {\n\t\t\tif err := c.DB.Create(&block).Error; err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\treturn c.S3.Upload(block, uuid, data)\n\t\t}\n\t\treturn fmt.Errorf(\"S3 carver not initialized\")\n\t}\n\treturn fmt.Errorf(\"Unknown carver\") // can be nil or err\n}", "title": "" }, { "docid": "491512494966c4342fe356073d25a3fa", "score": "0.5805083", "text": "func handleWritetBlockchain(w http.ResponseWriter, r *http.Request) {\n\n\tvar m Message\n\n\tdecoder := json.NewDecoder(r.Body)\n\tif err := decoder.Decode(&m); err != nil {\n\t\trespondWithJSON(w, r, http.StatusBadRequest, r.Body)\n\t}\n\n\tdefer r.Body.Close()\n\n\tnewB, err := CreateBlock(Blockchain[len(Blockchain)-1], m.BPM)\n\tif err != nil {\n\t\trespondWithJSON(w, r, http.StatusInternalServerError, m)\n\t}\n\n\tif isValidBlock(newB, Blockchain[len(Blockchain)-1]) {\n\t\tnewBlockchain := append(Blockchain, newB)\n\t\tReplaceChain(newBlockchain)\n\t\t//pretty print our structs into the console\n\t\tspew.Dump(Blockchain)\n\t}\n\n\trespondWithJSON(w, r, http.StatusCreated, newB)\n\n}", "title": "" }, { "docid": "25235015c3a67e8da3eb08161df58b5a", "score": "0.579939", "text": "func (b Block) Write(f io.Writer) (int, error) {\n\tbuff := make([]byte, b.BlockEnd-b.BlockStart)\n\t_, err := b.pbf.Stream.ReadRange(b.BlockStart, buff)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tsize, err := f.Write(buff)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn size, nil\n}", "title": "" }, { "docid": "286f7fcdc5885372ee2a9faaf9d33ec7", "score": "0.5797592", "text": "func EncodeBlock(data []byte) ([]byte, error) {\n\tif len(data) > MaximumBlockSize {\n\t\treturn nil, errors.New(\"data exceeds maximum block size\")\n\t}\n\n\tvar buffer bytes.Buffer\n\tgzw := gzip.NewWriter(&buffer)\n\n\tgzw.Header.Extra = []byte{\n\t\t0x42, 0x43, // Extra ID.\n\t\t0x02, 0x00, // Length of 
extra data (2 bytes).\n\t\t0x88, 0x88, // BSIZE (filled in after writing the archive).\n\t}\n\tif _, err := gzw.Write(data); err != nil {\n\t\treturn nil, fmt.Errorf(\"writing compressed data: %v\", err)\n\t}\n\tif err := gzw.Close(); err != nil {\n\t\treturn nil, fmt.Errorf(\"closing writer: %v\", err)\n\t}\n\tbsize := buffer.Len() - 1\n\tencoded := buffer.Bytes()\n\tencoded[16] = byte(bsize)\n\tencoded[17] = byte(bsize >> 8)\n\treturn encoded, nil\n}", "title": "" }, { "docid": "68e26fa4bc3a91b815a60ed7e48ace2b", "score": "0.57918274", "text": "func (bm *BlockMatrix) writeLockBlock( BlockNumber int ) bool {\n\n i, j := bm.blockIndex(BlockNumber)\n\n return bm.writeLockBlockAt(i, j)\n}", "title": "" }, { "docid": "7268d9d8ae5e5317f893889677767d55", "score": "0.578542", "text": "func BenchmarkWriteBlockHeader(b *testing.B) {\n\theader := blockOne.Header\n\tfor i := 0; i < b.N; i++ {\n\t\twriteBlockHeader(ioutil.Discard, 0, &header)\n\t}\n}", "title": "" }, { "docid": "e9991207fcd261e015fa5e6a773463ea", "score": "0.5767516", "text": "func (node *Node) MarshalBlock(block Block) string {\n\tjson, _ := json.Marshal(block)\n\treturn string(json)\n}", "title": "" }, { "docid": "d3b1e51358275e6f77a9c6f925b872fc", "score": "0.57473975", "text": "func (chain *Blockchain) AddBlock(block *Block) {\n\terr := chain.Database.Update(func(txn *badger.Txn) error {\n\t\t// if the block is already in the db, skip\n\t\tif _, err := txn.Get(block.Hash); err == nil {\n\t\t\treturn nil\n\t\t}\n\n\t\tblockData := block.Serialize()\n\t\t// add the block to the db\n\t\terr := txn.Set(block.Hash, blockData)\n\t\thandle(err)\n\n\t\t// get the last hash\n\t\titem, err := txn.Get([]byte(\"lh\"))\n\t\thandle(err)\n\t\tlastHash := valueHash(item)\n\n\t\t// get the last block from the lasthash\n\t\titem, err = txn.Get(lastHash)\n\t\thandle(err)\n\t\tlastBlockData := valueHash(item)\n\n\t\tlastBlock := Deserialize(lastBlockData)\n\n\t\t// compare the block height with the last block height\n\t\t// if it is larger, then set the new block to the last hash\n\t\tif block.Height > lastBlock.Height {\n\t\t\terr = txn.Set([]byte(\"lh\"), block.Hash)\n\t\t\thandle(err)\n\t\t\tchain.LastHash = block.Hash\n\t\t}\n\n\t\treturn nil\n\t})\n\thandle(err)\n}", "title": "" }, { "docid": "afad6cf3149fcacbeaf14212a2dbec64", "score": "0.5745655", "text": "func (c *spiConnection) WriteBlockData(reg byte, data []byte) error {\n\tc.mutex.Lock()\n\tdefer c.mutex.Unlock()\n\n\tbuf := make([]byte, len(data)+1)\n\tcopy(buf[1:], data)\n\tbuf[0] = reg\n\treturn c.writeBytes(buf)\n}", "title": "" }, { "docid": "34994d79ec6f7cc0972c366c1a3f379f", "score": "0.5738609", "text": "func (bc *Blockchain) AddBlock(txs []*Transaction) {\n\tvar lastHash []byte\n\tvar lastHeight []byte\n\n\terr := bc.db.View(func(dbtx *bolt.Tx) error {\n\t\tbBucket := dbtx.Bucket([]byte(blocksBucket))\n\t\tlastHash = bBucket.Get([]byte(\"l\"))\n\t\tlastHeight = bBucket.Get([]byte(\"b\"))\n\n\t\treturn nil\n\t})\n\n\tlastHeightInt, err := strconv.ParseInt(string(lastHeight), 10, 64)\n\n\tnewBlock := NewBlock(txs, lastHash, uint64(lastHeightInt+1))\n\tbc.tip, err = newBlock.Persist(bc.db)\n\n\tstart := time.Now().Unix()\n\tlog.Debug(\"start indexing the block:\" + strconv.FormatInt(start, 10))\n\tbc.search.IndexBlock(newBlock)\n\tend := time.Now().Unix()\n\tlog.Debug(\"end indexing the block:\" + strconv.FormatInt(end, 10) + \", duration:\" + strconv.FormatInt(end-start, 10))\n\n\tif err != nil {\n\t\tlog.Error(err)\n\t}\n}", "title": "" }, { "docid": 
"a2d71ab8361b4ac2cc570d510771473b", "score": "0.5730114", "text": "func TestBlockStoreSaveLoadBlock(t *testing.T) {\n\tst, bs := makeStateAndBlockStore(log.New(new(bytes.Buffer)))\n\trequire.Equal(t, bs.Height(), int64(0), \"initially the height should be zero\")\n\n\t// check there are no blocks at various heights\n\tnoBlockHeights := []int64{0, -1, 100, 1000, 2}\n\tfor i, height := range noBlockHeights {\n\t\tif g := bs.LoadBlockByHeight(height); g != nil {\n\t\t\tt.Errorf(\"#%d: height(%d) got a block; want nil\", i, height)\n\t\t}\n\t}\n\n\t// save a block\n\tblock := makeBlock(bs.Height()+1, st)\n\tbs.SaveBlock(block)\n\trequire.Equal(t, bs.Height(), block.Header.Height, \"expecting the new height to be changed\")\n\n\theader1 := &Header{\n\t\tHeight: 1,\n\t\tTimestamp: time.Now().Unix(),\n\t}\n\ttmp := *header1\n\theader2 := &tmp\n\theader2.Height = 4\n\n\t// End of setup, test data\n\n\ttuples := []struct {\n\t\tblock *Block\n\t\twantErr bool\n\t\twantPanic string\n\n\t\tcorruptBlockInDb bool\n\t}{\n\t\t{\n\t\t\tblock: newBlock(header1),\n\t\t},\n\n\t\t{\n\t\t\tblock: nil,\n\t\t\twantPanic: \"only save a non-nil block\",\n\t\t},\n\n\t\t{\n\t\t\tblock: newBlock(header2),\n\t\t\twantPanic: \"only save contiguous blocks\", // and incomplete and uncontiguous parts\n\t\t},\n\n\t\t{\n\t\t\tblock: newBlock(header1),\n\t\t\twantPanic: \"only save complete block\", // incomplete parts\n\t\t},\n\n\t\t{\n\t\t\tblock: newBlock(header1),\n\t\t\twantPanic: \"unmarshal to types.BlockMeta failed\",\n\t\t\tcorruptBlockInDb: true, // Corrupt the DB's block entry\n\t\t},\n\t}\n\n\ttype quad struct {\n\t\tblock *Block\n\t}\n\n\tfor i, tuple := range tuples {\n\t\tbs, db := freshBlockStore()\n\t\tres, err, panicErr := doFn(func() (interface{}, error) {\n\t\t\tbs.SaveBlock(tuple.block)\n\t\t\tif tuple.block == nil {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\n\t\t\tif tuple.corruptBlockInDb {\n\t\t\t\tdb.Put(height2HashKey(tuple.block.Height), []byte(\"block-bogus-hash\"))\n\t\t\t}\n\t\t\tbBlock := bs.LoadBlockByHeight(tuple.block.Height)\n\n\t\t\treturn &quad{block: bBlock,}, nil\n\t\t})\n\n\t\tif subStr := tuple.wantPanic; subStr != \"\" {\n\t\t\tif panicErr == nil {\n\t\t\t\tt.Errorf(\"#%d: want a non-nil panic\", i)\n\t\t\t} else if got := panicErr.Error(); !strings.Contains(got, subStr) {\n\t\t\t\tt.Errorf(\"#%d:\\n\\tgotErr: %q\\nwant substring: %q\", i, got, subStr)\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\tif tuple.wantErr {\n\t\t\tif err == nil {\n\t\t\t\tt.Errorf(\"#%d: got nil error\", i)\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\tassert.Nil(t, panicErr, \"#%d: unexpected panic\", i)\n\t\tassert.Nil(t, err, \"#%d: expecting a non-nil error\", i)\n\t\tqua, ok := res.(*quad)\n\t\tif !ok || qua == nil {\n\t\t\tt.Errorf(\"#%d: got nil quad back; gotType=%T\", i, res)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "title": "" }, { "docid": "e012894486177c0fe6b999eae326e435", "score": "0.5709125", "text": "func (b *Block) Serialize() ([]byte, error) {\n\tvar buff bytes.Buffer\n\tencoder := gob.NewEncoder(&buff)\n\terr := encoder.Encode(b)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn buff.Bytes(), nil\n}", "title": "" }, { "docid": "358409cd147c178ec4ded6e626728a50", "score": "0.57088166", "text": "func (sc *Chain) StoreBlockSummaryFromBlock(ctx context.Context, b *block.Block) error {\n\tbs := b.GetSummary()\n\tbSummaryEntityMetadata := bs.GetEntityMetadata()\n\tbctx := ememorystore.WithEntityConnection(ctx, bSummaryEntityMetadata)\n\tdefer ememorystore.Close(bctx)\n\tif len(bs.Hash) < 64 
{\n\t\tLogger.Error(\"Writing block summary - block hash less than 64\", zap.Any(\"hash\", bs.Hash))\n\t}\n\terr := bs.Write(bctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tcon := ememorystore.GetEntityCon(bctx, bSummaryEntityMetadata)\n\terr = con.Commit()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "75699ec18f4cdb44ccdb26ded8ba490e", "score": "0.57076764", "text": "func generateBlock(oldBlock *Block, document Document) (Block, error) {\n\n\tvar newBlock Block\n\n\tt := time.Now()\n\n\tif oldBlock != nil {\n\t\tnewBlock.Index = oldBlock.Index + 1\n\t\tnewBlock.PrevHash = oldBlock.Hash\n\t\tnewBlock.PrevIPFSHash = oldBlock.IPFSHash\n\t}\n\tnewBlock.Timestamp = t.String()\n\tnewBlock.Document = document\n\tnewBlock.Hash = calculateHash(newBlock)\n\tdata, err := json.Marshal(newBlock)\n\tif err != nil {\n\t\treturn newBlock, err\n\t}\n\tr := bytes.NewReader(data)\n\tipfsHash, err := sh.Add(r)\n\tnewBlock.IPFSHash = ipfsHash\n\treturn newBlock, err\n}", "title": "" }, { "docid": "2d52bec7f19368ddfc731ab89e48278b", "score": "0.56917274", "text": "func (m *DB) AddBlock(bl *types.Block) error {\n\tm.blockMutex.Lock()\n\tdefer m.blockMutex.Unlock()\n\tif _, err := m.getBlockBytes(bl.ID()); err == nil {\n\t\tm.With().Warning(ErrAlreadyExist.Error(), bl.ID())\n\t\treturn ErrAlreadyExist\n\t}\n\tif err := m.writeBlock(bl); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e7fd2e73c83280055db09fc4cbca06c6", "score": "0.5658186", "text": "func (a *BlockAPIService) Block(\n\tctx _context.Context,\n\tblockRequest *types.BlockRequest,\n) (*types.BlockResponse, *types.Error, error) {\n\tvar (\n\t\tlocalVarPostBody interface{}\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/block\"\n\tlocalVarHeaderParams := make(map[string]string)\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = blockRequest\n\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarPostBody, localVarHeaderParams)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(ctx, r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn nil, nil, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tdefer localVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode != _nethttp.StatusOK {\n\t\tvar v types.Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\n\t\treturn nil, &v, fmt.Errorf(\"%+v\", v)\n\t}\n\n\tvar v types.BlockResponse\n\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treturn &v, nil, nil\n}", "title": "" }, { "docid": 
"dda47f11a8c1f792f1d08f2b45f619ec", "score": "0.5645537", "text": "func HandleBlock(block *fabcommon.Block) (*CustomBlock, error) {\n\tcustomBlock := &CustomBlock{}\n\n\t// get block hash\n\thash := hex.EncodeToString(block.Header.DataHash)\n\n\t// get hash of the previous block\n\tprevioushash := hex.EncodeToString(block.Header.PreviousHash)\n\n\trawdata := block.GetData()\n\tfor _, value := range rawdata.Data {\n\n\t\t// get validation code (0 is valid)\n\t\tprocessedtx := &peer.ProcessedTransaction{}\n\t\terr := proto.Unmarshal(value, processedtx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tvalidationCode := processedtx.GetValidationCode()\n\n\t\tenvelope, err := protoutil.GetEnvelopeFromBlock(value)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// get ChannelHeader\n\t\tchannelHeader, err := protoutil.ChannelHeader(envelope)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// get timestamp\n\t\ttxtime, err := ptypes.Timestamp(channelHeader.Timestamp)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// get RW sets\n\t\taction, _ := protoutil.GetActionFromEnvelopeMsg(envelope)\n\t\tactionResults := action.GetResults()\n\n\t\tReadWriteSet := &rwset.TxReadWriteSet{}\n\n\t\terr = proto.Unmarshal(actionResults, ReadWriteSet)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\ttxRWSet, err := rwsetutil.TxRwSetFromProtoMsg(ReadWriteSet)\n\t\tif err != nil {\n\t\t\t//fmt.Printf(\"Failed to convert rwset.TxReadWriteSet to rwsetutil.TxRWSet: %s\", err)\n\t\t\treturn nil, err\n\t\t}\n\n\t\t//get tx id\n\t\tbytesEnvelope, err := protoutil.GetBytesEnvelope(envelope)\n\t\tif err != nil {\n\t\t\t//fmt.Printf(\"Can't convert common.Envelope to bytes: \", err)\n\t\t\treturn nil, err\n\t\t}\n\t\tTxId, err := protoutil.GetOrComputeTxIDFromEnvelope(bytesEnvelope)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif protoutil.IsConfigBlock(block) {\n\t\t\t// cast \"github.com/hyperledger/fabric-sdk-go/third_party/github.com/hyperledger/fabric/protos/common\".Block to\n\t\t\t// \"github.com/hyperledger/fabric/fabric-protos-go/common\".Block\n\t\t\tconfigEnvelope, blockType, err := ConfigEnvelopeFromBlock(block)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tvar writeSet []models.WriteKV\n\t\t\tswitch blockType {\n\t\t\tcase \"Config\":\n\t\t\t\tconfigPayload, err := protoutil.UnmarshalPayload(configEnvelope.Payload)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, errors.Wrap(err, \"failed to unmarshal config payload\")\n\t\t\t\t}\n\n\t\t\t\tconfigEnv := &fabcommon.ConfigEnvelope{}\n\t\t\t\terr = proto.Unmarshal(configPayload.Data, configEnv)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, errors.Wrap(err, \"failed to unmarshal config envelope\")\n\t\t\t\t}\n\n\t\t\t\tconfig := configEnv.GetConfig()\n\n\t\t\t\tconfigGroup := config.GetChannelGroup()\n\n\t\t\t\tgroups, err := json.Marshal(configGroup.Groups)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, errors.Wrap(err, \"failed to marshal config groups\")\n\t\t\t\t}\n\n\t\t\t\tvalues, err := json.Marshal(configGroup.Values)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, errors.Wrap(err, \"failed to marshal config values\")\n\t\t\t\t}\n\n\t\t\t\tpolicies, err := json.Marshal(configGroup.Policies)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, errors.Wrap(err, \"failed to marshal config policies\")\n\t\t\t\t}\n\n\t\t\t\twriteSet = append(writeSet, models.WriteKV{Key: \"Type\", Value: base64.StdEncoding.EncodeToString([]byte(blockType))})\n\t\t\t\twriteSet = append(writeSet, 
models.WriteKV{Key: \"Sequence\", Value: base64.StdEncoding.EncodeToString([]byte(strconv.Itoa(int(config.GetSequence()))))})\n\t\t\t\twriteSet = append(writeSet, models.WriteKV{Key: \"Version\", Value: base64.StdEncoding.EncodeToString([]byte(strconv.Itoa(int(configGroup.Version))))})\n\t\t\t\twriteSet = append(writeSet, models.WriteKV{Key: \"Groups\", Value: base64.StdEncoding.EncodeToString(groups)})\n\t\t\t\twriteSet = append(writeSet, models.WriteKV{Key: \"Values\", Value: base64.StdEncoding.EncodeToString(values)})\n\t\t\t\twriteSet = append(writeSet, models.WriteKV{Key: \"Policies\", Value: base64.StdEncoding.EncodeToString(policies)})\n\t\t\t\twriteSet = append(writeSet, models.WriteKV{Key: \"ModPolicy\", Value: base64.StdEncoding.EncodeToString([]byte(configGroup.ModPolicy))})\n\n\t\t\t// get config update\n\t\t\tcase \"ConfigUpdate\":\n\t\t\t\tconfigUpdateEnvelope, err := protoutil.EnvelopeToConfigUpdate(configEnvelope)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, errors.Wrap(err, \"failed read config update\")\n\t\t\t\t}\n\t\t\t\tconfigUpdateBytes := configUpdateEnvelope.GetConfigUpdate()\n\t\t\t\tvar configUpdate = &fabcommon.ConfigUpdate{}\n\t\t\t\terr = proto.Unmarshal(configUpdateBytes, configUpdate)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, errors.Wrap(err, \"failed to unmarshal config update\")\n\t\t\t\t}\n\n\t\t\t\t// extract config update data\n\t\t\t\tReadSet, err := json.Marshal(configUpdate.GetReadSet())\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tWriteSet, err := json.Marshal(configUpdate.GetWriteSet())\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\n\t\t\t\twriteSet = append(writeSet, models.WriteKV{Key: \"ChannelId\", Value: base64.StdEncoding.EncodeToString([]byte(configUpdate.GetChannelId()))})\n\t\t\t\twriteSet = append(writeSet, models.WriteKV{Key: \"ReadSet\", Value: base64.StdEncoding.EncodeToString(ReadSet)})\n\t\t\t\twriteSet = append(writeSet, models.WriteKV{Key: \"WriteSet\", Value: base64.StdEncoding.EncodeToString(WriteSet)})\n\n\t\t\t}\n\n\t\t\tjsonPayload, err := json.Marshal(writeSet)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\ttx := db.Tx{\n\t\t\t\tchannelHeader.ChannelId,\n\t\t\t\tTxId,\n\t\t\t\thash,\n\t\t\t\tprevioushash,\n\t\t\t\tblock.Header.Number,\n\t\t\t\tjsonPayload,\n\t\t\t\tvalidationCode,\n\t\t\t\ttxtime.Unix(),\n\t\t\t}\n\t\t\tcustomBlock.Txs = append(customBlock.Txs, tx)\n\n\t\t}\n\t\tfor _, nsRwSet := range txRWSet.NsRwSets {\n\t\t\t// get only those txs that changes state\n\t\t\tif len(nsRwSet.KvRwSet.Writes) != 0 {\n\t\t\t\tvar writeSet []models.WriteKV\n\t\t\t\tfor _, write := range nsRwSet.KvRwSet.Writes {\n\t\t\t\t\twriteSet = append(writeSet, models.WriteKV{Key: write.Key, Value: base64.StdEncoding.EncodeToString(write.Value)})\n\t\t\t\t}\n\n\t\t\t\tjsonPayload, err := json.Marshal(writeSet)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\ttx := db.Tx{\n\t\t\t\t\tchannelHeader.ChannelId,\n\t\t\t\t\tTxId,\n\t\t\t\t\thash,\n\t\t\t\t\tprevioushash,\n\t\t\t\t\tblock.Header.Number,\n\t\t\t\t\tjsonPayload,\n\t\t\t\t\tvalidationCode,\n\t\t\t\t\ttxtime.Unix(),\n\t\t\t\t}\n\t\t\t\tcustomBlock.Txs = append(customBlock.Txs, tx)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn customBlock, nil\n}", "title": "" }, { "docid": "6c89d0882ed3f1af121a4dc47236673e", "score": "0.5644088", "text": "func (writer *BlockWriter) Write(data []byte) (written int, err error) {\n\twritten, err = writer.target.Write(data)\n\twriter.bytesWritten += 
uint32(written)\n\treturn\n}", "title": "" }, { "docid": "ac14848f2483aea227f29f4d58cb4be6", "score": "0.5643055", "text": "func (bd *OptimizedBlockDeviceWriter) Write(b []byte) (n int, err error) {\n\n\tframeSize := int64(len(b))\n\tpayloadBuf := make([]byte, frameSize)\n\n\t//\n\t// Read len(b) bytes from the block-device\n\t//\n\tn, err = io.ReadFull(bd.blockDevice, payloadBuf)\n\tif err != nil {\n\t\tlog.Errorf(\"Failed to read a full frame of size: %d from the block-device\", err)\n\t\treturn 0, err\n\t}\n\n\t//\n\t// Write the frame if it is dirty.\n\t//\n\tif !bytes.Equal(payloadBuf, b) {\n\t\t// In order to write, we need to seek back to\n\t\t// the start of the chunk.\n\t\tif _, err = bd.blockDevice.Seek(-int64(frameSize), io.SeekCurrent); err != nil {\n\t\t\tlog.Errorf(\"Failed to seek back to the start of the frame. Err: %v\", err)\n\t\t\treturn 0, err\n\t\t}\n\t\tbd.totalFrames += 1\n\t\tbd.dirtyFrames += 1\n\t\treturn bd.blockDevice.Write(b)\n\t}\n\n\t// No need to write a clean frame\n\tbd.totalFrames += 1\n\treturn n, err\n}", "title": "" }, { "docid": "cbe6c0094b5de65c856496d70155713f", "score": "0.5641448", "text": "func (b *PrestoThriftJson) AppendBlock(blocks []Column) {\n\tcount := b.Count()\n\tfor _, a := range blocks {\n\t\tcount += a.(*PrestoThriftJson).Count()\n\t}\n\n\tnulls := make([]bool, 0, count)\n\tsizes := make([]int32, 0, count)\n\tbytes := make([]byte, 0, count)\n\n\tb.Nulls = append(nulls, b.Nulls...)\n\tb.Sizes = append(sizes, b.Sizes...)\n\tb.Bytes = append(bytes, b.Bytes...)\n\n\tfor _, a := range blocks {\n\t\tblock := a.(*PrestoThriftJson)\n\t\tb.Nulls = append(b.Nulls, block.Nulls...)\n\t\tb.Sizes = append(b.Sizes, block.Sizes...)\n\t\tb.Bytes = append(b.Bytes, block.Bytes...)\n\t}\n}", "title": "" }, { "docid": "649dc36b4b6d42c225c22cb1d25081f1", "score": "0.56343794", "text": "func (a *aesCbcWriter) writeBlocks(plaintext []byte) (n int, err error) {\n\t// calculate the number of bytes we will write\n\t// (integer math rounds down, so this does what we want)\n\tbytes := (len(plaintext) / 16) * 16\n\n\t// encrypt\n\tciphertext := make([]byte, bytes)\n\ta.cbc.CryptBlocks(ciphertext, plaintext[:bytes])\n\n\t// write the encrypted data\n\treturn a.w.Write(ciphertext)\n}", "title": "" }, { "docid": "4d0f35d5f70ca3b4388ef9989f062f62", "score": "0.5629468", "text": "func (e *Engine) writeBlocks(bkt *bolt.Bucket, a [][]byte) error {\n\tvar block []byte\n\n\t// Dedupe points by key.\n\ta = DedupeEntries(a)\n\n\t// Group points into blocks by size.\n\ttmin, tmax := int64(math.MaxInt64), int64(math.MinInt64)\n\tfor i, p := range a {\n\t\t// Update block time range.\n\t\ttimestamp := int64(btou64(p[0:8]))\n\t\tif timestamp < tmin {\n\t\t\ttmin = timestamp\n\t\t}\n\t\tif timestamp > tmax {\n\t\t\ttmax = timestamp\n\t\t}\n\n\t\t// Append point to the end of the block.\n\t\tblock = append(block, p...)\n\n\t\t// If the block is larger than the target block size or this is the\n\t\t// last point then flush the block to the bucket.\n\t\tif len(block) >= e.BlockSize || i == len(a)-1 {\n\t\t\t// Encode block in the following format:\n\t\t\t// tmax int64\n\t\t\t// data []byte (snappy compressed)\n\t\t\tvalue := append(u64tob(uint64(tmax)), snappy.Encode(nil, block)...)\n\n\t\t\t// Write block to the bucket.\n\t\t\tif err := bkt.Put(u64tob(uint64(tmin)), value); err != nil {\n\t\t\t\treturn fmt.Errorf(\"put: ts=%d-%d, err=%s\", tmin, tmax, err)\n\t\t\t}\n\n\t\t\t// Reset the block & time range.\n\t\t\tblock = nil\n\t\t\ttmin, tmax = int64(math.MaxInt64), 
int64(math.MinInt64)\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "441b5cf8ff97ab27efbb8f0ea632e402", "score": "0.5615864", "text": "func (chain *BlockChain) StoreBlockWithIndex(block *types.Block, db storage.Writer) error {\n\n\thash := block.BlockHash()\n\tdb.Put(BlockHashKey(block.Header.Height), hash[:])\n\treturn chain.StoreBlock(block)\n}", "title": "" }, { "docid": "4119601da4f6de5e4d932a43d6088cbb", "score": "0.56127375", "text": "func (p *PublicBlock) PutBlock() error {\n\ttransaction, err := json.Marshal(p)\n\tif err != nil {\n\t\treturn err\n\t}\n\turl := baseURL + \"/buckets/block/keys/\" + p.ID\n\tres, err := PutRequest(url, string(transaction))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif res.StatusCode != http.StatusNoContent {\n\t\treturn HTTPError(res)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "57e9d5423567c25ba824e5089a8b7bc7", "score": "0.56012225", "text": "func (b *Block) Serialize() ([]byte, error) {\n\tvar result bytes.Buffer\n\tencoder := gob.NewEncoder(&result)\n\tif err := encoder.Encode(b); err != nil {\n\t\treturn []byte{}, err\n\t} else {\n\t\treturn result.Bytes(), nil\n\t}\n}", "title": "" }, { "docid": "52c54c9e4adfac691ef2bd717374df1c", "score": "0.56006783", "text": "func StoreMagicBlock(ctx context.Context, magicBlock *block.MagicBlock) (\n\terr error) {\n\n\tvar (\n\t\tdata = block.NewMagicBlockData(magicBlock)\n\t\temd = data.GetEntityMetadata()\n\t\tdctx = ememorystore.WithEntityConnection(ctx, emd)\n\t)\n\tdefer ememorystore.Close(dctx)\n\n\tif err = data.Write(dctx); err != nil {\n\t\treturn\n\t}\n\n\tvar connection = ememorystore.GetEntityCon(dctx, emd)\n\treturn connection.Commit()\n}", "title": "" }, { "docid": "d5ecae461b68e02d5ed36a7b364903d5", "score": "0.5594384", "text": "func (l *Logger) AddBlockStream(\n\tctx context.Context,\n\tblock *types.Block,\n) error {\n\tif !l.logBlocks {\n\t\treturn nil\n\t}\n\n\tf, err := os.OpenFile(\n\t\tpath.Join(l.logDir, blockStreamFile),\n\t\tos.O_APPEND|os.O_CREATE|os.O_WRONLY,\n\t\tos.FileMode(utils.DefaultFilePermissions),\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer closeFile(f)\n\n\tblockString := fmt.Sprintf(\n\t\t\"%s Block %d:%s with Parent Block %d:%s\\n\",\n\t\taddEvent,\n\t\tblock.BlockIdentifier.Index,\n\t\tblock.BlockIdentifier.Hash,\n\t\tblock.ParentBlockIdentifier.Index,\n\t\tblock.ParentBlockIdentifier.Hash,\n\t)\n\tfmt.Print(blockString)\n\tif _, err := f.WriteString(blockString); err != nil {\n\t\treturn err\n\t}\n\n\treturn l.TransactionStream(ctx, block)\n}", "title": "" }, { "docid": "6083f4eec0cd8f8dce9f71a3998a292a", "score": "0.5592144", "text": "func (f *Flusher) AddBlock(block *schema.TlogBlock) error {\n\tf.mux.Lock()\n\tdefer f.mux.Unlock()\n\n\t// creates aggregation if needed\n\tif f.curAgg == nil {\n\t\tif err := f.initAggregation(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn f.curAgg.AddBlock(block)\n}", "title": "" }, { "docid": "d222b6b90b8149d9ec1299fa065e1c89", "score": "0.55802876", "text": "func (_Rollup *RollupTransactorSession) AddBlock(header []byte, rollup []byte, signature []byte) (*types.Transaction, error) {\n\treturn _Rollup.Contract.AddBlock(&_Rollup.TransactOpts, header, rollup, signature)\n}", "title": "" }, { "docid": "6815ff397c7cf6f69ba2df517c02f102", "score": "0.5570323", "text": "func (node *DRNode) SendBlock(args *SendBlockArgs, reply *SendBlockReply) error {\n\tDPrintf(\"%d received a block with hash %x\", node.me, args.SentBlock.BlockHash)\n\n\tnewBlock := &args.SentBlock\n\tprevHash := 
newBlock.PrevBlock\n\tcurHash := newBlock.BlockHash\n\tnode.mu.Lock()\n\tdefer node.mu.Unlock()\n\tnode.bcmu.Lock()\n\tdefer node.bcmu.Unlock()\n\n\tnode.Log = append(node.Log, *newBlock)\n\n\tsucc, err := ValidateBlock(newBlock)\n\tif !succ {\n\t\treturn err\n\t}\n\n\tif len(node.Blockchain) == 0 ||\n\t\tbytes.Compare(prevHash, node.Blockchain[len(node.Blockchain)-1].BlockHash) == 0 { // adds to main chain\n\t\tsucc, err := node.validateBlockTxs(newBlock)\n\t\tif !succ {\n\t\t\treturn err\n\t\t}\n\t\t// append to main chain\n\t\tnode.Blockchain = append(node.Blockchain, newBlock)\n\n\t\t// Finally, mark all the successful transactions as valid\n\t\tnode.utxoMu.Lock()\n\t\tfor _, tx := range newBlock.Transactions {\n\t\t\tif tx.Type != COINBASE {\n if val, ok := node.PendingTxs[string((&tx).Hash())]; ok {\n val.Status = SUCCESS\n }\n\t\t\t}\n\t\t\tnode.updateUtxoDb(tx)\n\t\t}\n\t\tnode.numPending -= (len(newBlock.Transactions) - 1)\n\t\tnode.utxoMu.Unlock()\n\t\treturn nil\n\t}\n\n\tvar curSideChain *SideChain\n\tstatus := 0 // 0 for orphan, 1 for new sidechain, 2 for expanding old sidechain/orphan chain\n\n\t// now we scan through our current blocks to see where our block falls\n\n\tfor index, block := range node.Blockchain {\n\t\tif bytes.Compare(curHash, block.BlockHash) == 0 { // we've seen this block before\n\t\t\treturn nil\n\t\t}\n\t\tif bytes.Compare(prevHash, block.BlockHash) == 0 { // this is the parent, create sidechain\n\t\t\tnewSideChain := SideChain{nil, index, newBlock, 0, make([]*SideChain, 0)}\n\t\t\tcurSideChain = &newSideChain\n\t\t\tstatus = 1\n\t\t}\n\t}\n\n\tfor _, sc := range node.SideChains {\n\t\tresult := sc.FindParent(prevHash, curHash)\n\t\tif result != nil {\n\t\t\tif result.ParentIndex == -2 {\n\t\t\t\treturn nil\n\t\t\t}\n\n\t\t\tnewSideChain := SideChain{result, result.ParentIndex, newBlock, result.Depth + 1, make([]*SideChain, 0)}\n\t\t\tcurSideChain = &newSideChain\n\t\t\tresult.Children = append(result.Children, curSideChain)\n\t\t\tstatus = 2\n\t\t}\n\t}\n\n\tfor _, oc := range node.OrphanChains {\n\t\tresult := oc.FindParent(prevHash, curHash)\n\t\tif result != nil {\n\t\t\tif result.ParentIndex == -2 {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tnewSideChain := SideChain{result, result.ParentIndex, newBlock, result.Depth + 1, make([]*SideChain, 0)}\n\t\t\tcurSideChain = &newSideChain\n\t\t\tresult.Children = append(result.Children, curSideChain)\n\t\t\tstatus = 2\n\t\t}\n\t}\n\n\tif status == 0 { // did not find parent of any kind\n\t\tnewSideChain := SideChain{nil, -1, newBlock, 0, make([]*SideChain, 0)}\n\t\tcurSideChain = &newSideChain\n\t}\n\n\t// now we search to see if we can attach orphan chains to our current block\n\n\tdeleted := 0\n\tfor i := range node.OrphanChains {\n\t\tj := i - deleted\n\t\toc := node.OrphanChains[j]\n\t\tif bytes.Compare(curHash, oc.Block.PrevBlock) == 0 { // found a match\n\t\t\tcurSideChain.Children = append(curSideChain.Children, oc)\n\t\t\toc.Parent = curSideChain\n\t\t\toc.Recompute(curSideChain.Depth, curSideChain.ParentIndex)\n\n\t\t\t// magic trick to do deletion in O(1) time\n\t\t\tnode.OrphanChains[j] = node.OrphanChains[len(node.OrphanChains)-1]\n\t\t\tnode.OrphanChains = node.OrphanChains[0 : len(node.OrphanChains)-1]\n\t\t}\n\t}\n\n\t// now that the block has been added to the system, we need to see if the main chain\n\t// has changed. 
In particular, only curSideChain could have become the new main chain\n\n\tif curSideChain.ParentIndex == -1 { // if orphan chain, dont do anything\n\t\treturn nil\n\t}\n\n\tnewLength := curSideChain.Depth + curSideChain.ParentIndex + 2\n\tif newLength > len(node.Blockchain) { // breaks ties in favor of current mainchain so >\n\t\t// TODO: gotta validate the side chain before doing this\n\t\tbreakPoint := curSideChain.ParentIndex\n\t\tbreakMainChain := node.Blockchain[curSideChain.ParentIndex+1 : len(node.Blockchain)]\n\n\t\t// we construct sidechain nodes for the nodes originally on the mainchain\n\t\tnewSideNodes := make([]*SideChain, len(breakMainChain))\n\t\tfor i := range newSideNodes {\n\t\t\tnewSCNode := SideChain{nil, breakPoint, breakMainChain[i], i, make([]*SideChain, 0)}\n\t\t\tif i != 0 {\n\t\t\t\tnewSCNode.Parent = newSideNodes[i-1]\n\t\t\t\tnewSideNodes[i-1].Children = append(newSideNodes[i-1].Children, &newSCNode)\n\t\t\t}\n\t\t\tnewSideNodes[i] = &newSCNode\n\t\t}\n\n\t\t// we look for sidechains off the mainchain after breakpoint and adjust them\n\t\tfor i := range node.SideChains {\n\t\t\tj := i - deleted\n\t\t\tsc := node.SideChains[j]\n\t\t\tif sc.ParentIndex > breakPoint {\n\t\t\t\tnewParent := newSideNodes[sc.ParentIndex-breakPoint-1]\n\t\t\t\tsc.Parent = newParent\n\t\t\t\tsc.Recompute(newParent.Depth, breakPoint)\n\n\t\t\t\t// magic trick to do deletion in O(1) time\n\t\t\t\tnode.SideChains[j] = node.SideChains[len(node.SideChains)-1]\n\t\t\t\tnode.SideChains = node.SideChains[0 : len(node.SideChains)-1]\n\t\t\t}\n\t\t}\n\n\t\tnewMainChain := make([]*Block, curSideChain.Depth+1)\n\n\t\t// heal the mainchain and construct new sidechains in the process\n\t\tfor curSideChain.Parent != nil {\n\t\t\tdepth := curSideChain.Depth\n\t\t\tnewMainChain[depth] = curSideChain.Block\n\t\t\tparent := curSideChain.Parent\n\n\t\t\tfor _, sc := range parent.Children {\n\t\t\t\tif bytes.Compare(curSideChain.Block.BlockHash, sc.Block.BlockHash) == 0 {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tsc.Parent = nil\n\t\t\t\tsc.Recompute(0, breakPoint+depth+1) // because depth = 0 corresponds to index breakpoint + 1\n\t\t\t\tnode.SideChains = append(node.SideChains, sc)\n\t\t\t}\n\t\t\tcurSideChain = curSideChain.Parent\n\t\t}\n\n\t\tnode.Blockchain = append(node.Blockchain, newMainChain...)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "eba3ae4d71099763f4f548a8af8f6fa2", "score": "0.5564323", "text": "func (historyDB *HistoryDB) Commit(block *common.Block) error {\n\n\tblockNo := block.Header.Number\n\t//Set the starting tranNo to 0\n\tvar tranNo uint64\n\n\tdbBatch := NewUpdateBatch()\n\n\tlogger.Debugf(\"Channel [%s]: Updating history database for blockNo [%v] with [%d] transactions\",\n\t\thistoryDB.chainName, blockNo, len(block.Data.Data))\n\n\t// Get the invalidation byte array for the block\n\ttxsFilter := util.TxValidationFlags(block.Metadata.Metadata[common.BlockMetadataIndex_TRANSACTIONS_FILTER])\n\n\tifHistoryRichQueryEnabled := ledgerconfig.IfHistoryRichQueryEnabled()\n\tlogger.Debugf(\"[Commit] ifHistoryRichQueryEnabled=%v\", ifHistoryRichQueryEnabled)\n\n\tvar indexFieldsMap map[string][]string\n\tif ifHistoryRichQueryEnabled && indexFieldsMap == nil {\n\t\tindexFieldsMap = make(map[string][]string)\n\t}\n\n\t// write each tran's write set to history db\n\tfor _, envBytes := range block.Data.Data {\n\n\t\t// If the tran is marked as invalid, skip it\n\t\tif txsFilter.IsInvalid(int(tranNo)) {\n\t\t\tlogger.Debugf(\"Channel [%s]: Skipping history write for invalid transaction 
number %d\",\n\t\t\t\thistoryDB.chainName, tranNo)\n\t\t\ttranNo++\n\t\t\tcontinue\n\t\t}\n\n\t\tenv, err := putils.GetEnvelopeFromBlock(envBytes)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tpayload, err := putils.GetPayload(env)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tchdr, err := putils.UnmarshalChannelHeader(payload.Header.ChannelHeader)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif common.HeaderType(chdr.Type) == common.HeaderType_ENDORSER_TRANSACTION {\n\n\t\t\t// extract actions from the envelope message\n\t\t\trespPayload, err := putils.GetActionFromEnvelope(envBytes)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t//preparation for extracting RWSet from transaction\n\t\t\ttxRWSet := &rwsetutil.TxRwSet{}\n\n\t\t\t// Get the Result from the Action and then Unmarshal\n\t\t\t// it into a TxReadWriteSet using custom unmarshalling\n\t\t\tif err = txRWSet.FromProtoBytes(respPayload.Results); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\t// for each transaction, loop through the namespaces and writesets\n\t\t\t// and add a history record for each write\n\t\t\tfor _, nsRWSet := range txRWSet.NsRwSets {\n\t\t\t\tns := nsRWSet.NameSpace\n\n\t\t\t\tfor _, kvWrite := range nsRWSet.KvRwSet.Writes {\n\t\t\t\t\twriteKey := kvWrite.Key\n\n\t\t\t\t\t// logger.Debugf(\"Commit func: ns=%s, key=%s, blockNo=%v, tranNo=%v, historyDbName=%s\\n\", ns, writeKey, blockNo, tranNo, historyDB.constructHistoryDbNameFromNs(ns))\n\t\t\t\t\tlogger.Debugf(\"[Commit] ns=%s, key=%s, isDel=%v, value=%v\", ns, writeKey, kvWrite.IsDelete, string(kvWrite.Value))\n\n\t\t\t\t\tjMap := make(jsonMap)\n\t\t\t\t\tjMap[idField] = fmt.Sprintf(\"%s~%020v\", writeKey, time.Now().UnixNano())\n\t\t\t\t\tjMap[blockNumField] = blockNo\n\t\t\t\t\tjMap[txNumField] = tranNo\n\n\t\t\t\t\tif ifHistoryRichQueryEnabled {\n\t\t\t\t\t\t// read index fields\n\t\t\t\t\t\thdbName := historyDB.constructHistoryDbNameFromNs(ns)\n\t\t\t\t\t\tcurIndexFields := indexFieldsMap[hdbName]\n\t\t\t\t\t\tif curIndexFields == nil {\n\t\t\t\t\t\t\thdb, err := historyDB.getDBHandleByDbName(hdbName)\n\t\t\t\t\t\t\tif err == nil {\n\t\t\t\t\t\t\t\tcurIndexFields, err = hdb.ListIndexFields()\n\t\t\t\t\t\t\t\tif err == nil {\n\t\t\t\t\t\t\t\t\tindexFieldsMap[hdbName] = curIndexFields\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// read keys from value bytes\n\t\t\t\t\t\tvar valueMap map[string]interface{}\n\t\t\t\t\t\terr := json.Unmarshal(kvWrite.Value, &valueMap)\n\t\t\t\t\t\tif err == nil {\n\t\t\t\t\t\t\tfor k, v := range valueMap {\n\t\t\t\t\t\t\t\tfor _, indexField := range curIndexFields {\n\t\t\t\t\t\t\t\t\tif k == indexField {\n\t\t\t\t\t\t\t\t\t\tjMap[k] = v\n\t\t\t\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tjsonBytes, _ := json.Marshal(jMap)\n\t\t\t\t\tdbBatch.Put(historyDB.constructHistoryDbNameFromNs(ns), jsonBytes)\n\t\t\t\t}\n\t\t\t}\n\n\t\t} else {\n\t\t\tlogger.Debugf(\"Skipping transaction [%d] since it is not an endorsement transaction\\n\", tranNo)\n\t\t}\n\t\ttranNo++\n\t}\n\n\t// add savepoint for recovery purpose\n\theight := version.NewHeight(blockNo, tranNo)\n\n\t// do batch update\n\terr := historyDB.applyUpdates(dbBatch, height)\n\n\tif err != nil {\n\t\tlogger.Errorf(\"Channel [%s], blockNo [%v], failed to applyUpdates: %s\\n\", historyDB.chainName, blockNo, err.Error())\n\t\treturn err\n\t}\n\n\tlogger.Debugf(\"[Commit] indexFieldsMap=%v\", indexFieldsMap)\n\tlogger.Debugf(\"Channel [%s]: Updates 
committed to history database for blockNo [%v]\", historyDB.chainName, blockNo)\n\treturn nil\n}", "title": "" }, { "docid": "e9355d9e16d76aa142ace682fc91be70", "score": "0.55531335", "text": "func (f Field) DrawBlock(b Block) {\n\tfor i := 0; i < len(b.Matrix); i++ {\n\t\tx := i%b.Width + b.X\n\t\ty := i/b.Width + b.Y\n\n\t\ttargeti := y*f.Width + x\n\n\t\tif b.Matrix[i] != 0 {\n\t\t\tf.Matrix[targeti] = b.Matrix[i]\n\t\t}\n\t}\n}", "title": "" }, { "docid": "dc35a67e08ebd492a8ce00534225d047", "score": "0.55495733", "text": "func (_Rollup *RollupSession) AddBlock(header []byte, rollup []byte, signature []byte) (*types.Transaction, error) {\n\treturn _Rollup.Contract.AddBlock(&_Rollup.TransactOpts, header, rollup, signature)\n}", "title": "" }, { "docid": "230fca2b0654a42ca7dfcf0f13dbe4ec", "score": "0.55477226", "text": "func (bc *Blockchain) AddBlock(block *Block) {\n\terr := bc.Db.Update(func(tx *bolt.Tx) error {\n\t\tb := tx.Bucket([]byte(blocksBucket))\n\t\tblockInDb := b.Get(block.Hash.Bytes())\n\n\t\tif blockInDb != nil {\n\t\t\treturn errors.New(\"block exist!\")\n\t\t}\n\n\t\tblockData := block.Serialize()\n\t\terr := b.Put(block.Hash.Bytes(), blockData)\n\t\tif err != nil {\n\t\t\t//log.Panic(err)\n\t\t\treturn err\n\t\t}\n\n\t\tlastHash := b.Get([]byte(\"l\"))\n\t\tlastBlockData := b.Get(lastHash)\n\t\tlastBlock := DeserializeBlock(lastBlockData)\n\n\t\tif block.Height.Cmp(lastBlock.Height) > 0 {\n\t\t\terr = b.Put([]byte(\"l\"), block.Hash.Bytes())\n\t\t\tif err != nil {\n\t\t\t\t//log.Panic(err)\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tbc.tip = block.Hash\n\t\t}else{\n\t\t\tlog.Panic(\"block not valid!\")\n\t\t}\n\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n}", "title": "" }, { "docid": "19f349f12b5cf75fee83d7cae11506b0", "score": "0.55424565", "text": "func (b *Block) Serialize() []byte {\n\tbytes, _ := proto.Marshal(b)\n\treturn bytes\n}", "title": "" }, { "docid": "b30031ca6224467f14f0737228ac723b", "score": "0.55375445", "text": "func addBlockHeader(db *sql.DB, h wire.BlockHeader) {\n\tblockHash := h.BlockHash()\n\t_, err := db.Exec(\n\t\t\"INSERT INTO headers (blockHash, version, PervBlock, MerkleRoot, Timestamp, Bits, Nonce)\"+\n\t\t\t\"VALUES ($1, $2, $3, $4, $5, $6, $7) \", blockHash[:], h.Version, h.PrevBlock[:], h.MerkleRoot[:], h.Timestamp, h.Bits, h.Nonce)\n\tif err != nil {\n\t\t// Should be checked or something, left for debug\n\t\tlog.Print(err)\n\t}\n}", "title": "" }, { "docid": "3b6b7c1a6bce18f8916de00f084be20c", "score": "0.55371094", "text": "func storeBlockByTxs(block *protocol.Block) {\n\n\t// Agg\n\tfor _, txHash := range block.AggTxData {\n\t\tstorage.WriteBlockHashByTxHash(txHash, block.Hash)\n\t}\n\n\t// Funds\n\tfor _, txHash := range block.FundsTxData {\n\t\tstorage.WriteBlockHashByTxHash(txHash, block.Hash)\n\t}\n\n\t// Accounts\n\tfor _, txHash := range block.AccTxData {\n\t\tstorage.WriteBlockHashByTxHash(txHash, block.Hash)\n\t}\n\n\t// Config\n\tfor _, txHash := range block.ConfigTxData {\n\t\tstorage.WriteBlockHashByTxHash(txHash, block.Hash)\n\t}\n\n\t// Delete\n\tfor _, txHash := range block.UpdateTxData {\n\t\tstorage.WriteBlockHashByTxHash(txHash, block.Hash)\n\t}\n}", "title": "" }, { "docid": "a5b3a525aff81b9c285f745076f07e14", "score": "0.5534229", "text": "func (bh BlobHandler) WriteBytes( offset int64, bytesRead int, data []byte, blobURL *azblob.BlockBlobURL, uploadedBlockList []signatures.UploadedBlock) (*signatures.UploadedBlock, error ) {\n\n\tsig,err := signatures.GenerateBlockSig(data, offset, bytesRead, 0 
)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tblockID := base64.StdEncoding.EncodeToString(sig.MD5Signature[:])\n\n\tisDupe := checkIfDupe(uploadedBlockList, blockID)\n\n\tnewBlock := signatures.UploadedBlock{\n\t\tBlockID: blockID,\n\t\tOffset: offset,\n\t\tSig : *sig,\n\t\tSize : int64(bytesRead),\n\t\tIsNew: true,\n\t\tIsDuplicate : isDupe}\n\n\t// not a dupe, upload it.\n\tif !isDupe {\n\t\tctx := context.Background() // This example uses a never-expiring context\n\t\t_, err = blobURL.StageBlock(ctx, blockID, bytes.NewReader(data), azblob.LeaseAccessConditions{}, nil )\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn &newBlock, nil\n}", "title": "" }, { "docid": "0693a211ff1d1fa50937b258998fc4fb", "score": "0.55339646", "text": "func (chain *BlockChain) AddBlock(block *Block) {\n\terr := chain.Database.Update(func(txn *badger.Txn) error {\n\t\tif _, err := txn.Get(block.Hash); err != nil {\n\t\t\treturn nil // the block is already store\n\t\t}\n\n\t\tblockData := block.Serialize()\n\t\tif err := txn.Set(block.Hash, blockData); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\titem, err := txn.Get([]byte(\"lh\"))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tlastHash, _ := item.ValueCopy(nil)\n\n\t\titem, err = txn.Get(lastHash)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tlastBlockData, _ := item.ValueCopy(nil)\n\t\tlastBlock := Deserialize(lastBlockData)\n\n\t\tif block.Heigth > lastBlock.Heigth {\n\t\t\terr = txn.Set([]byte(\"lh\"), block.Hash)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tchain.LastHash = block.Hash\n\t\t}\n\n\t\treturn nil\n\t})\n\n\tCheckError(err)\n}", "title": "" }, { "docid": "099a9564cdc5c24db8a7a6bb3907ca11", "score": "0.55242765", "text": "func (b *Block) Serialize() []byte {\n\tvar result bytes.Buffer\n\tencoder := gob.NewEncoder(&result)\n\tif err := encoder.Encode(b); err != nil {\n\t\tlog.Fatal(\"serialize :\", err)\n\t}\n\treturn result.Bytes()\n}", "title": "" }, { "docid": "db6b72a84a91b318d5b68e786cc096ef", "score": "0.5518079", "text": "func (c *consensus) syncBlock(block *types.Block) bool {\n\tif block.Header == nil || block.Cert == nil || block.Data == nil {\n\t\treturn false\n\t}\n\tif block.Header.View <= c.commit.View {\n\t\treturn true\n\t}\n\tif !c.verifier.VerifyAggregated(block.Header.Hash(), block.Cert.Sig) {\n\t\treturn false\n\t}\n\tlog := c.log.With(\n\t\tzap.Uint64(\"block view\", block.Header.View),\n\t\tzap.Binary(\"block hash\", block.Header.Hash()),\n\t)\n\tlog.Debug(\"syncing block\")\n\n\tif err := c.store.SaveBlock(block); err != nil {\n\t\tlog.Fatal(\"can't save block\")\n\t}\n\n\tc.updatePrepare(block.Header, block.Cert)\n\tc.update(block.Header, block.Cert)\n\treturn true\n}", "title": "" }, { "docid": "d2d164e9130ee125f1de5486a0687279", "score": "0.5503791", "text": "func (b *Block) Serialize() []byte {\n\tvar result bytes.Buffer\n\tencoder := gob.NewEncoder(&result)\n\n\terr := encoder.Encode(b)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\treturn result.Bytes()\n}", "title": "" }, { "docid": "cd62806e01fc22620021311e88ac7305", "score": "0.5489776", "text": "func (block *Material) Write(w io.Writer) error {\n\n\terr := WriteDataBlockHeader(w, DataBlockHeader{\n\t\tType: typeMaterial,\n\t\tVersion: materialBlockVersion,\n\t\tSize: uint32(block.GetSize() - rexDataBlockHeaderSize),\n\t\tID: block.ID,\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar data = 
[]interface{}{\n\t\tfloat32(block.KaRgb.X()),\n\t\tfloat32(block.KaRgb.Y()),\n\t\tfloat32(block.KaRgb.Z()),\n\t\tuint64(block.KaTextureID),\n\n\t\tfloat32(block.KdRgb.X()),\n\t\tfloat32(block.KdRgb.Y()),\n\t\tfloat32(block.KdRgb.Z()),\n\t\tuint64(block.KdTextureID),\n\n\t\tfloat32(block.KsRgb.X()),\n\t\tfloat32(block.KsRgb.Y()),\n\t\tfloat32(block.KsRgb.Z()),\n\t\tuint64(block.KsTextureID),\n\n\t\tfloat32(block.Ns),\n\t\tfloat32(block.Alpha),\n\t}\n\tfor _, v := range data {\n\t\terr := binary.Write(w, binary.LittleEndian, v)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "21f7dd5e8b2c761def9c4213c117d64b", "score": "0.5470024", "text": "func (bc *BlockChain) AddBlock(block *Block) error {\n\tglg.Info(\"Core: Adding block to the blockchain - \" + hex.EncodeToString(block.GetHeader().GetHash()))\n\tif block.VerifyBlock() == false {\n\t\treturn ErrUnverifiedBlock\n\t}\n\terr := bc.getDB().Update(func(tx *bolt.Tx) error {\n\t\tb := tx.Bucket([]byte(BlockBucket))\n\t\tinDb := b.Get(block.Header.GetHash())\n\t\tif inDb != nil {\n\t\t\tglg.Warn(\"Block exists in blockchain\")\n\t\t\treturn nil\n\t\t}\n\n\t\tblockinfo := BlockInfo{\n\t\t\tHeader: block.GetHeader(),\n\t\t\tHeight: block.GetHeight(),\n\t\t\tTotalJobs: uint(len(block.GetNodes())),\n\t\t\tFileName: block.fileStats().Name(),\n\t\t\tFileSize: block.fileStats().Size(),\n\t\t}\n\n\t\tif err := b.Put(block.GetHeader().GetHash(), blockinfo.Serialize()); err != nil {\n\t\t\tglg.Fatal(err)\n\t\t}\n\n\t\t//FIXME: handle a fork\n\t\tlatest, err := bc.GetBlockInfo(bc.getTip())\n\t\tif err != nil {\n\t\t\tglg.Fatal(err)\n\t\t}\n\t\tif block.GetHeight() > latest.GetHeight() {\n\t\t\tif err := b.Put([]byte(\"l\"), block.GetHeader().GetHash()); err != nil {\n\t\t\t\tglg.Fatal(err)\n\t\t\t}\n\t\t\tbc.setTip(block.GetHeader().GetHash())\n\t\t}\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\tglg.Fatal(err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "afb8e98b12ff9529542ea75f2e402e82", "score": "0.5468711", "text": "func (b *Block) serialize() []byte {\n\tvar blobBuf bytes.Buffer\n\tenc := gob.NewEncoder(&blobBuf)\n\n\tif err := enc.Encode(b); err != nil {\n\t\tpanic(fmt.Sprintf(\"Serialized block failed, error with:`%s`\\n\", err))\n\t}\n\n\treturn blobBuf.Bytes()\n}", "title": "" }, { "docid": "5871e8dbaa0ec590fd6e8d8f0e346b9f", "score": "0.5467157", "text": "func NewBlockWriter(logger log.Logger, dir string, blockSize int64) (*BlockWriter, error) {\n\tw := &BlockWriter{\n\t\tlogger: logger,\n\t\tdestinationDir: dir,\n\t\tblockSize: blockSize,\n\t}\n\tif err := w.initHead(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn w, nil\n}", "title": "" }, { "docid": "f0939bb3882e421429539b9578822d77", "score": "0.5459061", "text": "func (_Rollup *RollupTransactor) AddBlock(opts *bind.TransactOpts, header []byte, rollup []byte, signature []byte) (*types.Transaction, error) {\n\treturn _Rollup.contract.Transact(opts, \"addBlock\", header, rollup, signature)\n}", "title": "" } ]
query_id: 1dab691f1311c4e656e1ecbf69c84ed8
query: String returns the string representation
[ { "docid": "7eaf3040fc72cf3491869ccc01a0efcf", "score": "0.0", "text": "func (s LabelingJobOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" } ]
[ { "docid": "ec007bbe3327b5255b8b1c9f1bd93d5e", "score": "0.7395052", "text": "func (r QueryResCheckProof) String() string {\n\tjson, _ := json.Marshal(r)\n\treturn string(json)\n}", "title": "" }, { "docid": "da4d8b897637d127f500cb4192484dc7", "score": "0.7215058", "text": "func (s CreateAlgorithmOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "cdd3975194547c61a108d58ed15e80f9", "score": "0.72000957", "text": "func (s CreateCanaryOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "c83a5bd12162ec99d9b1a53614b442ab", "score": "0.71656793", "text": "func (s CreateSipRuleOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "db8842fe62264dda9500171233b2527b", "score": "0.7136329", "text": "func (e *Output) String() string {\n\ts, _ := json.Marshal(e)\n\treturn string(s)\n}", "title": "" }, { "docid": "520165f795f98fe572fcdc518731680a", "score": "0.71093225", "text": "func (format Format) String() string {\n\tif b, err := format.MarshalText(); err == nil {\n\t\treturn string(b)\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "2d6c072e1bff1f4a686a5542f18a5fe9", "score": "0.7057528", "text": "func (c Clips) String() string {\n\tjc, _ := json.Marshal(c)\n\treturn string(jc)\n}", "title": "" }, { "docid": "1472055af0168dc370ab058189f85504", "score": "0.7056295", "text": "func (o *Analyticsproposedagent) String() string {\n \n \n\n j, _ := json.Marshal(o)\n str, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n return str\n}", "title": "" }, { "docid": "bf22b02f24978fee496d5e512b792759", "score": "0.70489895", "text": "func (s *storeImpl) String() string {\n\treturn s.root.print(0)\n}", "title": "" }, { "docid": "0137340e8f3253deab5174b9d62ec007", "score": "0.7044831", "text": "func (o *Trustrequestcreate) String() string {\n o.UserIds = []string{\"\"} \n o.GroupIds = []string{\"\"} \n\n j, _ := json.Marshal(o)\n str, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n return str\n}", "title": "" }, { "docid": "b9d2c9ac285fd4432653ddb865e3c664", "score": "0.7029217", "text": "func (r Recipes) String() string {\n\tjr, _ := json.Marshal(r)\n\treturn string(jr)\n}", "title": "" }, { "docid": "8832ec20e073665e1076cc5c9bd161ee", "score": "0.7028946", "text": "func (o *Digitalcondition) String() string {\n \n \n \n \n \n \n \n \n \n\n j, _ := json.Marshal(o)\n str, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n return str\n}", "title": "" }, { "docid": "08080e32f3d4e3b2796729e5983bc821", "score": "0.7022526", "text": "func (s Extra) String() string {\n\tres := make([]string, 6)\n\tres[0] = \"ID: \" + reform.Inspect(s.ID, true)\n\tres[1] = \"Name: \" + reform.Inspect(s.Name, true)\n\tres[2] = \"Bytes: \" + reform.Inspect(s.Bytes, true)\n\tres[3] = \"Bytes2: \" + reform.Inspect(s.Bytes2, true)\n\tres[4] = \"Byte: \" + reform.Inspect(s.Byte, true)\n\tres[5] = \"Array: \" + reform.Inspect(s.Array, true)\n\treturn strings.Join(res, \", \")\n}", "title": "" }, { "docid": "4227d23b7db534200fb84a55e9ba6e09", "score": "0.7020032", "text": "func (g GroupTour) String() string {\n\tjg, _ := json.Marshal(g)\n\treturn string(jg)\n}", "title": "" }, { "docid": "86db9e500a890f3e8a5a4e83baf135a0", "score": "0.70127267", "text": "func (s RestoreObjectOutput) String() string {\n\treturn helper.Prettify(s)\n}", "title": "" }, { "docid": "37b155138c139822d50b994c5a3331e5", "score": "0.7010184", "text": 
"func (s CreateHITTypeOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "fc77724b5b5fb6ad25a57c6f9f290bd7", "score": "0.7002574", "text": "func (s CreateHITOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "37ae39d3ccfcc677077bc3f72107897b", "score": "0.6996832", "text": "func (s CreateProjectVersionOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "f567f4d9cc26fe5a2b4c893f59399681", "score": "0.69932973", "text": "func (s OutputSerialization) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "f567f4d9cc26fe5a2b4c893f59399681", "score": "0.69932973", "text": "func (s OutputSerialization) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "427583d39f7eb5e3589cf3a93cec6205", "score": "0.6991622", "text": "func (s CreateEntityRecognizerOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "329138c2f7911a48ea9c7a5663fdc43c", "score": "0.69891053", "text": "func (r Record) String() string {\n\ts, _ := r.Marshal()\n\treturn string(s)\n}", "title": "" }, { "docid": "d8185db0b98df24641e3555ce5618619", "score": "0.6986732", "text": "func (s CodegenGenericDataNonModel) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "7a41282ddfe1fb25d6379908b4ee8cca", "score": "0.69831926", "text": "func (o *Schedulegenerationresult) String() string {\n \n \n \n o.Messages = []Schedulegenerationmessage{{}} \n o.MessageSeverities = []Schedulermessagetypeseverity{{}} \n\n j, _ := json.Marshal(o)\n str, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n return str\n}", "title": "" }, { "docid": "a494ef93c257a9c37a4cfcebd4b6998e", "score": "0.6980524", "text": "func (m Lyric) String() string {\n\treturn fmt.Sprintf(\"%T: %#v\", m, m.Text())\n}", "title": "" }, { "docid": "be7daba54fa9cf50b37c2372f8b9d03f", "score": "0.69791585", "text": "func (s CreateKxDatabaseOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "7e1dc3b76e617a56b057009fc425a44c", "score": "0.6978618", "text": "func (s CreateSolutionOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "63062bc65338c89109fabccd65405752", "score": "0.6975082", "text": "func (s Replay) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "edea32e6794e33a9ff01a58b4f666123", "score": "0.69603413", "text": "func (s CreateSolutionVersionOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "0db46e4d413d7ef3e68390fe7046ed48", "score": "0.6958592", "text": "func (s RedactChannelMessageOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "0e453919c23e79a82f7fa0e1ebf43961", "score": "0.6956877", "text": "func (stats *Stats) String() string {\n\tvar b bytes.Buffer\n\tstats.Print(&b)\n\treturn b.String()\n}", "title": "" }, { "docid": "6a8ddfaa6e1969adedabc1176b65432b", "score": "0.6956473", "text": "func (s CreateQualificationTypeOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "f83e33b3cb9505975437ce9769ed0e90", "score": "0.69549096", "text": "func String() string {\n\treturn Get()\n}", "title": "" }, { "docid": "0853bf682facb857bf899ef1cba0a457", "score": "0.6952188", "text": "func (t tInfo) String() string {\n\tf, w, i := t.getAll()\n\treturn fmt.Sprintf(\"%02x:%02x:%d\", f, w, i)\n}", "title": "" }, { "docid": 
"4f146ee06ae1d70287b1769c210796d7", "score": "0.69520515", "text": "func (o *Queueutilizationdiagnostic) String() string {\n\tj, _ := json.Marshal(o)\n\tstr, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n\treturn str\n}", "title": "" }, { "docid": "15554db724f9a32bda5b2d389ae7af67", "score": "0.695193", "text": "func (s *store) String() string {\n\treturn s.root.print(0)\n}", "title": "" }, { "docid": "4dec7e7982a96fd50bc9d28f68259673", "score": "0.69501793", "text": "func (s StartReplayOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "4053670e5622ba1dfd39768b0bc15524", "score": "0.69486177", "text": "func (b *Binary) String() string {\n\tif b == nil {\n\t\treturn \"<nil>\"\n\t}\n\treturn fmt.Sprintf(\"&Binary{Subtype:%d, Data:%#v}\", b.Subtype, b.Data)\n}", "title": "" }, { "docid": "5414906c190f5389f44e1f64fc727b0b", "score": "0.69464415", "text": "func (o *Worktypecreate) String() string {\n\tj, _ := json.Marshal(o)\n\tstr, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n\treturn str\n}", "title": "" }, { "docid": "62139d383a9352c35a1017272ea215eb", "score": "0.69460964", "text": "func (s CreateRuleOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "db558f20eaf3407b71a07c20cf1c7e8d", "score": "0.69424075", "text": "func String(encodable Encodable) (string, error) {\n\tencd := bytes.Buffer{}\n\tif err := encodable.Encode(&encd); err != nil {\n\t\treturn \"\", err\n\t}\n\treturn encd.String(), nil\n}", "title": "" }, { "docid": "53da424ddfd66edc586c234353fdb946", "score": "0.6939724", "text": "func (s CreateStackOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "53da424ddfd66edc586c234353fdb946", "score": "0.6939724", "text": "func (s CreateStackOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "2407e9d59a60df678cefb91d039039af", "score": "0.693922", "text": "func (s GetSipRuleOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "8d551f9c9c6e8fdd42a4525062a430ec", "score": "0.69369525", "text": "func (s TranslationIsoFormats) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "a258768e1ec89dce6e5ff4b9c4665e8e", "score": "0.6935911", "text": "func (s DescribeFeatureTransformationOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "0a71ab8f05506651cfd85d531ae631bb", "score": "0.69312066", "text": "func (s CreateThesaurusOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "6cb9e63dcbb92f288fc3d2fbf5b2d1b7", "score": "0.69294906", "text": "func (o *Reportingturnaction) String() string {\n \n \n \n \n\n j, _ := json.Marshal(o)\n str, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n return str\n}", "title": "" }, { "docid": "42012657dfcf41498b818ec3722ef7b7", "score": "0.69287163", "text": "func (s CreateRateBasedRuleOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "adef145cc716637838723b8ad7331b27", "score": "0.6928428", "text": "func (s StartCanaryOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "adef145cc716637838723b8ad7331b27", "score": "0.6928428", "text": "func (s StartCanaryOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "3b3ea8085c21d11a318ee263475a0261", "score": "0.69257265", 
"text": "func (s CreateFlywheelOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "7cecd17575cadd63d9bd0cca9fd43a3f", "score": "0.69235957", "text": "func (p Problem) String() string {\n\tb, _ := json.Marshal(p)\n\treturn string(b)\n}", "title": "" }, { "docid": "c888b42fa516bb64f8356548203c1e07", "score": "0.69232535", "text": "func (s DefineExpressionOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "77be63b622be0c28ffdf52d02bb3eeb2", "score": "0.6921127", "text": "func (a ACOS) String() string {\n\tja, _ := json.Marshal(a)\n\treturn string(ja)\n}", "title": "" }, { "docid": "af16d1bd8d4fe3262c53ef582e925394", "score": "0.69204015", "text": "func (o *Testmoderesults) String() string {\n\tj, _ := json.Marshal(o)\n\tstr, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n\treturn str\n}", "title": "" }, { "docid": "0a4951aca5536cdd7f3b0e6fd63268bf", "score": "0.69188565", "text": "func (n *node) String() string {\n\treturn fmt.Sprintf(\"{meta:%v, fixed:%t, minSatisfyingVersion:%v}\", n.meta, n.fixed, n.minSatisfyingVersion)\n}", "title": "" }, { "docid": "eb0758a670111889abf2b40069751575", "score": "0.6915167", "text": "func (r Recipe) String() string {\n\tjr, _ := json.Marshal(r)\n\treturn string(jr)\n}", "title": "" }, { "docid": "8185a525fe70c2d10b5e52328cae688b", "score": "0.69141936", "text": "func (r Registration) String() string {\n\tjr, _ := json.Marshal(r)\n\treturn string(jr)\n}", "title": "" }, { "docid": "ca0c58daaf0d87b2eedeef4d2b21efff", "score": "0.6913508", "text": "func (o *Learningassessment) String() string {\n\tj, _ := json.Marshal(o)\n\tstr, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n\treturn str\n}", "title": "" }, { "docid": "cc078a3efd8f4ab892ce29e735af0651", "score": "0.69133043", "text": "func (m *T3) String() string {\n x := &m.matrix\n return fmt.Sprintf(\"%f %f %f\\n\"+\n \"%f %f %f\\n\"+\n \"%f %f %f\\n\"+\n \"%f %f %f\\n\",\n x[0], x[1], x[2],\n x[3], x[4], x[5],\n x[6], x[7], x[8],\n x[9], x[10], x[11])\n}", "title": "" }, { "docid": "d4d664d42c1264e369743ce67c40523c", "score": "0.69123167", "text": "func (r QueryResCheckRequests) String() string {\n\tjson, _ := json.Marshal(r)\n\treturn string(json)\n}", "title": "" }, { "docid": "491ec12d0e658fbc1c5d00228f7966d3", "score": "0.69123024", "text": "func (s RebuildEnvironmentOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "ee1c82439f428593804e7d41e67a88bb", "score": "0.6911058", "text": "func (s DeleteAlgorithmOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "f3d17e2063beb77383a36ea3b5385691", "score": "0.69106245", "text": "func (s CanaryCodeOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "f3d17e2063beb77383a36ea3b5385691", "score": "0.69106245", "text": "func (s CanaryCodeOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "2a7047abe98f55cdae1f8f9737cdce8d", "score": "0.6910465", "text": "func (s GetRawMessageContentOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "ad47f192d74a333770b858cab44b6f33", "score": "0.6909528", "text": "func (i *Info) String() string {\n\tb, _ := json.Marshal(i)\n\treturn string(b)\n}", "title": "" }, { "docid": "a5f154353fbe365afb8ad8ced471efce", "score": "0.690952", "text": "func (a Answers) String() string {\n\tja, _ := 
json.Marshal(a)\n\treturn string(ja)\n}", "title": "" }, { "docid": "7d8fb6be9b020d11465e9adcc8ce4d23", "score": "0.69052833", "text": "func (r *Resource) String() string {\n\tbs, err := r.MarshalJSON()\n\tif err != nil {\n\t\treturn \"<\" + err.Error() + \">\"\n\t}\n\treturn strings.TrimSpace(string(bs)) + r.options.String()\n}", "title": "" }, { "docid": "79249e6cc9b94d1e1cdf4599277a1612", "score": "0.6903937", "text": "func (o *Provisioninfo) String() string {\n \n \n \n\n j, _ := json.Marshal(o)\n str, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n return str\n}", "title": "" }, { "docid": "42cb2a1938d71b576d0f59f207efe6c5", "score": "0.6903488", "text": "func (id Identification) String() string {\n\tstr, err := json.MarshalIndent(id, \"\", \" \")\n\tif err != nil {\n\t\treturn \"\"\n\t}\n\treturn fmt.Sprintf(\"%s\", str)\n}", "title": "" }, { "docid": "78ba1519c3923aa3f09f81c2569e4238", "score": "0.6900515", "text": "func (s CancelReplayOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "796dddc637d58c65df620d369b284781", "score": "0.69000846", "text": "func (c Clip) String() string {\n\tjc, _ := json.Marshal(c)\n\treturn string(jc)\n}", "title": "" }, { "docid": "5d0ef855212dd74d9c8f6bdec1114922", "score": "0.6895479", "text": "func (o *Limitchangerequestdetails) String() string {\n\tj, _ := json.Marshal(o)\n\tstr, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n\treturn str\n}", "title": "" }, { "docid": "e1ccf1817f41db110a0412aa9173cc4b", "score": "0.68944854", "text": "func (s CreateDetectorOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "e1ccf1817f41db110a0412aa9173cc4b", "score": "0.68944854", "text": "func (s CreateDetectorOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "c1e61dcc3f2c005835b59fad0a71e8da", "score": "0.6894107", "text": "func (s CreateResolverRuleOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "c7b28830138ec71f64ffc63011643b2e", "score": "0.6892943", "text": "func (s ReloadTablesOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "3cc64037e831dc747b21f738c72394ea", "score": "0.6892793", "text": "func (a Articles) String() string {\n\tja, _ := json.Marshal(a)\n\treturn string(ja)\n}", "title": "" }, { "docid": "ccb9cfe7096e9780aacc00ad49b755db", "score": "0.68885386", "text": "func (self *cat) String() string {\n\ts, _ := json.Marshal(self)\n\tvar pretty bytes.Buffer\n\tjson.Indent(&pretty, s, \"\", \" \")\n\treturn string(pretty.Bytes())\n}", "title": "" }, { "docid": "f93d8a32a6f01770a9e1a9237eee3537", "score": "0.6888019", "text": "func (s StartCelebrityRecognitionOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "3aad9005760fdaf03c048b7b9e45b6f4", "score": "0.68866765", "text": "func (s CreateFlowOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "50c9158ef84c35ab98485c1222f83efc", "score": "0.6886227", "text": "func (o *Workplanconstraintmessage) String() string {\n \n o.Arguments = []Workplanvalidationmessageargument{{}} \n\n j, _ := json.Marshal(o)\n str, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n return str\n}", "title": "" }, { "docid": "99464edfeb75b0e760e5aad8006d8d25", "score": "0.6885683", "text": "func (o *Outcomepercentilecondition) String() string {\n \n \n \n\n j, _ := 
json.Marshal(o)\n str, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n return str\n}", "title": "" }, { "docid": "c50750ae8ca4ac8cad54efbe03fcd467", "score": "0.68851185", "text": "func (s PathToObjectIdentifiers) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "f4d5ca267242dbb34de825e8f61f6af3", "score": "0.6883426", "text": "func (s CreateBGPPeerOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "7fe286cc2dc72e22ac80cd923dc22541", "score": "0.6881843", "text": "func (s IDOnly) String() string {\n\tres := make([]string, 1)\n\tres[0] = \"ID: \" + reform.Inspect(s.ID, true)\n\treturn strings.Join(res, \", \")\n}", "title": "" }, { "docid": "7fe286cc2dc72e22ac80cd923dc22541", "score": "0.6881843", "text": "func (s IDOnly) String() string {\n\tres := make([]string, 1)\n\tres[0] = \"ID: \" + reform.Inspect(s.ID, true)\n\treturn strings.Join(res, \", \")\n}", "title": "" }, { "docid": "da52b4d08629a4054eacbc7dec399415", "score": "0.68807954", "text": "func (o *Createsecuresession) String() string {\n \n \n \n \n\n j, _ := json.Marshal(o)\n str, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n return str\n}", "title": "" }, { "docid": "99e04df764f43b854ca8bc8c19b313df", "score": "0.6880362", "text": "func (s *Session) String() string {\n\treturn fmt.Sprintf(s.GetStringRepresentation())\n}", "title": "" }, { "docid": "0ff975a5d54856496d1fea213a63ed2a", "score": "0.6879339", "text": "func (s DescribeCanariesOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "0ff975a5d54856496d1fea213a63ed2a", "score": "0.6879339", "text": "func (s DescribeCanariesOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "18969c4dd06de4391c90b6cef495cc71", "score": "0.6877248", "text": "func (o *Evaluationquestiongroupscore) String() string {\n\tj, _ := json.Marshal(o)\n\tstr, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\\\u`, `\\u`, -1))\n\n\treturn str\n}", "title": "" }, { "docid": "328d864c8b9c74ecafe47e27d4ad016f", "score": "0.68766147", "text": "func (s CreateModelOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "04b2192c69c22d2929f11f3d49c7c8c5", "score": "0.6875457", "text": "func (s StartFlywheelIterationOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "cd1dd0bbea4ed6554dfbf9632ce99a1c", "score": "0.6874318", "text": "func (r Result) String() string {\n\tjr, _ := json.Marshal(r)\n\treturn string(jr)\n}", "title": "" }, { "docid": "cd1dd0bbea4ed6554dfbf9632ce99a1c", "score": "0.6874318", "text": "func (r Result) String() string {\n\tjr, _ := json.Marshal(r)\n\treturn string(jr)\n}", "title": "" }, { "docid": "38167540dad3daaaa2851aa7c2116d55", "score": "0.6871602", "text": "func (s CreateCodeRepositoryOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "daf9c1a4f5be9be430167b4424f99958", "score": "0.6871366", "text": "func (s CreateModelVersionOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "0ee975d7279a386d0e5229d511086f00", "score": "0.6870403", "text": "func (s GetDatabaseOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" } ]
c4e9c4c0571843de654b20f1ee741cbf
GridDist returns a matrix which contains Euclidean distances between SOM units
[ { "docid": "91fb2bb3fdac01a8d1b02bbceca1176f", "score": "0.74860525", "text": "func (m Map) GridDist() *mat64.Dense {\n\treturn m.gridDist\n}", "title": "" } ]
[ { "docid": "09bc6498884c925cfcb46bfce844e348", "score": "0.702323", "text": "func (destination Point) gridDist() int { // `destination` is only a name\n\treturn destination.X + destination.Y\n}", "title": "" }, { "docid": "f8670d613f5f9e33b6a9469a086885ac", "score": "0.5762495", "text": "func (ug UniformGrid) GridToCoord(in []int) []float64 {\n\ttheta := math.Pi * (float64(in[0]) + 0.5) / float64(ug.MTheta)\n\tmPhi := math.Round(2.0 * math.Pi * math.Sin(theta) / ug.DPhi)\n\tphi := 2 * math.Pi * float64(in[1]) / mPhi\n\treturn []float64{(phi / math.Pi) * 180.0, (theta/math.Pi)*180.0 - 90.0}\n}", "title": "" }, { "docid": "6f1fe3cbde3c821af98ea5ee17208bee", "score": "0.55676645", "text": "func (ug UniformGrid) CoordToGrid(lng, lat float64) []int {\n\ttheta := (lat + 90.0) * math.Pi / 180.0\n\tm := math.Round((theta * ug.MTheta / math.Pi) - 0.5)\n\ttheta = math.Pi * (float64(m) + 0.5) / float64(ug.MTheta)\n\tvar phi float64\n\tif lng < 0 {\n\t\tphi = float64(lng+360.0) * math.Pi / 180.0\n\t} else {\n\t\tphi = lng * math.Pi / 180.0\n\t}\n\tmPhi := math.Round(2.0 * math.Pi * math.Sin(theta) / ug.DPhi)\n\tn := math.Round(phi * mPhi / (2.0 * math.Pi))\n\treturn []int{mod(int(m), int(ug.MTheta)), mod(int(n), int(mPhi))}\n}", "title": "" }, { "docid": "e0359d63894e4cee8b195b4867b4f206", "score": "0.5518122", "text": "func euclDist(X, Y, Z []float64, n int) mat.Matrix {\n //Square elements\n X1 := squareElem(X, n)\n Y1 := squareElem(Y, n)\n Z1 := squareElem(Z, n)\n\n D1 := make([]float64, 0)\n for i:=0; i<n; i++ {\n D1 = append(D1, X1[i]+Y1[i]+Z1[i])\n }\n\n temp_D1 := D1\n for i:=0; i<(n-1); i++ {\n D1 = append(D1,temp_D1...)\n }\n\n\n D1_Mat := mat.NewDense(n,n,D1)\n D1_Mat.Add(D1_Mat,D1_Mat.T())\n\n D2 := make([]float64, 0)\n D2 = append(D2, X...)\n D2 = append(D2, Y...)\n D2 = append(D2, Z...)\n\n D3 := make([]float64, 0)\n D3 = append(D3, X...)\n D3 = append(D3, Y...)\n D3 = append(D3, Z...)\n \n D2_Mat := mat.NewDense(3,n,D2)\n D2_Scaled := mat.NewDense(3,n,D3)\n D2_Scaled.Scale(2, D2_Scaled)\n\n D := mat.NewDense(n, n, nil)\n D.Product(D2_Mat.T(),D2_Scaled)\n\n D.Sub(D1_Mat, D)\n D.Apply(squareRoot, D)\n\n return D\n}", "title": "" }, { "docid": "483ba74b0e721ce7abb23ae6273f11ba", "score": "0.54598963", "text": "func Grid2D(samples []Vec3, filter filters.GridFilter) (Function2D, error) {\n\tstride, offs, max, vals, err := makeGrid2d(samples)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn func(pos Vec2) (v float64, err error) {\n\t\tvar rPos [2]float64\n\t\tfor i, p := range pos {\n\t\t\tif p < offs[i] || p > max[i] {\n\t\t\t\terr = ErrBadCoord\n\t\t\t\treturn\n\t\t\t}\n\t\t\trPos[i] = (p - offs[i]) / stride[i]\n\t\t}\n\t\tv = interp2d(vals, Vec2{rPos[0], rPos[1]}, filter)\n\t\tif math.IsNaN(v) {\n\t\t\tv = 0\n\t\t\terr = ErrBadCoord\n\t\t}\n\t\treturn\n\t}, nil\n}", "title": "" }, { "docid": "227d4f4a67233aea61b52c06a89fbd31", "score": "0.5178979", "text": "func Grid2DPositions(min, stride, max Vec2) <-chan Vec2 {\n\tpositions := make(chan Vec2, runtime.GOMAXPROCS(0)*2)\n\tgo func() {\n\t\tfor x := min[0]; x < max[0]; x += stride[0] {\n\t\t\tfor y := min[1]; y < max[1]; y += stride[1] {\n\t\t\t\tpositions <- Vec2{x, y}\n\t\t\t}\n\t\t}\n\t\tclose(positions)\n\t}()\n\treturn positions\n}", "title": "" }, { "docid": "a8509861588254408ace6dbb0515e5a1", "score": "0.5160864", "text": "func GenerateGrid(rows, cols int, diffc float64) Grid {\n\tcells := rows * cols\n\tbombs := int(math.Ceil(float64(cells) * diffc))\n\tm := newGrid(rows, cols)\n\tvar offset int\n\tfor bombs > 0 {\n\t\toffset 
+= rand.Intn(rand.Intn(cells-offset-bombs) + 1)\n\t\ti := offset / cols\n\t\tj := offset % cols\n\t\tm[i][j] = Bomb\n\t\tbombs--\n\t\toffset++\n\t}\n\treturn m\n}", "title": "" }, { "docid": "d82aaf17903a0664096ec499544adc6c", "score": "0.51538926", "text": "func (d Distances) Cells() []*Cell {\n\tkeys := make([]*Cell, len(d.cells))\n\n\ti := 0\n\tfor key := range d.cells {\n\t\tkeys[i] = key\n\t\ti++\n\t}\n\n\treturn keys\n}", "title": "" }, { "docid": "55ce88070a6d7575b384c38573188fcd", "score": "0.512576", "text": "func Grid(m *Machine) {\n\terrorMsg := \"Invalid arguments. Usage: 'grid [int]' or 'grid [cols] x [rows]'\"\n\n\tcols := m.ArgAsString(0)\n\trows := cols\n\tif m.ArgCount() == 3 {\n\t\tm.Assert(m.ArgAsString(1) == \"x\", errorMsg)\n\t\trows = m.ArgAsString(2)\n\t}\n\n\tif _, err := strconv.Atoi(cols); err != nil {\n\t\tm.Throw(errorMsg)\n\t}\n\tif _, err := strconv.Atoi(rows); err != nil {\n\t\tm.Throw(errorMsg)\n\t}\n\n\tm.Vars[\".grid.cols\"] = cols\n\tm.Vars[\".grid.rows\"] = rows\n}", "title": "" }, { "docid": "a4ee8493d8232c6fd8ffd5766dd8f158", "score": "0.5107406", "text": "func (c Coords) Dist(c2 Coords) float64 {\n\tdelta := getDelta(c, c2)\n\tdLat := toRad(delta.Lat)\n\tdLon := toRad(delta.Lon)\n\ta := math.Pow(math.Sin(dLat/2), 2) + math.Cos(toRad(c.Lat))*math.Cos(toRad(c2.Lat))*math.Pow(math.Sin(dLon/2), 2)\n\treturn 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a)) * 6372.8 // Earth's radius in km\n}", "title": "" }, { "docid": "3fdd816479625cfd4feb6efbb25508db", "score": "0.4998547", "text": "func initGrid(cols, rows int) []*cell {\n\tgrid := []*cell{}\n\tfor j := 0; j < rows; j++ {\n\t\tfor i := 0; i < cols; i++ {\n\t\t\tnewCell := newCell(i, j)\n\t\t\tgrid = append(grid, newCell)\n\t\t}\n\t}\n\treturn grid\n}", "title": "" }, { "docid": "9dc0e497b6eed2e262b76650676cdab2", "score": "0.49870178", "text": "func Grid(w, h float64, nx, ny int, r float64) *Path {\n\tif nx < 1 || ny < 1 || w <= float64(nx+1)*r || h <= float64(ny+1)*r {\n\t\treturn &Path{}\n\t}\n\n\tp := Rectangle(w, h)\n\tdx, dy := (w-float64(nx+1)*r)/float64(nx), (h-float64(ny+1)*r)/float64(ny)\n\tcell := Rectangle(dx, dy).Reverse()\n\tfor j := 0; j < ny; j++ {\n\t\tfor i := 0; i < nx; i++ {\n\t\t\tx := r + float64(i)*(r+dx)\n\t\t\ty := r + float64(j)*(r+dy)\n\t\t\tp = p.Append(cell.Translate(x, y))\n\t\t}\n\t}\n\treturn p\n}", "title": "" }, { "docid": "cd4389de97d348b95f79e3c4aad548c6", "score": "0.49864075", "text": "func dist(x, y float64) float64 {\n\tx2 := math.Pow(float64(x), 2)\n\ty2 := math.Pow(float64(y), 2)\n\treturn math.Sqrt(x2 + y2)\n}", "title": "" }, { "docid": "45393c7683cea4c2e2cb642e996070e9", "score": "0.49499568", "text": "func EuclDist(x1, y1, x2, y2 int) float64 {\n\treturn math.Sqrt(math.Pow(float64(x1-x2), 2) + math.Pow(float64(y1-y2), 2))\n}", "title": "" }, { "docid": "06c7fd137d518d496bea1745c3db4eab", "score": "0.49352852", "text": "func EuclideanDistance(fromLat, fromLng, toLat, toLng float64) float64 {\n\tfromLocation := geoutils.NewLocation(fromLat, fromLng)\n\ttoLocation := geoutils.NewLocation(toLat, toLng)\n\treturn fromLocation.EuclideanDistance(toLocation)\n}", "title": "" }, { "docid": "0945c4b5e54d9330f4382d4361dc3da5", "score": "0.49271682", "text": "func GenerateGrid(setup Grid, plotCoords chan<- Coordinate) {\n\tdefer close(plotCoords)\n\n\tcellInt := int(setup.Cells)\n\tcellWidth := setup.Width / setup.Cells\n\n\tfor y := 0; y < cellInt; y++ {\n\t\tyf := float64(y)\n\n\t\tif y%2 == 0 {\n\t\t\tplotCoords <- Coordinate{X: setup.Cells * cellWidth, Y: yf * 
cellWidth}\n\t\t\tplotCoords <- Coordinate{X: setup.Cells * cellWidth, Y: (yf + 1) * cellWidth}\n\t\t} else {\n\t\t\tplotCoords <- Coordinate{X: 0, Y: yf * cellWidth}\n\t\t\tplotCoords <- Coordinate{X: 0, Y: (yf + 1) * cellWidth}\n\t\t}\n\t}\n\n\tplotCoords <- Coordinate{X: 0, Y: setup.Cells * cellWidth}\n\n\tfor x := 0; x < cellInt; x++ {\n\t\txf := float64(x)\n\n\t\tif x%2 == 0 {\n\t\t\tplotCoords <- Coordinate{X: xf * cellWidth, Y: 0}\n\t\t\tplotCoords <- Coordinate{X: (xf + 1) * cellWidth, Y: 0}\n\t\t} else {\n\t\t\tplotCoords <- Coordinate{X: xf * cellWidth, Y: setup.Cells * cellWidth}\n\t\t\tplotCoords <- Coordinate{X: (xf + 1) * cellWidth, Y: setup.Cells * cellWidth}\n\t\t}\n\t}\n\n\tif cellInt%2 == 0 {\n\t\tplotCoords <- Coordinate{X: setup.Cells * cellWidth, Y: 0}\n\t}\n\tplotCoords <- Coordinate{X: 0, Y: 0}\n}", "title": "" }, { "docid": "bb68f622a7ed07ed39a845431d016148", "score": "0.49249873", "text": "func distanciaJogadores(p,q GridPosition) GridPosition{\n\tx := abs(p.x - q.x)\n\ty := abs(p.y - q.y)\n\treturn GridPosition{x,y}\n}", "title": "" }, { "docid": "3b17eae4a0893da7d807cdb9b69e0f92", "score": "0.48929355", "text": "func dijkstra(grid [][][]gridsquare, start coord, end coord) (int) {\n\t\n\tunvisited := make(map[int]bool)\n\tgridwidth := len(grid)\n\t//gridheight := len(grid[0])\n\tnodegrid := make([][][]dijkstranode,len(grid))\n\tfor i:= 0; i < len(grid); i++ {\n\t\tnodegrid[i] = make([][]dijkstranode,len(grid[i]))\n\t\tfor j:=0; j<len(grid[i]); j++ {\n\t\t\tnodegrid[i][j] = make([]dijkstranode, 3)\n\t\t\tfor k:=0; k<3; k++ {\n\t\t\t\tnodegrid[i][j][k].dist = -1 // infinity\n\t\t\t\tnodegrid[i][j][k].x = i\n\t\t\t\tnodegrid[i][j][k].y = j\n\t\t\t\tnodegrid[i][j][k].tool = k\n\t\t\t\tif !isObstacle(grid,i,j,k) {\n\t\t\t\t\tunvisited[((j*gridwidth) + i)*3 + k] = true\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t\n\tvar currnode coord\n\tnodegrid[start.x][start.y][start.tool].dist = 0\n\tcurrnode = start\n\tsteps := 0\n\tfor {\n\t\tcurrdist := nodegrid[currnode.x][currnode.y][currnode.tool].dist\n\t\tnextdist := currdist+1\n\t\t\n\t\tfor i := 0; i < 6; i++ {\n\t\t\tvar n coord\n\t\t\tif i == 0 { // north\n\t\t\t\tn = coord{x:currnode.x, y:currnode.y-1, tool:currnode.tool}\n\t\t\t} else if i == 1 { // south\n\t\t\t\t\tn = coord{x:currnode.x, y:currnode.y+1, tool:currnode.tool}\n\t\t\t} else if i == 2 { // east\n\t\t\t\t\tn = coord{x:currnode.x + 1, y:currnode.y, tool:currnode.tool}\n\t\t\t} else if i == 3 { // west\n\t\t\t\t\tn = coord{x:currnode.x - 1 , y:currnode.y, tool:currnode.tool}\n\t\t\t} else if i == 4 { // change tool 1\n\t\t\t\t\tn = coord{x:currnode.x , y:currnode.y, tool:(currnode.tool+1) % 3}\n\t\t\t} else if i == 5 { // change tool 2\n\t\t\t\t\tn = coord{x:currnode.x , y:currnode.y, tool:(currnode.tool+2) % 3}\n\t\t\t}\n\t\t\tif !isObstacle(grid, n.x, n.y, n.tool) {\n\t\t\t\tnextdist2 := nextdist\n\t\t\t\tif(i == 4 || i == 5) {\n\t\t\t\t\tnextdist2+=6\n\t\t\t\t}\n\t\t\t\tif(!nodegrid[n.x][n.y][n.tool].visited && (nodegrid[n.x][n.y][n.tool].dist > nextdist2 || nodegrid[n.x][n.y][n.tool].dist < 0 )) {\n\t\t\t\t\tnodegrid[n.x][n.y][n.tool].dist = nextdist2\n\t\t\t\t\tnodegrid[n.x][n.y][n.tool].parent = currnode\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tnodegrid[currnode.x][currnode.y][currnode.tool].visited = true\n\t\tdelete(unvisited, (currnode.x + (currnode.y*gridwidth))*3 + currnode.tool)\n\t\t\n\t\tif(currnode == end) {\n\t\t\tbreak\n\t\t} else {\n\t\t\t// find node in unvisited set with smallest distance\n\t\t\t// if smallest distance is infinity, destination is 
unreachable\n\t\t\tcandidates := getMapIntBoolKeys(unvisited)\n\t\t\tleast_x := -1\n\t\t\tleast_y := -1\n\t\t\tleast_tool := -1\n\t\t\tleast_dist := -1\n\t\t\tfor _,k := range candidates {\n\t\t\t\ttool:=k % 3\n\t\t\t\tk = (k - tool)/3\n\t\t\t\ty:=k/gridwidth\n\t\t\t\tx:=k - (y*gridwidth)\n\t\t\t\tif(nodegrid[x][y][tool].dist != -1 && (least_dist == -1 || nodegrid[x][y][tool].dist < least_dist)) {\n\t\t\t\t\tleast_dist = nodegrid[x][y][tool].dist\n\t\t\t\t\tleast_x = x\n\t\t\t\t\tleast_y = y\n\t\t\t\t\tleast_tool = tool\n\t\t\t\t}\n\t\t\t}\n\t\t\tif(least_dist == -1) {\n\t\t\t\tbreak\n\t\t\t} else {\n\t\t\t\tcurrnode.x = least_x\n\t\t\t\tcurrnode.y = least_y\n\t\t\t\tcurrnode.tool = least_tool\n\t\t\t}\n\t\t}\t\n\t\tsteps++\n\t}\n\t\n\tif(currnode == end) { // if we reached the target\n\t\ttemp := currnode\n\t\ttemp_node := nodegrid[temp.x][temp.y][temp.tool]\n\t\tfor {\n\t\t\ttemp = temp_node.parent\n\t\t\ttemp_node = nodegrid[temp.x][temp.y][temp.tool]\n\t\t\tif(temp.x == start.x && temp.y == start.y && temp.tool == start.tool) {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\t\n\t\n\t\treturn nodegrid[end.x][end.y][end.tool].dist\n\t} else { // no path\n\t\treturn -1\n\t}\n}", "title": "" }, { "docid": "85359040fb980679cccfe555694e8ab0", "score": "0.4879047", "text": "func (s settings) GridToGeodetic(coordinate RT90Cordinate) (GeodeticCoordinate, error) {\n\tif s.centralMeridian == nil {\n\t\treturn GeodeticCoordinate{}, errors.New(\"centeral meridian is not set\")\n\t}\n\n\t// Prepare ellipsoid-based stuff.\n\te2 := s.flattening * (2.0 - s.flattening)\n\tn := s.flattening / (2.0 - s.flattening)\n\taRoof := s.axis / (1.0 + n) * (1.0 + n*n/4.0 + n*n*n*n/64.0)\n\tdelta1 := n/2.0 - 2.0*n*n/3.0 + 37.0*n*n*n/96.0 - n*n*n*n/360.0\n\tdelta2 := n*n/48.0 + n*n*n/15.0 - 437.0*n*n*n*n/1440.0\n\tdelta3 := 17.0*n*n*n/480.0 - 37*n*n*n*n/840.0\n\tdelta4 := 4397.0 * n * n * n * n / 161280.0\n\n\tAstar := e2 + e2*e2 + e2*e2*e2 + e2*e2*e2*e2\n\tBstar := -(7.0*e2*e2 + 17.0*e2*e2*e2 + 30.0*e2*e2*e2*e2) / 6.0\n\tCstar := (224.0*e2*e2*e2 + 889.0*e2*e2*e2*e2) / 120.0\n\tDstar := -(4279.0 * e2 * e2 * e2 * e2) / 1260.0\n\n\t// Convert.\n\tdegToRad := math.Pi / 180\n\tlambdaZero := *s.centralMeridian * degToRad\n\txi := (coordinate.X - s.falseNorthing) / (s.scale * aRoof)\n\teta := (coordinate.Y - s.falseEasting) / (s.scale * aRoof)\n\txiPrim := (xi -\n\t\tdelta1*math.Sin(2.0*xi)*math.Cosh(2.0*eta) -\n\t\tdelta2*math.Sin(4.0*xi)*math.Cosh(4.0*eta) -\n\t\tdelta3*math.Sin(6.0*xi)*math.Cosh(6.0*eta) -\n\t\tdelta4*math.Sin(8.0*xi)*math.Cosh(8.0*eta))\n\n\tetaPrim := (eta -\n\t\tdelta1*math.Cos(2.0*xi)*math.Sinh(2.0*eta) -\n\t\tdelta2*math.Cos(4.0*xi)*math.Sinh(4.0*eta) -\n\t\tdelta3*math.Cos(6.0*xi)*math.Sinh(6.0*eta) -\n\t\tdelta4*math.Cos(8.0*xi)*math.Sinh(8.0*eta))\n\n\tphiStar := math.Asin(math.Sin(xiPrim) / math.Cosh(etaPrim))\n\tdeltaLambda := math.Atan(math.Sinh(etaPrim) / math.Cos(xiPrim))\n\tlonRadian := lambdaZero + deltaLambda\n\tlatRadian := (phiStar + math.Sin(phiStar)*math.Cos(phiStar)*\n\t\t(Astar+\n\t\t\tBstar*math.Pow(math.Sin(phiStar), 2)+\n\t\t\tCstar*math.Pow(math.Sin(phiStar), 4)+\n\t\t\tDstar*math.Pow(math.Sin(phiStar), 6)))\n\n\treturn GeodeticCoordinate{\n\t\tLatitude: latRadian * 180.0 / math.Pi,\n\t\tLongitude: lonRadian * 180.0 / math.Pi,\n\t}, nil\n}", "title": "" }, { "docid": "16c714602cf312ab2f130f18021d74c9", "score": "0.4865734", "text": "func EuclideanDist(pa, pb []float64) float64 {\n\tdistSqrd := 0.0\n\tfor i, pai := range pa {\n\t\tdistSqrd += math.Pow(pai-pb[i], 2.)\n\t}\n\treturn 
math.Sqrt(distSqrd)\n}", "title": "" }, { "docid": "34a91b00ebfee16edf4ec3d2f77ca280", "score": "0.48582545", "text": "func NewGrid(width, height int) *Grid {\n\tif width < 1 {\n\t\twidth = 1\n\t}\n\tif height < 1 {\n\t\theight = 1\n\t}\n\tg := &Grid{}\n\tg.elts = make([]byte, width*height)\n\tg.cells = make([][]byte, height)\n\tfor i := range g.cells {\n\t\tg.cells[i] = g.elts[i*width : (i+1)*width : (i+1)*width]\n\t}\n\treturn g\n}", "title": "" }, { "docid": "f14967c073ed2d542560b574daade6b8", "score": "0.48437136", "text": "func initGrid(s1 string, s2 string) [][]int {\n\tgrid := make([][]int, 1+len(s1))\n\tfor i := 0; i < len(grid); i++ {\n\t\tgrid[i] = make([]int, 1+len(s2))\n\t}\n\treturn grid\n}", "title": "" }, { "docid": "60cf759593c0096c09e4ebb2bed94240", "score": "0.48435786", "text": "func utmgrid() {\n\tif len(os.Args) != 5 {\n\t\tfmt.Printf(\"\\nUsage:\\n\")\n\t\tfmt.Printf(\" %s utmgrid utmmin utmmax distance\\n\", progName)\n\t\tfmt.Printf(\"\\nExample:\\n\")\n\t\tfmt.Printf(\" %s utmgrid \\\"32 North 390000 5730000\\\" \\\"32 North 430000 5760000\\\" 10000\\n\", progName)\n\t\tfmt.Printf(\"\\nRemarks:\\n\")\n\t\tfmt.Printf(\" utmmin = lower left start point in UTM format\\n\")\n\t\tfmt.Printf(\" utmmax = upper right end point in UTM format\\n\")\n\t\tfmt.Printf(\" distance = grid distance / square sidelength in meter\\n\")\n\t\tfmt.Printf(\" utm format = zone number, hemisphere, easting, northing\\n\")\n\t\tfmt.Printf(\" the hemisphere is either North or South\\n\")\n\t\tfmt.Printf(\" a zone letter is not necessary\\n\")\n\t\tfmt.Printf(\"\\nHints:\\n\")\n\t\tfmt.Printf(\" useful to create a geodetic utm grid\\n\")\n\t\tfmt.Printf(\"\\n\")\n\t\tos.Exit(1)\n\t}\n\n\tutmMinZoneNumber := 0\n\tutmMinHemisphere := \"\"\n\tutmMinEasting := 0.0\n\tutmMinNorthing := 0.0\n\tn, err := fmt.Sscanf(os.Args[2], \"%d%s%f%f\", &utmMinZoneNumber, &utmMinHemisphere, &utmMinEasting, &utmMinNorthing)\n\tif err != nil {\n\t\tlog.Fatalf(\"\\nerror <%v> at fmt.Sscanf(); value = <%v>\\n\", err, os.Args[2])\n\t}\n\tif n != 4 {\n\t\tlog.Fatalf(\"\\nnumber of items unsufficient; expected = <%d>, parsed = <%d>; value = <%v>\\n\", 4, n, os.Args[2])\n\t}\n\n\tutmMaxZoneNumber := 0\n\tutmMaxHemisphere := \"\"\n\tutmMaxEasting := 0.0\n\tutmMaxNorthing := 0.0\n\tn, err = fmt.Sscanf(os.Args[3], \"%d%s%f%f\", &utmMaxZoneNumber, &utmMaxHemisphere, &utmMaxEasting, &utmMaxNorthing)\n\tif err != nil {\n\t\tlog.Fatalf(\"\\nerror <%v> at fmt.Sscanf(); value = <%v>\\n\", err, os.Args[3])\n\t}\n\tif n != 4 {\n\t\tlog.Fatalf(\"\\nnumber of items unsufficient; expected = <%d>, parsed = <%d>; value = <%v>\\n\", 4, n, os.Args[3])\n\t}\n\n\t// verify input data\n\tif utmMinZoneNumber != utmMaxZoneNumber {\n\t\tlog.Fatalf(\"\\nerror: utm zone numbers (%d / %d) not identical\\n\", utmMinZoneNumber, utmMaxZoneNumber)\n\t}\n\tif utmMinHemisphere != utmMaxHemisphere {\n\t\tlog.Fatalf(\"\\nerror: utm hemispheres (%s / %s) not identical\\n\", utmMinHemisphere, utmMaxHemisphere)\n\t}\n\tvar northern bool\n\tif strings.ToLower(utmMinHemisphere) == \"north\" {\n\t\tnorthern = true\n\t} else if strings.ToLower(utmMinHemisphere) == \"south\" {\n\t\tnorthern = false\n\t} else {\n\t\tlog.Fatalf(\"\\nerror: utm hemisphere must be either North or South\\n\")\n\t}\n\n\tgridDistance, err := strconv.ParseFloat(os.Args[4], 64)\n\tif err != nil {\n\t\tlog.Fatalf(\"\\nerror <%v> at strconv.Atoi(); value = <%v>\\n\", err, os.Args[4])\n\t}\n\n\t// latitude / horizontal grid lines (order: longitude, latitude)\n\tlsLat := make(orb.LineString, 
0, 2)\n\tfcLat := geojson.NewFeatureCollection()\n\n\tfor utmNorthingTemp := utmMinNorthing; utmNorthingTemp <= utmMaxNorthing; utmNorthingTemp += gridDistance {\n\t\tlatStart, lonStart, err := UTM.ToLatLon(utmMinEasting, utmNorthingTemp, utmMinZoneNumber, \"\", northern)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"\\nerror <%v> at UTM.ToLatLon()\\n\", err)\n\t\t}\n\t\tlatEnd, lonEnd, err := UTM.ToLatLon(utmMaxEasting, utmNorthingTemp, utmMinZoneNumber, \"\", northern)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"\\nerror <%v> at UTM.ToLatLon()\\n\", err)\n\t\t}\n\t\tlsLat = append(lsLat, orb.Point{lonStart, latStart})\n\t\tlsLat = append(lsLat, orb.Point{lonEnd, latEnd})\n\t\tfeature := geojson.NewFeature(lsLat)\n\t\tfeature.Properties = make(map[string]interface{})\n\t\tfeature.Properties[\"name\"] = fmt.Sprintf(\"easting %.0f\", utmNorthingTemp)\n\t\tfcLat.Append(feature)\n\t\tlsLat = nil // reuse LineString object\n\t}\n\n\tdataJSON, err := json.MarshalIndent(fcLat, \"\", \" \")\n\tif err != nil {\n\t\tlog.Fatalf(\"error <%v> at json.MarshalIndent()\", err)\n\t}\n\n\t// write data ([]byte) to file\n\tfilename := fmt.Sprintf(\"utmlatgrid%.0f.geojson\", gridDistance)\n\tif err := ioutil.WriteFile(filename, dataJSON, 0666); err != nil {\n\t\tlog.Fatalf(\"error <%v> at ioutil.WriteFile(); file = <%v>\", err, filename)\n\t}\n\n\tfmt.Printf(\"\\nutm latitude (horizontal) coordinate grid lines: %s\\n\", filename)\n\n\t// longitude / vertical grid lines (order: longitude, latitude)\n\tlsLon := make(orb.LineString, 0, 2)\n\tfcLon := geojson.NewFeatureCollection()\n\n\tfor utmEastingTemp := utmMinEasting; utmEastingTemp <= utmMaxEasting; utmEastingTemp += gridDistance {\n\t\tlatStart, lonStart, err := UTM.ToLatLon(utmEastingTemp, utmMinNorthing, utmMinZoneNumber, \"\", northern)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"\\nerror <%v> at UTM.ToLatLon()\\n\", err)\n\t\t}\n\t\tlatEnd, lonEnd, err := UTM.ToLatLon(utmEastingTemp, utmMaxNorthing, utmMinZoneNumber, \"\", northern)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"\\nerror <%v> at UTM.ToLatLon()\\n\", err)\n\t\t}\n\t\tlsLon = append(lsLon, orb.Point{lonStart, latStart})\n\t\tlsLon = append(lsLon, orb.Point{lonEnd, latEnd})\n\t\tfeature := geojson.NewFeature(lsLon)\n\t\tfeature.Properties = make(map[string]interface{})\n\t\tfeature.Properties[\"name\"] = fmt.Sprintf(\"%.0f northing\", utmEastingTemp)\n\t\tfcLon.Append(feature)\n\t\tlsLon = nil // reuse LineString object\n\t}\n\n\tdataJSON, err = json.MarshalIndent(fcLon, \"\", \" \")\n\tif err != nil {\n\t\tlog.Fatalf(\"error <%v> at json.MarshalIndent()\", err)\n\t}\n\n\t// write data ([]byte) to file\n\tfilename = fmt.Sprintf(\"utmlongrid%.0f.geojson\", gridDistance)\n\tif err := ioutil.WriteFile(filename, dataJSON, 0666); err != nil {\n\t\tlog.Fatalf(\"error <%v> at ioutil.WriteFile(); file = <%v>\", err, filename)\n\t}\n\n\tfmt.Printf(\"utm longitude (vertical) coordinate grid lines: %s\\n\", filename)\n}", "title": "" }, { "docid": "ed025f5e6fa9d196d911852dcc94d925", "score": "0.4838694", "text": "func Distribution(r, c int, prob float64, generator *rand.LockedRand) mat.Matrix {\n\tout := mat.NewEmptyDense(r, c)\n\tdist := uniform.New(0.0, 1.0, generator)\n\tfor i := 0; i < r; i++ {\n\t\tfor j := 0; j < c; j++ {\n\t\t\tval := dist.Next()\n\t\t\tif val < prob {\n\t\t\t\tout.Set(i, j, math.Floor(val))\n\t\t\t} else {\n\t\t\t\tout.Set(i, j, math.Floor(val)+1.0)\n\t\t\t}\n\t\t}\n\t}\n\treturn out\n}", "title": "" }, { "docid": "68ea81dc770754fe87416580d4613871", "score": "0.48351154", "text": "func 
NewDistances(cell *Cell) Distances {\n\treturn Distances{\n\t\tcells: map[*Cell]int{cell: 0},\n\t\troot: cell,\n\t}\n}", "title": "" }, { "docid": "30ee6f1ba228b77eaf48901308343ed9", "score": "0.48175722", "text": "func GridWH(m *Machine) {\n\terrorMsg := \"Invalid arguments. Usage: 'gridwh [int]' or 'gridwh [width] x [height]'\"\n\n\twidth := m.ArgAsString(0)\n\theight := width\n\n\tif m.ArgCount() == 3 {\n\t\tm.Assert(m.ArgAsString(1) == \"x\", errorMsg)\n\t\theight = m.ArgAsString(2)\n\t}\n\n\tif _, err := strconv.Atoi(width); err != nil {\n\t\tm.Throw(errorMsg)\n\t}\n\tif _, err := strconv.Atoi(height); err != nil {\n\t\tm.Throw(errorMsg)\n\t}\n\n\tm.Vars[\".grid.width\"] = width\n\tm.Vars[\".grid.height\"] = height\n}", "title": "" }, { "docid": "45e21959c2e0f7d3c5553cfb6394dbc6", "score": "0.4812947", "text": "func (d *Dist2D) Entries() int64 {\n\treturn d.X.Entries()\n}", "title": "" }, { "docid": "ca682c8929f01bd2116a0e7b36e51162", "score": "0.47611558", "text": "func calDistance(gpX float64, cunX float64, gpY float64, cunY float64) float64 {\n\n\tradius := 6371000.0 //6378137.0\n\trad := math.Pi / 180.0\n\tcunX = cunX * rad\n\tgpX = gpX * rad\n\tcunY = cunY * rad\n\tgpY = gpY * rad\n\n\ttheta := cunY - gpY\n\tdist := math.Acos(math.Sin(gpX)*math.Sin(cunX) + math.Cos(gpX)*math.Cos(cunX)*math.Cos(theta))\n\tfinalDis := math.Trunc(dist*radius/1000*1e2+0.5) * 1e-2\n\treturn finalDis\n}", "title": "" }, { "docid": "c35e1476a4f2baa4f625b4bd8c60e187", "score": "0.47536197", "text": "func (e *Entity) updateGridPos() {\n\te.gridX = float64(e.x * TILE_SIZE)\n\te.gridY = float64(e.y * TILE_SIZE)\n}", "title": "" }, { "docid": "d7c5717613545a482552114be52894cf", "score": "0.47119468", "text": "func GetGridSize() int {\n\treturn config.Config.Simulation.RoadwaySize\n}", "title": "" }, { "docid": "7a7e02e9cab65dbf5941ff3d06a56300", "score": "0.471048", "text": "func (s settings) GeodeticToGrid(cordinate GeodeticCoordinate) (RT90Cordinate, error) {\n\tif cordinate.Validate() {\n\t\treturn RT90Cordinate{}, errors.New(\"GeodeticCoordinates is not valid\")\n\t}\n\tif s.centralMeridian == nil {\n\t\treturn RT90Cordinate{}, errors.New(\"centeral meridian is not set\")\n\t}\n\t// Prepare ellipsoid-based stuff.\n\te2 := s.flattening * (2.0 - s.flattening)\n\tn := s.flattening / (2.0 - s.flattening)\n\taRoof := s.axis / (1.0 + n) * (1.0 + n*n/4.0 + n*n*n*n/64.0)\n\tA := e2\n\tB := (5.0*e2*e2 - e2*e2*e2) / 6.0\n\tC := (104.0*e2*e2*e2 - 45.0*e2*e2*e2*e2) / 120.0\n\tD := (1237.0 * e2 * e2 * e2 * e2) / 1260.0\n\tbeta1 := n/2.0 - 2.0*n*n/3.0 + 5.0*n*n*n/16.0 + 41.0*n*n*n*n/180.0\n\tbeta2 := 13.0*n*n/48.0 - 3.0*n*n*n/5.0 + 557.0*n*n*n*n/1440.0\n\tbeta3 := 61.0*n*n*n/240.0 - 103.0*n*n*n*n/140.0\n\tbeta4 := 49561.0 * n * n * n * n / 161280.0\n\n\t// Convert.\n\tdegToRad := math.Pi / 180.0\n\tphi := cordinate.Latitude * degToRad\n\tlambda := cordinate.Longitude * degToRad\n\tlambdaZero := *s.centralMeridian * degToRad\n\n\tphiStar := phi - math.Sin(phi)*math.Cos(phi)*(A+\n\t\tB*math.Pow(math.Sin(phi), 2)+\n\t\tC*math.Pow(math.Sin(phi), 4)+\n\t\tD*math.Pow(math.Sin(phi), 6))\n\tdeltaLambda := lambda - lambdaZero\n\txiPrim := math.Atan(math.Tan(phiStar) / math.Cos(deltaLambda))\n\tetaPrim := math.Atanh(math.Cos(phiStar) * math.Sin(deltaLambda))\n\tx := s.scale*aRoof*(xiPrim+\n\t\tbeta1*math.Sin(2.0*xiPrim)*math.Cosh(2.0*etaPrim)+\n\t\tbeta2*math.Sin(4.0*xiPrim)*math.Cosh(4.0*etaPrim)+\n\t\tbeta3*math.Sin(6.0*xiPrim)*math.Cosh(6.0*etaPrim)+\n\t\tbeta4*math.Sin(8.0*xiPrim)*math.Cosh(8.0*etaPrim)) + 
s.falseNorthing\n\ty := s.scale*aRoof*(etaPrim+\n\t\tbeta1*math.Cos(2.0*xiPrim)*math.Sinh(2.0*etaPrim)+\n\t\tbeta2*math.Cos(4.0*xiPrim)*math.Sinh(4.0*etaPrim)+\n\t\tbeta3*math.Cos(6.0*xiPrim)*math.Sinh(6.0*etaPrim)+\n\t\tbeta4*math.Cos(8.0*xiPrim)*math.Sinh(8.0*etaPrim)) + s.falseEasting\n\n\treturn RT90Cordinate{\n\t\tX: math.Round(x*1000.0) / 1000.0,\n\t\tY: math.Round(y*1000.0) / 1000.0,\n\t}, nil\n}", "title": "" }, { "docid": "62cc737a347db04b90146b6cc5874bf0", "score": "0.47007588", "text": "func (ts *TravellingSalesman) Init(matrix [][]int) {\n\t// Find locations and coordinates\n\tcoords := map[int] [2]int{}\n\tfor i := range matrix { \n\t\tfor j := range matrix[i] {\t\t\t\n\t\t\tif matrix[i][j] != 0 {\n\t\t\t\tvalue := matrix[i][j]\n\t\t\t\tcoords[value] = [2]int{i+1,j+1}\n\t\t\t}\n\t\t}\n\t}\n\t\n\t// Gene length equals the number of coordinates\n\tts.geneLength = len(coords)\n\t\n\t// Calculate distances\n\tvar size = ts.geneLength\n\tts.distMatrix = util.NewDistanceMatrix(ts.geneLength)\n\tfor i := 0; i < size*size; i++ {\t\n\t\trow := i%size\n\t\tcol := i/size\n\t\tarea1 := coords[row+1] // X coordinate (Row)\n\t\tarea2 := coords[col+1] // Y coordinate (Column)\n\t\tif area1[0] == area2[0] {\n\t\t\t// Same Column so distance eq diff(row2-row1)\n\t\t\tts.distMatrix.SetDistance(\n\t\t\t\trow, \n\t\t\t\tcol, \n\t\t\t\tmath.Abs(float64(area2[1]-area1[1])))\n\t\t} else if area1[1] == area2[1] {\n\t\t\t// Same Row so distance eq diff(col2-col1)\n\t\t\tts.distMatrix.SetDistance(\n\t\t\t\trow,\n\t\t\t\tcol,\n\t\t\t\tmath.Abs(float64(area2[0]-area1[0])))\n\t\t} else {\n\t\t\ta := math.Pow(float64(area2[1]-area1[1]), 2)\n\t\t\tb := math.Pow(float64(area2[0]-area1[0]), 2)\n\t\t\tc := math.Sqrt(a+b)\n\t\t\tts.distMatrix.SetDistance(row, col, c)\n\t\t}\n\t\t//fmt.Printf(\"%d -> %d: %f\\n\", row+1, col+1, ts.distMatrix.GetDistance(row,col))\n\t}\n\t\n\t// Generate random genes\n\tts.genes = make([]ga.Gene, ts.nGenes)\n\tvar proto = make([]int, ts.geneLength)\n\tvar i int = 0\n\tfor key, _ := range coords {\n\t\tproto[i] = key\n\t\ti++\n\t}\n\t\n\t// Initialize all len(genes) and shuffle\n\tfor i := 0; i < len(ts.genes); i++ {\n\t\tts.genes[i].Data = make([]int, len(proto))\t\t\n\t\tcopy(ts.genes[i].Data, proto)\n\t\tshuffleArray(&(ts.genes[i].Data))\n\t}\n}", "title": "" }, { "docid": "2f3fce41e8bb7290a534f7d6def8d56c", "score": "0.46893096", "text": "func coordNorm(c geom.Coord) float64 {\n\treturn math.Sqrt(coordNorm2(c))\n}", "title": "" }, { "docid": "df76d129f3b72e546c7d7c23e3b6e79f", "score": "0.46876386", "text": "func Dist(v1, v2 Vect) float32 {\n\treturn float32(math.Sqrt(float64(DistSqr(v1, v2))))\n}", "title": "" }, { "docid": "1e3b3f76bb6da73c8506ef0c145a5673", "score": "0.46708423", "text": "func CalcDistances(se escache.SearchEntry, in <-chan escache.SearchEntry) <-chan escache.SearchEntry {\n\tch := make(chan escache.SearchEntry)\n\n\tgo func(){\n\t\tfor elem := range in {\n\t\t\tdist, _ := elem.Distance(se)\n\t\t\telem.Dist = dist\n\t\t\tch <- elem\n\t\t}\n\t\tclose(ch)\n\t\twg.Done()\n\t}()\n\treturn ch\n}", "title": "" }, { "docid": "5422d94b814368b52c7c58d19d103697", "score": "0.4659759", "text": "func (d distPyramid) dist(a, b int) float64 {\n\tif a > b {\n\t\treturn d[a][b]\n\t}\n\treturn d[b][a]\n}", "title": "" }, { "docid": "239a5e9924fb51fd356678c189eec0bb", "score": "0.4653793", "text": "func MakeGrid(sz point.Point) Grid {\n\tg := Grid{Size: sz}\n\tg.Data = make([]termbox.Cell, sz.X*sz.Y)\n\treturn g\n}", "title": "" }, { "docid": 
"4abad3a74fd5417e9fd20c2ff15825de", "score": "0.46440426", "text": "func make4DGrid() [][][][]bool {\n\n\tgrid := make([][][][]bool, l)\n\n\tfor i := range grid {\n\t\tgrid[i] = make3DGrid()\n\t}\n\n\treturn grid\n}", "title": "" }, { "docid": "ee48a53d9b25e542cd5e1d0de7743d64", "score": "0.46269175", "text": "func (ly *Layout) GatherSizesGrid() {\n\tif len(ly.Kids) == 0 {\n\t\treturn\n\t}\n\n\tcols := ly.Style.Layout.Columns\n\trows := 0\n\n\tsz := len(ly.Kids)\n\t// collect overal size\n\tfor _, c := range ly.Kids {\n\t\t_, gi := KiToNode2D(c)\n\t\tif gi == nil {\n\t\t\tcontinue\n\t\t}\n\t\tlst := gi.Style.Layout\n\t\tif lst.Col > 0 {\n\t\t\tcols = kit.MaxInt(cols, lst.Col+lst.ColSpan)\n\t\t}\n\t\tif lst.Row > 0 {\n\t\t\trows = kit.MaxInt(rows, lst.Row+lst.RowSpan)\n\t\t}\n\t}\n\n\tif cols == 0 {\n\t\tcols = int(math32.Sqrt(float32(sz))) // whatever -- not well defined\n\t}\n\tif rows == 0 {\n\t\trows = sz / cols\n\t}\n\tfor rows*cols < sz { // not defined to have multiple items per cell -- make room for everyone\n\t\trows++\n\t}\n\n\tly.GridSize.X = cols\n\tly.GridSize.Y = rows\n\n\tif len(ly.GridData[Row]) != rows {\n\t\tly.GridData[Row] = make([]LayoutData, rows)\n\t}\n\tif len(ly.GridData[Col]) != cols {\n\t\tly.GridData[Col] = make([]LayoutData, cols)\n\t}\n\n\tfor i := range ly.GridData[Row] {\n\t\tld := &ly.GridData[Row][i]\n\t\tld.Size.Need.Set(0, 0)\n\t\tld.Size.Pref.Set(0, 0)\n\t}\n\tfor i := range ly.GridData[Col] {\n\t\tld := &ly.GridData[Col][i]\n\t\tld.Size.Need.Set(0, 0)\n\t\tld.Size.Pref.Set(0, 0)\n\t}\n\n\tcol := 0\n\trow := 0\n\tfor _, c := range ly.Kids {\n\t\t_, gi := KiToNode2D(c)\n\t\tif gi == nil {\n\t\t\tcontinue\n\t\t}\n\t\tgi.LayData.UpdateSizes()\n\t\tlst := gi.Style.Layout\n\t\tif lst.Col > 0 {\n\t\t\tcol = lst.Col\n\t\t}\n\t\tif lst.Row > 0 {\n\t\t\trow = lst.Row\n\t\t}\n\t\t// r 0 1 col X = max(ea in col) (Y = not used)\n\t\t// +--+---+\n\t\t// 0 | | | row Y = max(ea in row) (X = not used)\n\t\t// +--+---+\n\t\t// 1 | | |\n\t\t// +--+---+\n\n\t\t// todo: need to deal with span in sums..\n\t\tly.GridData[Row][row].Size.Need.SetMaxDim(Y, gi.LayData.Size.Need.Y)\n\t\tly.GridData[Row][row].Size.Pref.SetMaxDim(Y, gi.LayData.Size.Pref.Y)\n\t\tly.GridData[Col][col].Size.Need.SetMaxDim(X, gi.LayData.Size.Need.X)\n\t\tly.GridData[Col][col].Size.Pref.SetMaxDim(X, gi.LayData.Size.Pref.X)\n\n\t\t// for max: any -1 stretch dominates, else accumulate any max\n\t\tif ly.GridData[Row][row].Size.Max.Y >= 0 {\n\t\t\tif gi.LayData.Size.Max.Y < 0 { // stretch\n\t\t\t\tly.GridData[Row][row].Size.Max.Y = -1\n\t\t\t} else {\n\t\t\t\tly.GridData[Row][row].Size.Max.SetMaxDim(Y, gi.LayData.Size.Max.Y)\n\t\t\t}\n\t\t}\n\t\tif ly.GridData[Col][col].Size.Max.X >= 0 {\n\t\t\tif gi.LayData.Size.Max.Y < 0 { // stretch\n\t\t\t\tly.GridData[Col][col].Size.Max.X = -1\n\t\t\t} else {\n\t\t\t\tly.GridData[Col][col].Size.Max.SetMaxDim(X, gi.LayData.Size.Max.X)\n\t\t\t}\n\t\t}\n\n\t\tcol++\n\t\tif col >= cols { // todo: really only works if NO items specify row,col or ALL do..\n\t\t\tcol = 0\n\t\t\trow++\n\t\t\tif row >= rows { // wrap-around.. 
no other good option\n\t\t\t\trow = 0\n\t\t\t}\n\t\t}\n\t}\n\n\t// Y = sum across rows which have max's\n\tvar sumPref, sumNeed Vec2D\n\tfor _, ld := range ly.GridData[Row] {\n\t\tsumNeed.SetAddDim(Y, ld.Size.Need.Y)\n\t\tsumPref.SetAddDim(Y, ld.Size.Pref.Y)\n\t}\n\t// X = sum across cols which have max's\n\tfor _, ld := range ly.GridData[Col] {\n\t\tsumNeed.SetAddDim(X, ld.Size.Need.X)\n\t\tsumPref.SetAddDim(X, ld.Size.Pref.X)\n\t}\n\n\tly.LayData.Size.Need.SetMax(sumNeed)\n\tly.LayData.Size.Pref.SetMax(sumPref)\n\n\tspc := ly.Style.BoxSpace()\n\tly.LayData.Size.Need.SetAddVal(2.0 * spc)\n\tly.LayData.Size.Pref.SetAddVal(2.0 * spc)\n\n\tly.LayData.UpdateSizes() // enforce max and normal ordering, etc\n\tif Layout2DTrace {\n\t\tfmt.Printf(\"Size: %v gather sizes grid need: %v, pref: %v\\n\", ly.PathUnique(), ly.LayData.Size.Need, ly.LayData.Size.Pref)\n\t}\n}", "title": "" }, { "docid": "cc6dcccfa5b4a5ed8bedb19ca44366c7", "score": "0.4618753", "text": "func (tsne *TSNE) EmbedDistances(D mat.Matrix, stepFunc func(iter int, divergence float64, embedding mat.Matrix) bool) mat.Matrix {\n\n\t// Verify that D is square\n\tn, d := D.Dims()\n\tif n != d {\n\t\tpanic(\"squared distance matrix is not square\")\n\t}\n\n\ttsne.n = n\n\ttsne.d2p(D, EntropyTolerance, tsne.perplexity)\n\ttsne.initSolution()\n\ttsne.run(stepFunc)\n\treturn tsne.Y\n}", "title": "" }, { "docid": "f2353a7639fd9eb049a110893ce0921a", "score": "0.46120527", "text": "func SurroundingCells(unitRadius int) [][]int {\n\t// Dim gd As New Grid.Definition(\"t1\", 100, 100)\n\t// For i = 0 To 10\n\t// \tDim sb As New Text.StringBuilder\n\t// \tFor Each rc In gd.SurroundingCellSet(i)\n\t// \t\tsb.Append(String.Format(\"{0}{1},{2}{3},\", \"{\", rc.Row, rc.Col, \"}\"))\n\t// \tNext\n\t// \tConsole.WriteLine(\"{0}: {1}{2}{3}\", i, \"{\", Left(sb.ToString, sb.ToString.Length - 1), \"},\")\n\t// Next\n\td := map[int][][]int{\n\t\t0: {{0, 0}},\n\t\t1: {{-1, 0}, {0, -1}, {0, 0}, {0, 1}, {1, 0}},\n\t\t2: {{-2, 0}, {-1, -1}, {-1, 0}, {-1, 1}, {0, -2}, {0, -1}, {0, 0}, {0, 1}, {0, 2}, {1, -1}, {1, 0}, {1, 1}, {2, 0}},\n\t\t3: {{-3, 0}, {-2, -2}, {-2, -1}, {-2, 0}, {-2, 1}, {-2, 2}, {-1, -2}, {-1, -1}, {-1, 0}, {-1, 1}, {-1, 2}, {0, -3}, {0, -2}, {0, -1}, {0, 0}, {0, 1}, {0, 2}, {0, 3}, {1, -2}, {1, -1}, {1, 0}, {1, 1}, {1, 2}, {2, -2}, {2, -1}, {2, 0}, {2, 1}, {2, 2}, {3, 0}},\n\t\t4: {{-4, 0}, {-3, -2}, {-3, -1}, {-3, 0}, {-3, 1}, {-3, 2}, {-2, -3}, {-2, -2}, {-2, -1}, {-2, 0}, {-2, 1}, {-2, 2}, {-2, 3}, {-1, -3}, {-1, -2}, {-1, -1}, {-1, 0}, {-1, 1}, {-1, 2}, {-1, 3}, {0, -4}, {0, -3}, {0, -2}, {0, -1}, {0, 0}, {0, 1}, {0, 2}, {0, 3}, {0, 4}, {1, -3}, {1, -2}, {1, -1}, {1, 0}, {1, 1}, {1, 2}, {1, 3}, {2, -3}, {2, -2}, {2, -1}, {2, 0}, {2, 1}, {2, 2}, {2, 3}, {3, -2}, {3, -1}, {3, 0}, {3, 1}, {3, 2}, {4, 0}},\n\t\t5: {{-5, 0}, {-4, -3}, {-4, -2}, {-4, -1}, {-4, 0}, {-4, 1}, {-4, 2}, {-4, 3}, {-3, -4}, {-3, -3}, {-3, -2}, {-3, -1}, {-3, 0}, {-3, 1}, {-3, 2}, {-3, 3}, {-3, 4}, {-2, -4}, {-2, -3}, {-2, -2}, {-2, -1}, {-2, 0}, {-2, 1}, {-2, 2}, {-2, 3}, {-2, 4}, {-1, -4}, {-1, -3}, {-1, -2}, {-1, -1}, {-1, 0}, {-1, 1}, {-1, 2}, {-1, 3}, {-1, 4}, {0, -5}, {0, -4}, {0, -3}, {0, -2}, {0, -1}, {0, 0}, {0, 1}, {0, 2}, {0, 3}, {0, 4}, {0, 5}, {1, -4}, {1, -3}, {1, -2}, {1, -1}, {1, 0}, {1, 1}, {1, 2}, {1, 3}, {1, 4}, {2, -4}, {2, -3}, {2, -2}, {2, -1}, {2, 0}, {2, 1}, {2, 2}, {2, 3}, {2, 4}, {3, -4}, {3, -3}, {3, -2}, {3, -1}, {3, 0}, {3, 1}, {3, 2}, {3, 3}, {3, 4}, {4, -3}, {4, -2}, {4, -1}, {4, 0}, {4, 1}, {4, 2}, {4, 3}, {5, 0}},\n\t\t6: {{-6, 0}, {-5, 
-3}, {-5, -2}, {-5, -1}, {-5, 0}, {-5, 1}, {-5, 2}, {-5, 3}, {-4, -4}, {-4, -3}, {-4, -2}, {-4, -1}, {-4, 0}, {-4, 1}, {-4, 2}, {-4, 3}, {-4, 4}, {-3, -5}, {-3, -4}, {-3, -3}, {-3, -2}, {-3, -1}, {-3, 0}, {-3, 1}, {-3, 2}, {-3, 3}, {-3, 4}, {-3, 5}, {-2, -5}, {-2, -4}, {-2, -3}, {-2, -2}, {-2, -1}, {-2, 0}, {-2, 1}, {-2, 2}, {-2, 3}, {-2, 4}, {-2, 5}, {-1, -5}, {-1, -4}, {-1, -3}, {-1, -2}, {-1, -1}, {-1, 0}, {-1, 1}, {-1, 2}, {-1, 3}, {-1, 4}, {-1, 5}, {0, -6}, {0, -5}, {0, -4}, {0, -3}, {0, -2}, {0, -1}, {0, 0}, {0, 1}, {0, 2}, {0, 3}, {0, 4}, {0, 5}, {0, 6}, {1, -5}, {1, -4}, {1, -3}, {1, -2}, {1, -1}, {1, 0}, {1, 1}, {1, 2}, {1, 3}, {1, 4}, {1, 5}, {2, -5}, {2, -4}, {2, -3}, {2, -2}, {2, -1}, {2, 0}, {2, 1}, {2, 2}, {2, 3}, {2, 4}, {2, 5}, {3, -5}, {3, -4}, {3, -3}, {3, -2}, {3, -1}, {3, 0}, {3, 1}, {3, 2}, {3, 3}, {3, 4}, {3, 5}, {4, -4}, {4, -3}, {4, -2}, {4, -1}, {4, 0}, {4, 1}, {4, 2}, {4, 3}, {4, 4}, {5, -3}, {5, -2}, {5, -1}, {5, 0}, {5, 1}, {5, 2}, {5, 3}, {6, 0}},\n\t\t7: {{-7, 0}, {-6, -3}, {-6, -2}, {-6, -1}, {-6, 0}, {-6, 1}, {-6, 2}, {-6, 3}, {-5, -4}, {-5, -3}, {-5, -2}, {-5, -1}, {-5, 0}, {-5, 1}, {-5, 2}, {-5, 3}, {-5, 4}, {-4, -5}, {-4, -4}, {-4, -3}, {-4, -2}, {-4, -1}, {-4, 0}, {-4, 1}, {-4, 2}, {-4, 3}, {-4, 4}, {-4, 5}, {-3, -6}, {-3, -5}, {-3, -4}, {-3, -3}, {-3, -2}, {-3, -1}, {-3, 0}, {-3, 1}, {-3, 2}, {-3, 3}, {-3, 4}, {-3, 5}, {-3, 6}, {-2, -6}, {-2, -5}, {-2, -4}, {-2, -3}, {-2, -2}, {-2, -1}, {-2, 0}, {-2, 1}, {-2, 2}, {-2, 3}, {-2, 4}, {-2, 5}, {-2, 6}, {-1, -6}, {-1, -5}, {-1, -4}, {-1, -3}, {-1, -2}, {-1, -1}, {-1, 0}, {-1, 1}, {-1, 2}, {-1, 3}, {-1, 4}, {-1, 5}, {-1, 6}, {0, -7}, {0, -6}, {0, -5}, {0, -4}, {0, -3}, {0, -2}, {0, -1}, {0, 0}, {0, 1}, {0, 2}, {0, 3}, {0, 4}, {0, 5}, {0, 6}, {0, 7}, {1, -6}, {1, -5}, {1, -4}, {1, -3}, {1, -2}, {1, -1}, {1, 0}, {1, 1}, {1, 2}, {1, 3}, {1, 4}, {1, 5}, {1, 6}, {2, -6}, {2, -5}, {2, -4}, {2, -3}, {2, -2}, {2, -1}, {2, 0}, {2, 1}, {2, 2}, {2, 3}, {2, 4}, {2, 5}, {2, 6}, {3, -6}, {3, -5}, {3, -4}, {3, -3}, {3, -2}, {3, -1}, {3, 0}, {3, 1}, {3, 2}, {3, 3}, {3, 4}, {3, 5}, {3, 6}, {4, -5}, {4, -4}, {4, -3}, {4, -2}, {4, -1}, {4, 0}, {4, 1}, {4, 2}, {4, 3}, {4, 4}, {4, 5}, {5, -4}, {5, -3}, {5, -2}, {5, -1}, {5, 0}, {5, 1}, {5, 2}, {5, 3}, {5, 4}, {6, -3}, {6, -2}, {6, -1}, {6, 0}, {6, 1}, {6, 2}, {6, 3}, {7, 0}},\n\t\t8: {{-8, 0}, {-7, -3}, {-7, -2}, {-7, -1}, {-7, 0}, {-7, 1}, {-7, 2}, {-7, 3}, {-6, -5}, {-6, -4}, {-6, -3}, {-6, -2}, {-6, -1}, {-6, 0}, {-6, 1}, {-6, 2}, {-6, 3}, {-6, 4}, {-6, 5}, {-5, -6}, {-5, -5}, {-5, -4}, {-5, -3}, {-5, -2}, {-5, -1}, {-5, 0}, {-5, 1}, {-5, 2}, {-5, 3}, {-5, 4}, {-5, 5}, {-5, 6}, {-4, -6}, {-4, -5}, {-4, -4}, {-4, -3}, {-4, -2}, {-4, -1}, {-4, 0}, {-4, 1}, {-4, 2}, {-4, 3}, {-4, 4}, {-4, 5}, {-4, 6}, {-3, -7}, {-3, -6}, {-3, -5}, {-3, -4}, {-3, -3}, {-3, -2}, {-3, -1}, {-3, 0}, {-3, 1}, {-3, 2}, {-3, 3}, {-3, 4}, {-3, 5}, {-3, 6}, {-3, 7}, {-2, -7}, {-2, -6}, {-2, -5}, {-2, -4}, {-2, -3}, {-2, -2}, {-2, -1}, {-2, 0}, {-2, 1}, {-2, 2}, {-2, 3}, {-2, 4}, {-2, 5}, {-2, 6}, {-2, 7}, {-1, -7}, {-1, -6}, {-1, -5}, {-1, -4}, {-1, -3}, {-1, -2}, {-1, -1}, {-1, 0}, {-1, 1}, {-1, 2}, {-1, 3}, {-1, 4}, {-1, 5}, {-1, 6}, {-1, 7}, {0, -8}, {0, -7}, {0, -6}, {0, -5}, {0, -4}, {0, -3}, {0, -2}, {0, -1}, {0, 0}, {0, 1}, {0, 2}, {0, 3}, {0, 4}, {0, 5}, {0, 6}, {0, 7}, {0, 8}, {1, -7}, {1, -6}, {1, -5}, {1, -4}, {1, -3}, {1, -2}, {1, -1}, {1, 0}, {1, 1}, {1, 2}, {1, 3}, {1, 4}, {1, 5}, {1, 6}, {1, 7}, {2, -7}, {2, -6}, {2, -5}, {2, -4}, {2, -3}, {2, -2}, {2, -1}, {2, 0}, {2, 1}, {2, 2}, {2, 3}, 
{2, 4}, {2, 5}, {2, 6}, {2, 7}, {3, -7}, {3, -6}, {3, -5}, {3, -4}, {3, -3}, {3, -2}, {3, -1}, {3, 0}, {3, 1}, {3, 2}, {3, 3}, {3, 4}, {3, 5}, {3, 6}, {3, 7}, {4, -6}, {4, -5}, {4, -4}, {4, -3}, {4, -2}, {4, -1}, {4, 0}, {4, 1}, {4, 2}, {4, 3}, {4, 4}, {4, 5}, {4, 6}, {5, -6}, {5, -5}, {5, -4}, {5, -3}, {5, -2}, {5, -1}, {5, 0}, {5, 1}, {5, 2}, {5, 3}, {5, 4}, {5, 5}, {5, 6}, {6, -5}, {6, -4}, {6, -3}, {6, -2}, {6, -1}, {6, 0}, {6, 1}, {6, 2}, {6, 3}, {6, 4}, {6, 5}, {7, -3}, {7, -2}, {7, -1}, {7, 0}, {7, 1}, {7, 2}, {7, 3}, {8, 0}},\n\t\t9: {{-9, 0}, {-8, -4}, {-8, -3}, {-8, -2}, {-8, -1}, {-8, 0}, {-8, 1}, {-8, 2}, {-8, 3}, {-8, 4}, {-7, -5}, {-7, -4}, {-7, -3}, {-7, -2}, {-7, -1}, {-7, 0}, {-7, 1}, {-7, 2}, {-7, 3}, {-7, 4}, {-7, 5}, {-6, -6}, {-6, -5}, {-6, -4}, {-6, -3}, {-6, -2}, {-6, -1}, {-6, 0}, {-6, 1}, {-6, 2}, {-6, 3}, {-6, 4}, {-6, 5}, {-6, 6}, {-5, -7}, {-5, -6}, {-5, -5}, {-5, -4}, {-5, -3}, {-5, -2}, {-5, -1}, {-5, 0}, {-5, 1}, {-5, 2}, {-5, 3}, {-5, 4}, {-5, 5}, {-5, 6}, {-5, 7}, {-4, -8}, {-4, -7}, {-4, -6}, {-4, -5}, {-4, -4}, {-4, -3}, {-4, -2}, {-4, -1}, {-4, 0}, {-4, 1}, {-4, 2}, {-4, 3}, {-4, 4}, {-4, 5}, {-4, 6}, {-4, 7}, {-4, 8}, {-3, -8}, {-3, -7}, {-3, -6}, {-3, -5}, {-3, -4}, {-3, -3}, {-3, -2}, {-3, -1}, {-3, 0}, {-3, 1}, {-3, 2}, {-3, 3}, {-3, 4}, {-3, 5}, {-3, 6}, {-3, 7}, {-3, 8}, {-2, -8}, {-2, -7}, {-2, -6}, {-2, -5}, {-2, -4}, {-2, -3}, {-2, -2}, {-2, -1}, {-2, 0}, {-2, 1}, {-2, 2}, {-2, 3}, {-2, 4}, {-2, 5}, {-2, 6}, {-2, 7}, {-2, 8}, {-1, -8}, {-1, -7}, {-1, -6}, {-1, -5}, {-1, -4}, {-1, -3}, {-1, -2}, {-1, -1}, {-1, 0}, {-1, 1}, {-1, 2}, {-1, 3}, {-1, 4}, {-1, 5}, {-1, 6}, {-1, 7}, {-1, 8}, {0, -9}, {0, -8}, {0, -7}, {0, -6}, {0, -5}, {0, -4}, {0, -3}, {0, -2}, {0, -1}, {0, 0}, {0, 1}, {0, 2}, {0, 3}, {0, 4}, {0, 5}, {0, 6}, {0, 7}, {0, 8}, {0, 9}, {1, -8}, {1, -7}, {1, -6}, {1, -5}, {1, -4}, {1, -3}, {1, -2}, {1, -1}, {1, 0}, {1, 1}, {1, 2}, {1, 3}, {1, 4}, {1, 5}, {1, 6}, {1, 7}, {1, 8}, {2, -8}, {2, -7}, {2, -6}, {2, -5}, {2, -4}, {2, -3}, {2, -2}, {2, -1}, {2, 0}, {2, 1}, {2, 2}, {2, 3}, {2, 4}, {2, 5}, {2, 6}, {2, 7}, {2, 8}, {3, -8}, {3, -7}, {3, -6}, {3, -5}, {3, -4}, {3, -3}, {3, -2}, {3, -1}, {3, 0}, {3, 1}, {3, 2}, {3, 3}, {3, 4}, {3, 5}, {3, 6}, {3, 7}, {3, 8}, {4, -8}, {4, -7}, {4, -6}, {4, -5}, {4, -4}, {4, -3}, {4, -2}, {4, -1}, {4, 0}, {4, 1}, {4, 2}, {4, 3}, {4, 4}, {4, 5}, {4, 6}, {4, 7}, {4, 8}, {5, -7}, {5, -6}, {5, -5}, {5, -4}, {5, -3}, {5, -2}, {5, -1}, {5, 0}, {5, 1}, {5, 2}, {5, 3}, {5, 4}, {5, 5}, {5, 6}, {5, 7}, {6, -6}, {6, -5}, {6, -4}, {6, -3}, {6, -2}, {6, -1}, {6, 0}, {6, 1}, {6, 2}, {6, 3}, {6, 4}, {6, 5}, {6, 6}, {7, -5}, {7, -4}, {7, -3}, {7, -2}, {7, -1}, {7, 0}, {7, 1}, {7, 2}, {7, 3}, {7, 4}, {7, 5}, {8, -4}, {8, -3}, {8, -2}, {8, -1}, {8, 0}, {8, 1}, {8, 2}, {8, 3}, {8, 4}, {9, 0}},\n\t\t10: {{-10, 0}, {-9, -4}, {-9, -3}, {-9, -2}, {-9, -1}, {-9, 0}, {-9, 1}, {-9, 2}, {-9, 3}, {-9, 4}, {-8, -6}, {-8, -5}, {-8, -4}, {-8, -3}, {-8, -2}, {-8, -1}, {-8, 0}, {-8, 1}, {-8, 2}, {-8, 3}, {-8, 4}, {-8, 5}, {-8, 6}, {-7, -7}, {-7, -6}, {-7, -5}, {-7, -4}, {-7, -3}, {-7, -2}, {-7, -1}, {-7, 0}, {-7, 1}, {-7, 2}, {-7, 3}, {-7, 4}, {-7, 5}, {-7, 6}, {-7, 7}, {-6, -8}, {-6, -7}, {-6, -6}, {-6, -5}, {-6, -4}, {-6, -3}, {-6, -2}, {-6, -1}, {-6, 0}, {-6, 1}, {-6, 2}, {-6, 3}, {-6, 4}, {-6, 5}, {-6, 6}, {-6, 7}, {-6, 8}, {-5, -8}, {-5, -7}, {-5, -6}, {-5, -5}, {-5, -4}, {-5, -3}, {-5, -2}, {-5, -1}, {-5, 0}, {-5, 1}, {-5, 2}, {-5, 3}, {-5, 4}, {-5, 5}, {-5, 6}, {-5, 7}, {-5, 8}, {-4, -9}, {-4, -8}, {-4, -7}, {-4, -6}, {-4, -5}, {-4, 
-4}, {-4, -3}, {-4, -2}, {-4, -1}, {-4, 0}, {-4, 1}, {-4, 2}, {-4, 3}, {-4, 4}, {-4, 5}, {-4, 6}, {-4, 7}, {-4, 8}, {-4, 9}, {-3, -9}, {-3, -8}, {-3, -7}, {-3, -6}, {-3, -5}, {-3, -4}, {-3, -3}, {-3, -2}, {-3, -1}, {-3, 0}, {-3, 1}, {-3, 2}, {-3, 3}, {-3, 4}, {-3, 5}, {-3, 6}, {-3, 7}, {-3, 8}, {-3, 9}, {-2, -9}, {-2, -8}, {-2, -7}, {-2, -6}, {-2, -5}, {-2, -4}, {-2, -3}, {-2, -2}, {-2, -1}, {-2, 0}, {-2, 1}, {-2, 2}, {-2, 3}, {-2, 4}, {-2, 5}, {-2, 6}, {-2, 7}, {-2, 8}, {-2, 9}, {-1, -9}, {-1, -8}, {-1, -7}, {-1, -6}, {-1, -5}, {-1, -4}, {-1, -3}, {-1, -2}, {-1, -1}, {-1, 0}, {-1, 1}, {-1, 2}, {-1, 3}, {-1, 4}, {-1, 5}, {-1, 6}, {-1, 7}, {-1, 8}, {-1, 9}, {0, -10}, {0, -9}, {0, -8}, {0, -7}, {0, -6}, {0, -5}, {0, -4}, {0, -3}, {0, -2}, {0, -1}, {0, 0}, {0, 1}, {0, 2}, {0, 3}, {0, 4}, {0, 5}, {0, 6}, {0, 7}, {0, 8}, {0, 9}, {0, 10}, {1, -9}, {1, -8}, {1, -7}, {1, -6}, {1, -5}, {1, -4}, {1, -3}, {1, -2}, {1, -1}, {1, 0}, {1, 1}, {1, 2}, {1, 3}, {1, 4}, {1, 5}, {1, 6}, {1, 7}, {1, 8}, {1, 9}, {2, -9}, {2, -8}, {2, -7}, {2, -6}, {2, -5}, {2, -4}, {2, -3}, {2, -2}, {2, -1}, {2, 0}, {2, 1}, {2, 2}, {2, 3}, {2, 4}, {2, 5}, {2, 6}, {2, 7}, {2, 8}, {2, 9}, {3, -9}, {3, -8}, {3, -7}, {3, -6}, {3, -5}, {3, -4}, {3, -3}, {3, -2}, {3, -1}, {3, 0}, {3, 1}, {3, 2}, {3, 3}, {3, 4}, {3, 5}, {3, 6}, {3, 7}, {3, 8}, {3, 9}, {4, -9}, {4, -8}, {4, -7}, {4, -6}, {4, -5}, {4, -4}, {4, -3}, {4, -2}, {4, -1}, {4, 0}, {4, 1}, {4, 2}, {4, 3}, {4, 4}, {4, 5}, {4, 6}, {4, 7}, {4, 8}, {4, 9}, {5, -8}, {5, -7}, {5, -6}, {5, -5}, {5, -4}, {5, -3}, {5, -2}, {5, -1}, {5, 0}, {5, 1}, {5, 2}, {5, 3}, {5, 4}, {5, 5}, {5, 6}, {5, 7}, {5, 8}, {6, -8}, {6, -7}, {6, -6}, {6, -5}, {6, -4}, {6, -3}, {6, -2}, {6, -1}, {6, 0}, {6, 1}, {6, 2}, {6, 3}, {6, 4}, {6, 5}, {6, 6}, {6, 7}, {6, 8}, {7, -7}, {7, -6}, {7, -5}, {7, -4}, {7, -3}, {7, -2}, {7, -1}, {7, 0}, {7, 1}, {7, 2}, {7, 3}, {7, 4}, {7, 5}, {7, 6}, {7, 7}, {8, -6}, {8, -5}, {8, -4}, {8, -3}, {8, -2}, {8, -1}, {8, 0}, {8, 1}, {8, 2}, {8, 3}, {8, 4}, {8, 5}, {8, 6}, {9, -4}, {9, -3}, {9, -2}, {9, -1}, {9, 0}, {9, 1}, {9, 2}, {9, 3}, {9, 4}, {10, 0}},\n\t}\n\treturn d[unitRadius]\n}", "title": "" }, { "docid": "b2920bfa513f47609a1a640e5e9a4cf5", "score": "0.45951572", "text": "func NewGrid(width, height int) Grid {\n\tvar grid = Grid{\n\t\tWidth: width,\n\t\tHeight: height,\n\t}\n\n\tfor i := 0; i < height; i++ {\n\t\tvar layer []int\n\n\t\tfor j := 0; j < width; j++ {\n\t\t\tlayer = append(layer, 0)\n\t\t}\n\t\tgrid.Cells = append(grid.Cells, layer)\n\t}\n\n\treturn grid\n}", "title": "" }, { "docid": "4d1522f11b3a424447016307cfef7cba", "score": "0.45905682", "text": "func neighboursRowUg(in []float64, ug *UniformGrid) [][]int {\n\ttheta := (in[1] + 90) * math.Pi / 180\n\tm := math.Round((theta * ug.MTheta / math.Pi) - 0.5)\n\ttheta = math.Pi * (m + 0.5) / ug.MTheta\n\tphi := in[0] * math.Pi / 180\n\tmPhi := math.Round(2.0 * math.Pi * math.Sin(theta) / ug.DPhi)\n\n\tn1 := math.Round(phi * mPhi / (2 * math.Pi))\n\tp1 := []int{mod(int(m), int(ug.MTheta)), mod(int(n1), int(mPhi))}\n\tp2 := []int{mod(int(m), int(ug.MTheta)), mod(int(n1+1), int(mPhi))}\n\tp3 := []int{mod(int(m), int(ug.MTheta)), mod(int(n1-1), int(mPhi))}\n\treturn [][]int{p1, p2, p3}\n}", "title": "" }, { "docid": "05e40e5da51ca5980a4218c1e0321994", "score": "0.4581676", "text": "func newGrid(nrows, ncols int, data [][]rune) (*grid, error) {\n\tif nrows != len(data) {\n\t\treturn nil, fmt.Errorf(\"grid data size mismatched: expecting %d rows, got %d rows\", nrows, len(data))\n\t}\n\tfor r := 0; r < nrows; r++ 
{\n\t\tif ncols != len(data[r]) {\n\t\t\treturn nil, fmt.Errorf(\n\t\t\t\t\"grid data size mismatched: expecting %d columns, got %d columns for \\\"%s\\\"\",\n\t\t\t\tncols, len(data[r]), string(data[r]),\n\t\t\t)\n\t\t}\n\t}\n\treturn &grid{nrows, ncols, data}, nil\n}", "title": "" }, { "docid": "170a2751698fb3a9e8967ebff4f57164", "score": "0.45772567", "text": "func makeNeighbors(numTiles, width int) [][]int {\n\tvar (\n\t\tneighbors = make([][]int, numTiles)\n\t\tinBounds = func(x, y int) bool {\n\t\t\treturn 0 <= x && x < width && 0 <= y && y < numTiles/width\n\t\t}\n\t\tdirs = [][2]int{\n\t\t\t{-1, +0}, // N\n\t\t\t{-1, +1}, // NE\n\t\t\t{+0, +1}, // E\n\t\t\t{+1, +1}, // SE\n\t\t\t{+1, +0}, // S\n\t\t\t{+1, -1}, // SW\n\t\t\t{+0, -1}, // W\n\t\t\t{-1, -1}, // NW\n\t\t}\n\t\ttileToPoint = func(i int) (x, y int) {\n\t\t\ty = i / width\n\t\t\tx = i - y*width\n\t\t\treturn x, y\n\t\t}\n\t\tpointToTile = func(x, y int) int {\n\t\t\treturn y*width + x\n\t\t}\n\t)\n\tfor i := 0; i < numTiles; i++ {\n\t\tnumNeighbors := 0 // count first to avoid multiple allocs\n\t\tx, y := tileToPoint(i)\n\t\tfor _, d := range dirs {\n\t\t\tif inBounds(x+d[0], y+d[1]) {\n\t\t\t\tnumNeighbors++\n\t\t\t}\n\t\t}\n\t\tneighbors[i] = make([]int, 0, numNeighbors)\n\t\tfor _, d := range dirs {\n\t\t\tif x1, y1 := x+d[0], y+d[1]; inBounds(x1, y1) {\n\t\t\t\tneighbors[i] = append(neighbors[i], pointToTile(x1, y1))\n\t\t\t}\n\t\t}\n\t}\n\treturn neighbors\n}", "title": "" }, { "docid": "db8fcc452edfb6b7380a3670cac33e5d", "score": "0.4569734", "text": "func (d *distanceGrid) SetDistances(dist *cell.Distances) {\n\td.distances = dist;\n}", "title": "" }, { "docid": "17f13d67630ccba2f5a8d15eaf94de27", "score": "0.45651758", "text": "func TestGridClusters(t *testing.T) {\n\tgrid, err := NewGrid(defaultData)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tknownCluster1, knownCluster2 := grid.PointSet(), grid.PointSet()\n\tknownCluster1.Add(Point{0, 0})\n\tknownCluster2.Add(Point{0, 2})\n\tknownCluster2.Add(Point{1, 2})\n\tclusters := grid.AllClusters()\n\tfor _, ps := range clusters {\n\t\tif !ps.Equals(knownCluster1) && !ps.Equals(knownCluster2) {\n\t\t\tt.Fatalf(\"unexpected cluster\")\n\t\t}\n\t}\n\tlargest := grid.LargestCluster()\n\tif !largest.Equals(knownCluster2) {\n\t\tt.Fatalf(\"incorrect largest cluster\")\n\t}\n}", "title": "" }, { "docid": "e2a0added0530a39cb6f5e04855f2669", "score": "0.4539113", "text": "func ManhDist(x1, y1, x2, y2 int) int {\n\treturn int(math.Abs(float64(x1-x2)) + math.Abs(float64(y1-y2)))\n}", "title": "" }, { "docid": "de05b85915a4fcd0c23ac46f720755d6", "score": "0.45310706", "text": "func (p Sim) IndexGrid(v gridlib.Vec) *Life {\n return p.Lattice.IndexGrid(v).(*Life)\n //return p.Lattice.Grid[v.Y][v.X].(*Life)\n}", "title": "" }, { "docid": "3a22526b4e1375238994cd6dfe697513", "score": "0.45287284", "text": "func (a *Matrix4) MultDist(v Vector3) (result Vector3) {\n\tresult.X = v.X*a.el[0][0] + v.Y*a.el[1][0] + v.Z*a.el[2][0]\n\tresult.Y = v.X*a.el[0][1] + v.Y*a.el[1][1] + v.Z*a.el[2][1]\n\tresult.Z = v.X*a.el[0][2] + v.Y*a.el[1][2] + v.Z*a.el[2][2]\n\treturn\n}", "title": "" }, { "docid": "a03b1d6e2999da27a4c48ab84430448c", "score": "0.45273912", "text": "func Grid(canvas fc.Canvas, left, bottom, width, height, size float64, color color.RGBA) {\n\tfor x := left; x <= left+width; x += size {\n\t\tcanvas.Line(x, bottom, x, bottom+height, 0.1, color)\n\t}\n\tfor y := bottom; y <= bottom+height; y += size {\n\t\tcanvas.Line(left, y, left+width, y, 0.2, color)\n\t}\n}", "title": "" }, { 
"docid": "2ca4259d6048c00afcf9a8aefc8ca6c5", "score": "0.45150727", "text": "func Grid(args *A) {\n\tio.Ff(&bufferPy, \"plt.grid(\")\n\tupdateBufferAndClose(&bufferPy, args, false)\n}", "title": "" }, { "docid": "5f069eefac7df387029854388c3db807", "score": "0.45150405", "text": "func HypercubeDims(xs [][]float64) []float64 {\n\tcoordsMin := []float64{}\n\tcoordsMax := []float64{}\n\n\tfor _, x := range xs[0] {\n\t\tcoordsMin = append(coordsMin, x)\n\t\tcoordsMax = append(coordsMax, x)\n\t}\n\n\tfor _, xi := range xs {\n\t\tfor j, xin := range xi {\n\t\t\tif xin > coordsMax[j] {\n\t\t\t\t//fmt.Println(xin,coordsMax[j])\n\t\t\t\tcoordsMax[j] = xin\n\t\t\t}\n\n\t\t\tif xin < coordsMin[j] {\n\t\t\t\tcoordsMin[j] = xin\n\t\t\t}\n\t\t}\n\t}\n\n\tdims := []float64{}\n\tfor i, min := range coordsMin {\n\t\tdims = append(dims, coordsMax[i]-min)\n\t}\n\n\treturn dims\n}", "title": "" }, { "docid": "65b51f565ca6ec46fca28096c041088f", "score": "0.45063803", "text": "func (pz *Puzzle) neighCoords(crd Coord) Group {\n\tpzLen := pz.Len()\n\tpzDiv := int(math.Sqrt(float64(pzLen)))\n\tretArray := NewGroup(pzLen, pz)\n\n\t// I want to round down to the previous multiple\n\tstartRow := ((crd.getRow() / pzDiv) * pzDiv)\n\tstartCol := ((crd.getColumn() / pzDiv) * pzDiv)\n\tstopRow := startRow + pzDiv\n\tstopCol := startCol + pzDiv\n\n\tfor row := startRow; row < stopRow; row++ {\n\t\tfor col := startCol; col < stopCol; col++ {\n\t\t\tnewCoord := []int{col, row}\n\t\t\tretArray.Add(newCoord)\n\n\t\t}\n\t}\n\treturn *retArray\n}", "title": "" }, { "docid": "4a86a00be41f65b4f19c80e14197fb36", "score": "0.44984645", "text": "func DistSqr(v1, v2 Vect) float32 {\n\treturn (v1.X-v2.X)*(v1.X-v2.X) + (v1.Y-v2.Y)*(v1.Y-v2.Y)\n}", "title": "" }, { "docid": "4cfcd0999a4c5904a67a6f6977961779", "score": "0.44950518", "text": "func (graph *GridGraph) Init() {\n\t// pre-calculate neighbours\n\tfor y := 0; y < graph.height; y++ {\n\t\tfor x := 0; x < graph.width; x++ {\n\t\t\t// populate neighbours in row order\n\t\t\tedges := [8][2]int{\n\t\t\t\t[2]int{x - 1, y - 1},\n\t\t\t\t[2]int{x, y - 1},\n\t\t\t\t[2]int{x + 1, y - 1},\n\t\t\t\t[2]int{x - 1, y},\n\t\t\t\t[2]int{x + 1, y},\n\t\t\t\t[2]int{x - 1, y + 1},\n\t\t\t\t[2]int{x, y + 1},\n\t\t\t\t[2]int{x + 1, y + 1},\n\t\t\t}\n\n\t\t\tvar corners [][2]int\n\t\t\tfor _, corner := range edges {\n\t\t\t\tif graph.inGrid(corner[0], corner[1]) {\n\t\t\t\t\tcorners = append(corners, corner)\n\t\t\t\t}\n\t\t\t}\n\t\t\tgraph.edges[x+y*graph.width] = corners\n\t\t\tgraph.costs[x+y*graph.width] = make([]float64, len(corners))\n\t\t\tfor i := range corners {\n\t\t\t\txDiff := math.Abs(float64(x - corners[i][0]))\n\t\t\t\tyDiff := math.Abs(float64(y - corners[i][1]))\n\n\t\t\t\tgraph.costs[x+y*graph.width][i] = math.Sqrt(xDiff*xDiff + yDiff*yDiff)\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "825baf8ae9bf7ea0336d672152244d4f", "score": "0.44839492", "text": "func eukl_distance(dims int,p1 []float64, p2 []float64) float64 {\n\ttemp := 0.0\n\tfor i := 0; i < dims; i++{\n\t\ttemp += math.Pow(p1[i]-p2[i],2)\n\t}\n\treturn math.Sqrt(temp)\n}", "title": "" }, { "docid": "2a119b29f99fe5c3c651b40d84bb2482", "score": "0.44694403", "text": "func NewGrid(length int, width int) *Grid {\n\tg := new(Grid)\n\tg.grid = make([][]string, length)\n\tfor i := 0; i < length; i++ {\n\t\tg.grid[i] = make([]string, width)\n\t\tfor j := range g.grid[i] {\n\t\t\tg.grid[i][j] = \".\"\n\t\t}\n\t}\n\treturn g\n}", "title": "" }, { "docid": "bc746484bf902e2d97b0eed5fb01a968", "score": "0.446682", "text": "func (d 
*Dist1D) Entries() int64 {\n\treturn d.Dist.Entries()\n}", "title": "" }, { "docid": "73e2f560244959407fc898f72360c8ee", "score": "0.44639298", "text": "func getSubgrid(coords [2]byte) [][2]byte {\n\ti, j := coords[0], coords[1]\n\txShift := 3 * byte(math.Floor(float64(i/3)))\n\tyShift := 3 * byte(math.Floor(float64(j/3)))\n\tflatSubgrid := [][2]byte{}\n\tfor x := byte(0); x < 3; x++ {\n\t\tfor y := byte(0); y < 3; y++ {\n\t\t\tidx := [2]byte{xShift + x, yShift + y}\n\t\t\tflatSubgrid = append(flatSubgrid, idx)\n\t\t}\n\t}\n\treturn flatSubgrid\n}", "title": "" }, { "docid": "a97200a412445a55eba8e27ae10bbd36", "score": "0.44589084", "text": "func NewGrid() Grid {\n\treturn &grid{\n\t\tindexof: map[Control]int{},\n\t}\n}", "title": "" }, { "docid": "091b81677f7a53d44a3f774d80f90175", "score": "0.44448274", "text": "func (props *screenGridProps) entryPosToGridPos(pos int) (row, col int) {\n\trow = pos / props.gridWH\n\tcol = pos - (row * props.gridWH)\n\n\tif row >= props.rows || col >= props.cols {\n\t\trow, col = -1, -1\n\t\treturn\n\t}\n\n\t// SF grid starts in lower left, but input coordinates start in upper left,\n\t// so we'll mirror the grid height pos.\n\trow = (props.gridWH - 1) - row\n\n\treturn row, col\n}", "title": "" }, { "docid": "25f3f4cd7ee2f833fbdfaf150950b7cd", "score": "0.44327098", "text": "func gridProduct(matrix [][]int) int {\n\tvar largestSum int\n\tvar currentSum int\n\n\tfor y := 0; y < 20; y++ {\n\t\tfor x := 0; x < 20; x++ {\n\t\t\tif y < 17 {\n\t\t\t\tcurrentSum = matrix[x][y] * matrix[x][y+1] * matrix[x][y+2] * matrix[x][y+3] //travels down, also means that we dont travel upwards as they will be the same as this eventually\n\t\t\t\tif currentSum > largestSum {\n\t\t\t\t\tlargestSum = currentSum\n\t\t\t\t}\n\n\t\t\t}\n\t\t\tif x < 17 {\n\t\t\t\tcurrentSum = matrix[x][y] * matrix[x+1][y] * matrix[x+2][y] * matrix[x+3][y] //travels right, also means we don't travel left\n\t\t\t\tif currentSum > largestSum {\n\t\t\t\t\tlargestSum = currentSum\n\t\t\t\t}\n\n\t\t\t}\n\t\t\tif x < 17 && y < 17 {\n\t\t\t\tcurrentSum = matrix[x][y] * matrix[x+1][y+1] * matrix[x+2][y+2] * matrix[x+3][y+3] //travels down and right, we dont travel up and left because of this\n\t\t\t\tif currentSum > largestSum {\n\t\t\t\t\tlargestSum = currentSum\n\t\t\t\t}\n\t\t\t}\n\t\t\tif y < 17 && 2 < x {\n\t\t\t\tcurrentSum = matrix[x][y] * matrix[x-1][y+1] * matrix[x-2][y+2] * matrix[x-3][y+3] // travels left and down\n\t\t\t\tif currentSum > largestSum {\n\t\t\t\t\tlargestSum = currentSum\n\t\t\t\t}\n\t\t\t}\n\t\t\tif x < 17 && 2 < y {\n\t\t\t\tcurrentSum = matrix[x][y] * matrix[x+1][y-1] * matrix[x+2][y-2] * matrix[x+3][y-3] //travels right and up\n\t\t\t\tif currentSum > largestSum {\n\t\t\t\t\tlargestSum = currentSum\n\t\t\t\t}\n\t\t\t}\n\n\t\t}\n\t}\n\n\treturn largestSum\n}", "title": "" }, { "docid": "0f07649807605e66947c934a118cf199", "score": "0.44315207", "text": "func CalRowEntropy(din [][]float64)(e []float64){\r\n xweit := []float64{}\r\n k := 0.0\r\n\r\n if len(din) != 0 {\r\n k = 1/math.Log(float64(len(din[0]) ))\r\n }\r\n\r\n _, Y := Norm_2d(din)\r\n //Y, _ := Norm_ent(din)\r\n\r\n for _, row := range Y{\r\n //1)计算第j项指标下第i个样本值占该指标的比重:\r\n xweit = SmpWeitInRow(row) // xweit is {p_ij, ...}, x weight in row(a indicator)\r\n mul_p_ln, sum_p_ln := []float64{}, 0.0\r\n for _, p_ij := range xweit{\r\n mul_p_ln = append(mul_p_ln, p_ij * math.Log(p_ij) )\r\n }\r\n sum_p_ln = sum1d(mul_p_ln)\r\n //2)计算第j项指标的熵值\r\n e = append(e, -k * sum_p_ln)\r\n }\r\n return\r\n}", "title": "" }, { "docid": 
"ad5d08afa4b9a3ea1c5ca883d8414108", "score": "0.44260207", "text": "func NewDenseGrid(size vec2d.I, generator Generator) *DenseGrid {\n\tg := &DenseGrid{\n\t\tSize: size,\n\t\tData: make([]interface{}, size.Area()),\n\t}\n\tif generator != nil {\n\t\tfor iter, pt, ok := g.Size.FromOrigin().Start(); ok; pt, ok = iter.Next() {\n\t\t\tg.Data[iter.Idx()] = generator(pt)\n\t\t}\n\t}\n\treturn g\n}", "title": "" }, { "docid": "8e116bdba81891e08b7303992960408f", "score": "0.44207212", "text": "func (b *Bin2D) Entries() int64 {\n\treturn b.Dist.Entries()\n}", "title": "" }, { "docid": "0a38de7a2b5960f57eb4144f36edba51", "score": "0.44145873", "text": "func (m *CDense) Dims() (r, c int) {\n\treturn m.mat.Rows, m.mat.Cols\n}", "title": "" }, { "docid": "49be631866df4fb3532562d7e78a2baf", "score": "0.4407797", "text": "func outputGrid() {\n\n\tfor i:= 0; i < 9; i+=1{\n\t\tfor j:= 0; j< 9; j+=1{\n\t\t\tfmt.Printf(\"%d\\t\",ansGrid[i][j])\n\t\t}\n\t\tfmt.Println()\n\t}\n}", "title": "" }, { "docid": "31cff905d4a3f37be309ebc40c92ff2b", "score": "0.4404032", "text": "func (c *Canvas) Grid(x, y, w, h, size, interval float32, linecolor color.NRGBA) {\n\tfor xp := x; xp <= x+w; xp += interval {\n\t\tc.Line(xp, y, xp, y+h, size, linecolor) // vertical line\n\t}\n\tfor yp := y; yp <= y+h; yp += interval {\n\t\tc.Line(x, yp, x+w, yp, size, linecolor) // horizontal line\n\t}\n}", "title": "" }, { "docid": "64f14f1fc16193c7d6c9c5dc1899084d", "score": "0.4403637", "text": "func NewGrid(nh, nv int) Grid {\n\tgrid := Grid{NumHoriz: nh, NumVert: nv}\n\treturn grid\n}", "title": "" }, { "docid": "0773717eec13a7f8b6280c6b47b7eda2", "score": "0.440323", "text": "func Grid(args string) {\n\tio.Ff(&bb, \"grid(%s)\\n\", args)\n}", "title": "" }, { "docid": "fad283d235d49e4784099f7a90bc27a8", "score": "0.4400535", "text": "func (vd GenotypeDistancer) Distance(i, j int) float64 {\n\tvi := vd.Matrix[i]\n\tvj := vd.Matrix[j]\n\tdist := 0.0\n\tfor k, vik := range vi {\n\t\tvjk := vj[k]\n\t\tdist += (vik - vjk) * (vik - vjk)\n\t}\n\treturn dist\n}", "title": "" }, { "docid": "4a8cc0d1471525e15d026eaf5f6bb24a", "score": "0.43878907", "text": "func (osg *OpenSimplexGenerator) Get2D(x float64, y float64) float64 {\n\t// place input coordinates onto grid\n\tstretchOffset := (x + y) * stretchConstant2D\n\txs := x + stretchOffset\n\tys := y + stretchOffset\n\n\t// floor to get grid coordinates of rhombus (stretched square) super-cell origin\n\txsb := int(math.Floor(xs))\n\tysb := int(math.Floor(ys))\n\n\t// skew out to get actual coordinates of rhombus origin. 
we'll need these later\n\tsquishOffset := float64(xsb+ysb) * squishConstant2D\n\txb := float64(xsb) + squishOffset\n\tyb := float64(ysb) + squishOffset\n\n\t// compute grid coordinates relative to rhombus origin\n\txins := xs - float64(xsb)\n\tyins := ys - float64(ysb)\n\n\t// sum those together to get a value that determines which region we're in\n\tinSum := xins + yins\n\n\t// positions relative to origin point\n\tdx0 := x - xb\n\tdy0 := y - yb\n\n\t// we'll be defining these inside the next block and using them afterwards\n\tvar dx_ext, dy_ext float64\n\tvar xsv_ext, ysv_ext int\n\n\tvar value float64\n\n\t// contribution (1,0)\n\tdx1 := dx0 - 1 - squishConstant2D\n\tdy1 := dy0 - 0 - squishConstant2D\n\tattn1 := 2 - dx1*dx1 - dy1*dy1\n\tif attn1 > 0 {\n\t\tattn1 *= attn1\n\t\tvalue += attn1 * attn1 * osg.extrapolate2(xsb+1, ysb, dx1, dy1)\n\t}\n\n\t// contribution (0,1)\n\tdx2 := dx0 - 0 - squishConstant2D\n\tdy2 := dy0 - 1 - squishConstant2D\n\tattn2 := 2 - dx2*dx2 - dy2*dy2\n\tif attn2 > 0 {\n\t\tattn2 *= attn2\n\t\tvalue += attn2 * attn2 * osg.extrapolate2(xsb, ysb+1, dx2, dy2)\n\t}\n\n\tif inSum <= 1 { // we're inside the triangle (2-Simplex) at (0,0)\n\t\tzins := 1 - inSum\n\t\tif (zins > xins) || (zins > yins) { // (0,0) is one of the closest two triangle vertices\n\t\t\tif xins > yins {\n\t\t\t\txsv_ext = xsb + 1\n\t\t\t\tysv_ext = ysb - 1\n\t\t\t\tdx_ext = dx0 - 1\n\t\t\t\tdy_ext = dy0 + 1\n\t\t\t} else {\n\t\t\t\txsv_ext = xsb - 1\n\t\t\t\tysv_ext = ysb + 1\n\t\t\t\tdx_ext = dx0 + 1\n\t\t\t\tdy_ext = dy0 - 1\n\t\t\t}\n\t\t} else { // (1,0) and (0,1) are the closest two vertices\n\t\t\txsv_ext = xsb + 1\n\t\t\tysv_ext = ysb + 1\n\t\t\tdx_ext = dx0 - 1 - 2*squishConstant2D\n\t\t\tdy_ext = dy0 - 1 - 2*squishConstant2D\n\t\t}\n\t} else { // we're inside the triangle (2-Simplex) at (1,1)\n\t\tzins := 2 - inSum\n\t\tif (zins < xins) || (zins < yins) { // (0,0) is one of the closest two triangle vertices\n\t\t\tif xins > yins {\n\t\t\t\txsv_ext = xsb + 2\n\t\t\t\tysv_ext = ysb\n\t\t\t\tdx_ext = dx0 - 2 - 2*squishConstant2D\n\t\t\t\tdy_ext = dy0 - 2*squishConstant2D\n\t\t\t} else {\n\t\t\t\txsv_ext = xsb\n\t\t\t\tysv_ext = ysb + 2\n\t\t\t\tdx_ext = dx0 - 2*squishConstant2D\n\t\t\t\tdy_ext = dy0 - 2 - 2*squishConstant2D\n\t\t\t}\n\t\t} else { // (1,0) and (0,1) are the closest two vertices\n\t\t\tdx_ext = dx0\n\t\t\tdy_ext = dy0\n\t\t\txsv_ext = xsb\n\t\t\tysv_ext = ysb\n\t\t}\n\t\txsb += 1\n\t\tysb += 1\n\t\tdx0 = dx0 - 1 - 2*squishConstant2D\n\t\tdy0 = dy0 - 1 - 2*squishConstant2D\n\t}\n\n\t// contribution (0,0) or (1,1)\n\tattn0 := 2 - dx0*dx0 - dy0*dy0\n\tif attn0 > 0 {\n\t\tattn0 *= attn0\n\t\tvalue += attn0 * attn0 * osg.extrapolate2(xsb, ysb, dx0, dy0)\n\t}\n\n\t// extra vertex\n\tattn_ext := 2 - dx_ext*dx_ext - dy_ext*dy_ext\n\tif attn_ext > 0 {\n\t\tattn_ext *= attn_ext\n\t\tvalue += attn_ext * attn_ext * osg.extrapolate2(xsv_ext, ysv_ext, dx_ext, dy_ext)\n\t}\n\n\treturn value / normConstant2D\n}", "title": "" }, { "docid": "4fe018a7c9b174979b4e13c1cf168e42", "score": "0.43726012", "text": "func getDistance(start, end client.Position) (dis int) {\n\tstartX, startY, startZ := axialToCube(start)\n\tendX, endY, endZ := axialToCube(end)\n\treturn max(max(abs(startX-endX), abs(startY-endY)), abs(startZ-endZ))\n}", "title": "" }, { "docid": "54c900e456d6b1bb624c949e3e5acd10", "score": "0.437154", "text": "func getNeighbors(grid [][]int, x int, y int) (neighbors []int) {\n\tsize := len(grid)\n\tneighbors = make([]int, 0, 8)\n\n\t// Add all in bounds neighbors\n\tif x-1 >= 0 && y-1 >= 
0 {\n\t\tneighbors = append(neighbors, grid[x-1][y-1])\n\t}\n\tif x-1 >= 0 {\n\t\tneighbors = append(neighbors, grid[x-1][y])\n\t}\n\tif y-1 >= 0 {\n\t\tneighbors = append(neighbors, grid[x][y-1])\n\t}\n\tif x+1 < size && y+1 < size {\n\t\tneighbors = append(neighbors, grid[x+1][y+1])\n\t}\n\tif x+1 < size {\n\t\tneighbors = append(neighbors, grid[x+1][y])\n\t}\n\n\tif y+1 < size {\n\t\tneighbors = append(neighbors, grid[x][y+1])\n\t}\n\tif x+1 < size && y-1 >= 0 {\n\t\tneighbors = append(neighbors, grid[x+1][y-1])\n\t}\n\tif x-1 >= 0 && y+1 < size {\n\t\tneighbors = append(neighbors, grid[x-1][y+1])\n\t}\n\n\treturn neighbors\n}", "title": "" }, { "docid": "416b77cc19a915ac3f890cd08ce237a0", "score": "0.43618634", "text": "func gridGenration(grid matrix) {\n\t\n\tfillGrid(grid, 0, 0)\n\n\t//check user level of the game \n\tfor ((total > 0) && (level < userLevel)){\n\t\ttotal = 0\n\t\t//genrate random number of row and col \n\t\trow := rand.Intn(9)%9\n\t\tcol := rand.Intn(9)%9\n\t\tnumber := (rand.Intn(9)%9) + 1\n\t\t\n\t\t//fmt.Println(total, row, col, number)\n\t\t//fmt.Println(grid)\n\t\tif(grid[row][col] == 0){\n\n\t\t\tfor (fitGrid(grid, number, row, col) == false){\n\t\t\t\t//genrate random number to satified all rules of sudoku\n\t\t\t\tnumber = (rand.Intn(9) %9) +1\n\t\t\t}\n\n\t\t\t//fmt.Println(\"fet in gred \", number)\n\t\t\tgrid[row][col] = number\n\t\t\tfillGrid(grid, 0, 0)\n\t\t\t//fmt.Println(\"Run\")\n\t\t\t//fmt.Println(grid)\n\n\t\t\tif(total == 0){\n\t\t\t\tgrid[row][col] = 0;\n\t\t\t\ttotal = 1\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tlevel+=1\n\t\t}\n\t\ttotal = 1\n\t}\n\tfmt.Println(\"Question of grid\")\n\tquestionGrid(grid)\n\tfmt.Println(\"Answer of grid\")\n\toutputGrid()\n}", "title": "" }, { "docid": "1f51705fa225e60ea9f605cf8cd9771f", "score": "0.435306", "text": "func (x *Point3) Dist(y *Point3) float64 {\n\treturn math.Sqrt(x.DistSqr(y))\n}", "title": "" }, { "docid": "5a01b3470588a21aacd2102ef4b9c26e", "score": "0.43516025", "text": "func NewGrid(size, step float32, color *math32.Color) *Grid {\r\n\r\n\tgrid := new(Grid)\r\n\r\n\thalf := size / 2\r\n\tpositions := math32.NewArrayF32(0, 0)\r\n\tfor i := -half; i <= half; i += step {\r\n\t\tpositions.Append(\r\n\t\t\t-half, 0, i, color.R, color.G, color.B,\r\n\t\t\thalf, 0, i, color.R, color.G, color.B,\r\n\t\t\ti, 0, -half, color.R, color.G, color.B,\r\n\t\t\ti, 0, half, color.R, color.G, color.B,\r\n\t\t)\r\n\t}\r\n\r\n\t// Create geometry\r\n\tgeom := geometry.NewGeometry()\r\n\tgeom.AddVBO(\r\n\t\tgls.NewVBO(positions).\r\n\t\t\tAddAttrib(gls.VertexPosition).\r\n\t\t\tAddAttrib(gls.VertexColor),\r\n\t)\r\n\r\n\t// Create material\r\n\tmat := material.NewBasic()\r\n\r\n\t// Initialize lines with the specified geometry and material\r\n\tgrid.Lines.Init(geom, mat)\r\n\treturn grid\r\n}", "title": "" }, { "docid": "d11dcf02f610ba4d3ee720cb8705b925", "score": "0.43514282", "text": "func TestNextGridNumber(t *testing.T) {\n\tL := 3\n\texpectedNumGrids := int(math.Pow(2.0, float64(L*L)))\n\tnumGrids := 0\n\tg := NewGridWithDims(L, L)\n\tfor {\n\t\tdone := g.NextGrid()\n\t\tnumGrids++\n\t\tif done {\n\t\t\tbreak\n\t\t}\n\t}\n\tif numGrids != expectedNumGrids {\n\t\tt.Fatalf(\"g.NextGrid() did not produce the expected number of unique grids\")\n\t}\n}", "title": "" }, { "docid": "20c198e7228d46aca8ff720ae3c9cc07", "score": "0.43454835", "text": "func DistIt(num int, mem, cpu float64, offers ...ms.Offer) (dist *Dist, err error) {\n\thrs := dpFillHostRes(nil, nil, mapIntoHostRes(offers, mem, cpu), num, 1)\n\tif 
!checkDist(hrs, num) {\n\t\terr = ErrBadDist\n\t\treturn\n\t}\n\tportsMap := mapIntoPortsMap(offers)\n\tdist = mapHostResIntoDist(hrs, portsMap)\n\treturn\n}", "title": "" }, { "docid": "6215150dfcc6cb344907989222e73dde", "score": "0.43435183", "text": "func NewGridRenderable(context *graphics.RenderContext) *GridRenderable {\n\tgl := context.OpenGl()\n\tprogram, programErr := opengl.LinkNewStandardProgram(gl, gridVertexShaderSource, gridFragmentShaderSource)\n\n\tif programErr != nil {\n\t\tpanic(fmt.Errorf(\"GridRenderable shader failed: %v\", programErr))\n\t}\n\trenderable := &GridRenderable{\n\t\tcontext: context,\n\t\tprogram: program,\n\t\tvao: opengl.NewVertexArrayObject(gl, program),\n\t\tvertexPositionBuffer: gl.GenBuffers(1)[0],\n\t\tvertexPositionAttrib: gl.GetAttribLocation(program, \"vertexPosition\"),\n\t\tviewMatrixUniform: opengl.Matrix4Uniform(gl.GetUniformLocation(program, \"viewMatrix\")),\n\t\tprojectionMatrixUniform: opengl.Matrix4Uniform(gl.GetUniformLocation(program, \"projectionMatrix\"))}\n\n\t{\n\t\tgl.BindBuffer(opengl.ARRAY_BUFFER, renderable.vertexPositionBuffer)\n\t\thalf := fineCoordinatesPerTileSide / float32(2.0)\n\t\tlimit := float32(fineCoordinatesPerTileSide*tilesPerMapSide + half)\n\t\tvar vertices = []float32{\n\t\t\t-half, -half, 0.0,\n\t\t\tlimit, -half, 0.0,\n\t\t\tlimit, limit, 0.0,\n\n\t\t\tlimit, limit, 0.0,\n\t\t\t-half, limit, 0.0,\n\t\t\t-half, -half, 0.0}\n\t\tgl.BufferData(opengl.ARRAY_BUFFER, len(vertices)*4, vertices, opengl.STATIC_DRAW)\n\t\tgl.BindBuffer(opengl.ARRAY_BUFFER, 0)\n\t}\n\trenderable.vao.WithSetter(func(gl opengl.OpenGl) {\n\t\tgl.EnableVertexAttribArray(uint32(renderable.vertexPositionAttrib))\n\t\tgl.BindBuffer(opengl.ARRAY_BUFFER, renderable.vertexPositionBuffer)\n\t\tgl.VertexAttribOffset(uint32(renderable.vertexPositionAttrib), 3, opengl.FLOAT, false, 0, 0)\n\t\tgl.BindBuffer(opengl.ARRAY_BUFFER, 0)\n\t})\n\n\treturn renderable\n}", "title": "" }, { "docid": "e858e729dbdad3606c0f10da4e80ced4", "score": "0.4333553", "text": "func (g *grid) mkgrid() (gg [][]int, colwidths []int, rowheights []int) {\n\tgg = make([][]int, g.ymax)\n\tfor y := 0; y < g.ymax; y++ {\n\t\tgg[y] = make([]int, g.xmax)\n\t\tfor x := 0; x < g.xmax; x++ {\n\t\t\tgg[y][x] = -1\n\t\t}\n\t}\n\tfor i := range g.controls {\n\t\tfor y := g.controls[i].y; y < g.controls[i].y+g.controls[i].yspan; y++ {\n\t\t\tfor x := g.controls[i].x; x < g.controls[i].x+g.controls[i].xspan; x++ {\n\t\t\t\tgg[y][x] = i\n\t\t\t}\n\t\t}\n\t}\n\treturn gg, make([]int, g.xmax), make([]int, g.ymax)\n}", "title": "" }, { "docid": "417e412bc0467aa4e1b0843c3811291f", "score": "0.43331563", "text": "func (c Clusters) CentersInDimension(n int) Coordinates {\n\tvar v []float64\n\tfor _, cl := range c {\n\t\tv = append(v, cl.Center[n])\n\t}\n\treturn v\n}", "title": "" }, { "docid": "8c1cb75194deef1d74664be31d5677e1", "score": "0.4331614", "text": "func randLocal2(fst GridPosition)GridPosition{\n\tgd := GridPosition{rand.Intn(N), rand.Intn(N)}\n\n\tfor isItOver(distanciaJogadores(fst,gd)){\n\t\tgd = GridPosition{rand.Intn(N), rand.Intn(N)}\n\t}\n\treturn gd;\n}", "title": "" }, { "docid": "5b80791d676b3dad5322bff71ad2be6d", "score": "0.4331327", "text": "func neighboursUg(in int, ug *UniformGrid) []int {\n\tvar neighbours [][]int\n\tvar inGrid = ug.IDToGrid(in)\n\tm := inGrid[0]\n\tn := inGrid[1]\n\tneighbours = append(neighbours, []int{m, mod(n-1, len(ug.VertexData[m]))})\n\tneighbours = append(neighbours, []int{m, mod(n+1, len(ug.VertexData[m]))})\n\tfmt.Printf(\"\\nIngrid: %v\\n \", 
inGrid)\n\tfmt.Printf(\"nbs same row: %v\\n \", neighbours)\n\tcoord := ug.GridToCoord(inGrid)\n\n\tif m > 0 {\n\t\tfmt.Printf(\"m-1: %v\\n\", m-1)\n\t\tcoordDown := ug.GridToCoord([]int{m - 1, n})\n\t\tneighbours = append(neighbours, neighboursRowUg([]float64{coord[0], coordDown[1]}, ug)...)\n\t}\n\tfmt.Printf(\"m, len(VD): %v, %v\\n\", m, len(ug.VertexData))\n\tif m < len(ug.VertexData)-1 {\n\t\tcoordUp := ug.GridToCoord([]int{m + 1, n})\n\t\tneighbours = append(neighbours, neighboursRowUg([]float64{coord[0], coordUp[1]}, ug)...)\n\t}\n\n\tvar neighbours1d []int\n\tfor _, neighbour := range neighbours {\n\t\t//if !ug.VertexData[neighbour[0]][neighbour[1]] {\n\t\tneighbours1d = append(neighbours1d, ug.GridToID(neighbour))\n\t\t//}\n\t}\n\treturn neighbours1d\n}", "title": "" }, { "docid": "3982c8cb515c6dc12ceda0397a0c6c92", "score": "0.43299073", "text": "func TestGridCreation(t *testing.T) {\n\tdata := defaultData\n\tgrid, err := NewGrid(data)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif grid.Lx() != len(data) || grid.Ly() != len(data[0]) {\n\t\tt.Fatalf(\"grid does not have the correct dimensions\")\n\t}\n\tfor x, row := range data {\n\t\tfor y, val := range row {\n\t\t\tif grid.Get(Point{x, y}) != val {\n\t\t\t\tt.Fatalf(\"grid holds incorrect value\")\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "982eda77ea04955b1f076ba4930981d5", "score": "0.43263933", "text": "func newGrid(width, height int) Grid {\n\tgrid := make([][]bool, width)\n\n\tfor index, _ := range grid {\n\t\tgrid[index] = make([]bool, height)\n\t}\n\n\treturn grid\n}", "title": "" }, { "docid": "7329580472b4c9012d3aa06cc793f902", "score": "0.43147168", "text": "func (ds Disters) Dist(v vector.Vec3) float64 {\n\td := math.MaxFloat64\n\tfor _, cd := range ds {\n\t\td = math.Min(cd.Dist(v), d)\n\t}\n\treturn d\n}", "title": "" }, { "docid": "ed9e024f31ad78fd914adf2560777a89", "score": "0.4310567", "text": "func (d *Dist0D) Entries() int64 {\n\treturn d.N\n}", "title": "" }, { "docid": "7d4181fa9e1bc098901b3e9bde86e93c", "score": "0.4309971", "text": "func getDistance(a worldgen.Coordinate, b worldgen.Coordinate) float64 {\n\treturn math.Abs(float64(int64(a.X-b.X))) + math.Abs(float64(int64(a.Y-b.Y)))\n}", "title": "" }, { "docid": "b07e69ee7f72a27c75c45c4b96b52d90", "score": "0.4306661", "text": "func createGrid(size int) [][]int {\n\tgrid := make([][]int, size)\n\tfor i := 0; i < size; i++ {\n\t\tgrid[i] = make([]int, size)\n\t}\n\n\t// Randomly populate the grid\n\tfor i := 0; i < size*3; i++ {\n\t\tx := rand.Intn(size)\n\t\ty := rand.Intn(size)\n\t\tgrid[x][y] = 1\n\t}\n\n\treturn grid\n}", "title": "" }, { "docid": "8fe2a88d3786f3910074335c537322f2", "score": "0.4301544", "text": "func Diamond(startSize vec2d.I, iterations int, noiseDecay float64) *DenseGrid {\n\tgen := newDiamondGenerator(startSize, iterations, 1.0, noiseDecay)\n\n\t// populate the initial points\n\tfor iter, pt, ok := startSize.FromOrigin().Start(); ok; pt, ok = iter.Next() {\n\t\tp2 := pt.ScalarMultiply(gen.scale)\n\t\tidx := gen.size.Idx(p2)\n\t\tgen.grid[idx] = rand.Float64()\n\t}\n\n\tgen.fillAll()\n\tg := gen.getGrid()\n\tNormalize(g)\n\treturn g\n}", "title": "" }, { "docid": "d61654d20510432b413cd862e3bf1e47", "score": "0.42966932", "text": "func (m mesh) Norm(ord norm) float64 {\n\tswitch ord {\n\tcase One:\n\t\tsv := make([]float64, m.c) // hold the sums of each col\n\n\t\t// we extract each column\n\t\tfor j := 0; j < m.c; j++ {\n\t\t\t// we extract the elems in the col\n\t\t\tsum := 0. 
// we wanna keep track of the sum of each col\n\t\t\tfor k := 0; k < m.r; k++ {\n\t\t\t\tsum += math.Abs(m.elems[k*m.c+j])\n\t\t\t}\n\t\t\t// we then wanna put it inside a vector so that\n\t\t\t// we can get the biggest col sum out\n\t\t\tsv[j] = sum\n\t\t}\n\n\t\t// return the biggest column sum\n\t\t_, max := VecMax(sv)\n\t\treturn max\n\tcase Two:\n\t\tsum := 0.\n\t\tfor _, v := range m.elems {\n\t\t\tsum += v * v\n\t\t}\n\t\treturn math.Sqrt(sum)\n\tcase Inf:\n\t\tsv := make([]float64, m.c) // no. of elems in row = no. of cols\n\n\t\t// we extract each row\n\t\tfor i := 0; i < m.r; i++ {\n\t\t\t// we extract the elems in the row\n\t\t\tsum := 0. // we wanna keep track of the sum of each row\n\t\t\tfor j := 0; j < m.c; j++ {\n\t\t\t\tsum += math.Abs(m.elems[i*m.c+j])\n\t\t\t}\n\t\t\t// we wanna put the sum into a vector so that\n\t\t\t// we can get the biggest row sum out\n\t\t\tsv[i] = sum\n\t\t}\n\n\t\t// return the biggest row sum\n\t\t_, max := VecMax(sv)\n\t\treturn max\n\t}\n\treturn 0\n}", "title": "" }, { "docid": "0732033e542b230ad0885bd558d01fb8", "score": "0.42905653", "text": "func columnNorms(m, n int, a []float64, lda int) []float64 {\n\tbi := blas64.Implementation()\n\tnorms := make([]float64, n)\n\tfor j := 0; j < n; j++ {\n\t\tnorms[j] = bi.Dnrm2(m, a[j:], lda)\n\t}\n\treturn norms\n}", "title": "" }, { "docid": "4c13653b345fa69871c7a7e1247fb521", "score": "0.4288969", "text": "func (n *RTreePoint) Dist(r *Rect) float64 {\n\treturn math.sqrt(n.SquaredDist(r))\n}", "title": "" }, { "docid": "cec23bb6b108c7a5f1ed3bfcf8b460d5", "score": "0.4286784", "text": "func TileGridFromChars(charGrid [][]string) (tg [][]*grid.Tile, err error) {\n\ttgLen := len(charGrid[0])\n\ttgWidth := len(charGrid)\n\n\t// Initialize tile grid\n\ttg = make([][]*grid.Tile, tgLen)\n\tfor i, _ := range tg {\n\t\ttg[i] = make([]*grid.Tile, tgWidth)\n\t}\n\n\t// Loop through char grid\n\tfor r, row := range charGrid {\n\t\tfor i, char := range row {\n\t\t\ttg[i][r] = tileFromChar(char)\n\t\t}\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "ef94152d84d4b995d61a4f9ad6fe2e0b", "score": "0.42857468", "text": "func sqdist(x1, y1, x2, y2 int) int {\n\tdx, dy := x2 - x1, y2 - y1\n\treturn dx*dx + dy*dy\n}", "title": "" }, { "docid": "249a1173a8266670251bafd52f36773e", "score": "0.42845738", "text": "func (g *Game) getAccessibleCells(c Side) [][2]int {\n\tcells := [][2]int{}\n\tfor x := 0; x < len(g.Board); x++ {\n\t\tfor y := 0; y < len(g.Board[0]); y++ {\n\t\t\tif g.roomIsValid(c, x, y) {\n\t\t\t\tcells = append(cells, [2]int{x, y})\n\t\t\t}\n\t\t}\n\t}\n\n\treturn cells\n}", "title": "" }, { "docid": "de498390d54d3a76989c1a77d6a805cb", "score": "0.4280803", "text": "func sum4DNeighbours(grid [][][][]bool, w, x, y, z int) int {\n\ttotal := 0\n\n\tfor h := -1; h <= 1; h++ {\n\t\tfor i := -1; i <= 1; i++ {\n\t\t\tfor j := -1; j <= 1; j++ {\n\t\t\t\tfor k := -1; k <= 1; k++ {\n\t\t\t\t\t// Set up indices for all the neighbours\n\t\t\t\t\two := w + h\n\t\t\t\t\txo := x + i\n\t\t\t\t\tyo := y + j\n\t\t\t\t\tzo := z + k\n\n\t\t\t\t\t// Don't count the hypercube itself\n\t\t\t\t\tif !(h == 0 && i == 0 && j == 0 && k == 0) {\n\t\t\t\t\t\t// Bounds checking - the grid is big enough, so disregard\n\t\t\t\t\t\t// Only add to total if the neighbour is active\n\t\t\t\t\t\tif wo >= 0 && xo >= 0 && yo >= 0 && zo >= 0 &&\n\t\t\t\t\t\t\two < l && xo < m && yo < n && zo < p &&\n\t\t\t\t\t\t\tgrid[wo][xo][yo][zo] {\n\t\t\t\t\t\t\ttotal++\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn total\n}", "title": 
"" }, { "docid": "b113671937901483b3aa536c2649401d", "score": "0.42799687", "text": "func GetGrid(game Game) [][]Cell {\n\tgrid := [][]Cell{}\n\tfor y := 0; y < game.bounds.Y; y++ {\n\t\trow := []Cell{}\n\t\tfor x := 0; x < game.bounds.X; x++ {\n\t\t\trow = append(row, getCellType(Vector{x, y}, game))\n\t\t}\n\t\tgrid = append(grid, row)\n\t}\n\treturn grid\n}", "title": "" } ]
adf2e9d0856bfc7c44f5d54739e6f9e1
Eliminate orphans by walking the chain backwards and, whenever we see more than one block at a height, picking the one that matches its descendant's PrevHash.
[ { "docid": "8942abda7b56fc071c413c9952d90818", "score": "0.5905261", "text": "func eliminateOrphans(m map[int][]*leveldbBlockHeader) (int, error) {\n\n\tminHeight, maxHeight, count := -1, -1, 0\n\n\t// Find min, max and count\n\tfor h, v := range m {\n\t\tif minHeight > h || minHeight == -1 {\n\t\t\tminHeight = h\n\t\t}\n\t\tif maxHeight < h || maxHeight == -1 {\n\t\t\tmaxHeight = h\n\t\t}\n\t\tcount += len(v)\n\t}\n\n\t// It is possible that we are at a split, i.e. more than block\n\t// exists at max height. In this specific case (levelDb import),\n\t// we can just delete them until the main chain unity is found.\n\tfor h := maxHeight; len(m[h]) > 1 && h >= minHeight; h-- {\n\t\tlog.Printf(\"Chain is split at heighest height, ignoring height %d\", h)\n\t\tdelete(m, h)\n\t\tmaxHeight--\n\t}\n\n\tprevHash := m[maxHeight][0].PrevHash\n\tfor h := maxHeight - 1; h >= minHeight; h-- {\n\t\tif len(m[h]) > 1 { // More than one block at this height\n\t\t\tfor _, bh := range m[h] {\n\t\t\t\tif bh.Hash() == prevHash {\n\t\t\t\t\tm[h] = []*leveldbBlockHeader{bh}\n\t\t\t\t} else {\n\t\t\t\t\tlog.Printf(\"Ignoring orphan block %v\", bh.Hash())\n\t\t\t\t\tcount--\n\t\t\t\t}\n\t\t\t}\n\t\t\tif len(m[h]) != 1 {\n\t\t\t\treturn count, fmt.Errorf(\"Problem finding valid parent when eliminating orphans.\")\n\t\t\t}\n\t\t}\n\n\t\tif len(m[h]) > 0 {\n\t\t\tprevHash = m[h][0].PrevHash\n\t\t}\n\t}\n\n\treturn count, nil\n}", "title": "" } ]
[ { "docid": "65754fefdf24a652c53f928a8e6ca5b2", "score": "0.62148964", "text": "func (b *BlockChain) WantedOrphan(hash *chainhash.Hash) *chainhash.Hash {\n\t// Protect concurrent access. Using a read lock only so multiple\n\t// readers can query without blocking each other.\n\tb.orphanLock.RLock()\n\tdefer b.orphanLock.RUnlock()\n\n\t// Work back to the first block in the orphan chain\n\tprevHash := hash\n\tfor {\n\t\torphan, exists := b.orphans[*prevHash]\n\t\tif !exists {\n\t\t\tbreak\n\t\t}\n\t\tprevHash = &orphan.block.MsgBlock().Header.PrevBlock\n\t}\n\n\treturn prevHash\n}", "title": "" }, { "docid": "234aa99f6733bbbea53521b59d4f88bf", "score": "0.6023123", "text": "func (bIndex *BlockIndex) GetAncestor(height int32) *BlockIndex {\n\tif height > bIndex.Height || height < 0 {\n\t\treturn nil\n\t}\n\tif height == bIndex.Height{\n\t\treturn bIndex\n\t}\n\tindexWalk := bIndex\n\tfor indexWalk.Prev != nil{\n\t\tif indexWalk.Prev.Height == height{\n\t\t\treturn indexWalk.Prev\n\t\t}\n\t\tindexWalk = indexWalk.Prev\n\t}\n\t// indexWalk := bIndex\n\t// heightWalk := bIndex.Height\n\t// for heightWalk > height {\n\t// \theightSkip := getSkipHeight(heightWalk)\n\t// \theightSkipPrev := getSkipHeight(heightWalk - 1)\n\t// \tif indexWalk.Skip != nil && (heightSkip == height ||\n\t// \t\t(heightSkip > height && !(heightSkipPrev < heightSkip-2 && heightSkipPrev >= height))) {\n\t// \t\t// Only follow skip if prev->skip isn't better than skip->prev.\n\t// \t\tindexWalk = indexWalk.Skip\n\t// \t\theightWalk = indexWalk.Height\n\t// \t} else {\n\t// \t\tif indexWalk.Prev == nil {\n\t// \t\t\tpanic(\"The blockIndex pointer should not be nil\")\n\t// \t\t}\n\t// \t\tindexWalk = indexWalk.Prev\n\t// \t\theightWalk--\n\t// \t}\n\t// }\n\n\treturn indexWalk\n}", "title": "" }, { "docid": "1d0731e67fda05e13334180fe7f87df6", "score": "0.589063", "text": "func findPrevStateHashMutation(stateUpdate *stateUpdateImpl) (hashing.HashValue, error) {\n\th, exists, err := stateUpdate.previousStateHashMutation()\n\tif err != nil {\n\t\treturn hashing.NilHash, err\n\t}\n\tif !exists {\n\t\treturn hashing.NilHash, nil\n\t}\n\treturn h, nil\n}", "title": "" }, { "docid": "cd611975962b218187965667176518ae", "score": "0.5768539", "text": "func (b *BlockGen) PrevBlock(index int) *types.SnailBlock {\n\tif index >= b.i {\n\t\tpanic(\"block index out of range\")\n\t}\n\tif index == -1 {\n\t\treturn b.parent\n\t}\n\treturn b.chain[index]\n}", "title": "" }, { "docid": "c5a79a90ccd8c92111f297bf95cdb8d3", "score": "0.56939083", "text": "func (b *Block) PreviousBlockHash() crypto.Hash { return b.header.PreviousBlockHash }", "title": "" }, { "docid": "981405ed9e89484560d84baf2edb0631", "score": "0.5636012", "text": "func (m *Manager) HandleHeight(previousHeight int64) int64 {\n\tif m.opts.GetPruningStrategy() == types.PruningNothing || previousHeight <= 0 {\n\t\treturn 0\n\t}\n\n\tdefer func() {\n\t\tm.pruneHeightsMx.Lock()\n\t\tdefer m.pruneHeightsMx.Unlock()\n\n\t\tm.pruneSnapshotHeightsMx.Lock()\n\t\tdefer m.pruneSnapshotHeightsMx.Unlock()\n\n\t\t// move persisted snapshot heights to pruneHeights which\n\t\t// represent the heights to be pruned at the next pruning interval.\n\t\tvar next *list.Element\n\t\tfor e := m.pruneSnapshotHeights.Front(); e != nil; e = next {\n\t\t\tsnHeight := e.Value.(int64)\n\t\t\tif snHeight < previousHeight-int64(m.opts.KeepRecent) {\n\t\t\t\tm.pruneHeights = append(m.pruneHeights, snHeight)\n\n\t\t\t\t// We must get next before removing to be able to continue iterating.\n\t\t\t\tnext = 
e.Next()\n\t\t\t\tm.pruneSnapshotHeights.Remove(e)\n\t\t\t} else {\n\t\t\t\tnext = e.Next()\n\t\t\t}\n\t\t}\n\n\t\t// flush the updates to disk so that they are not lost if crash happens.\n\t\tif err := m.db.SetSync(pruneHeightsKey, int64SliceToBytes(m.pruneHeights)); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tif int64(m.opts.KeepRecent) < previousHeight {\n\t\tpruneHeight := previousHeight - int64(m.opts.KeepRecent)\n\t\t// We consider this height to be pruned iff:\n\t\t//\n\t\t// - snapshotInterval is zero as that means that all heights should be pruned.\n\t\t// - snapshotInterval % (height - KeepRecent) != 0 as that means the height is not\n\t\t// a 'snapshot' height.\n\t\tif m.snapshotInterval == 0 || pruneHeight%int64(m.snapshotInterval) != 0 {\n\t\t\tm.pruneHeightsMx.Lock()\n\t\t\tdefer m.pruneHeightsMx.Unlock()\n\n\t\t\tm.pruneHeights = append(m.pruneHeights, pruneHeight)\n\t\t\treturn pruneHeight\n\t\t}\n\t}\n\treturn 0\n}", "title": "" }, { "docid": "3374cae58ba0d50209675c86c46d7022", "score": "0.5511322", "text": "func (tree *ByPrev) Remove(key common.BlockIdType) {\n\tif true {\n\t\tfor lower := tree.LowerBound(key); lower.position != endByPrev; {\n\t\t\tif ByPrevCompare(lower.Key(), key) == 0 {\n\t\t\t\tnode := lower.node\n\t\t\t\tlower.Next()\n\t\t\t\ttree.remove(node)\n\t\t\t} else {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t} else {\n\t\tnode := tree.lookup(key)\n\t\ttree.remove(node)\n\t}\n}", "title": "" }, { "docid": "5a865f39145e5f479ad5790709a337bb", "score": "0.54139566", "text": "func (sm *syncManager) antiPastHashesBetween(stagingArea *model.StagingArea, lowHash, highHash *externalapi.DomainHash,\n\tmaxBlueScoreDifference uint64) (hashes []*externalapi.DomainHash, actualHighHash *externalapi.DomainHash, err error) {\n\n\t// If lowHash is not in the selectedParentChain of highHash - SelectedChildIterator will fail.\n\t// Therefore, we traverse down lowHash's selectedParentChain until we reach a block that is in\n\t// highHash's selectedParentChain.\n\t// We keep originalLowHash to filter out blocks in it's past later down the road\n\toriginalLowHash := lowHash\n\tlowHash, err = sm.findLowHashInHighHashSelectedParentChain(stagingArea, lowHash, highHash)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tlowBlockGHOSTDAGData, err := sm.ghostdagDataStore.Get(sm.databaseContext, stagingArea, lowHash)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\thighBlockGHOSTDAGData, err := sm.ghostdagDataStore.Get(sm.databaseContext, stagingArea, highHash)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tif lowBlockGHOSTDAGData.BlueScore() > highBlockGHOSTDAGData.BlueScore() {\n\t\treturn nil, nil, errors.Errorf(\"low hash blueScore > high hash blueScore (%d > %d)\",\n\t\t\tlowBlockGHOSTDAGData.BlueScore(), highBlockGHOSTDAGData.BlueScore())\n\t}\n\n\tif maxBlueScoreDifference != 0 {\n\t\t// In order to get no more then maxBlueScoreDifference\n\t\t// blocks from the future of the lowHash (including itself),\n\t\t// we iterate the selected parent chain of the highNode and\n\t\t// stop once we reach\n\t\t// highBlockBlueScore-lowBlockBlueScore+1 <= maxBlueScoreDifference.\n\t\t// That stop point becomes the new highHash.\n\t\t// Using blueScore as an approximation is considered to be\n\t\t// fairly accurate because we presume that most DAG blocks are\n\t\t// blue.\n\t\thighHash, err = sm.findHighHashAccordingToMaxBlueScoreDifference(stagingArea,\n\t\t\tlowHash, highHash, maxBlueScoreDifference, highBlockGHOSTDAGData, lowBlockGHOSTDAGData)\n\t\tif err != nil {\n\t\t\treturn 
nil, nil, err\n\t\t}\n\t}\n\n\t// Collect all hashes by concatenating the merge-sets of all blocks between highHash and lowHash\n\tblockHashes := []*externalapi.DomainHash{}\n\titerator, err := sm.dagTraversalManager.SelectedChildIterator(stagingArea, highHash, lowHash)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tdefer iterator.Close()\n\tfor ok := iterator.First(); ok; ok = iterator.Next() {\n\t\tcurrent, err := iterator.Get()\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\t\t// Both blue and red merge sets are topologically sorted, but not the concatenation of the two.\n\t\t// We require the blocks to be topologically sorted. In addition, for optimal performance,\n\t\t// we want the selectedParent to be first.\n\t\t// Since the rest of the merge set is in the anticone of selectedParent, it's position in the list does not\n\t\t// matter, even though it's blue score is the highest, we can arbitrarily decide it comes first.\n\t\t// Therefore we first append the selectedParent, then the rest of blocks in ghostdag order.\n\t\tsortedMergeSet, err := sm.getSortedMergeSet(stagingArea, current)\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\n\t\t// append to blockHashes all blocks in sortedMergeSet which are not in the past of originalLowHash\n\t\tfor _, blockHash := range sortedMergeSet {\n\t\t\tisInPastOfOriginalLowHash, err := sm.dagTopologyManager.IsAncestorOf(stagingArea, blockHash, originalLowHash)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tif isInPastOfOriginalLowHash {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tblockHashes = append(blockHashes, blockHash)\n\t\t}\n\t}\n\n\t// The process above doesn't return highHash, so include it explicitly, unless highHash == lowHash\n\tif !lowHash.Equal(highHash) {\n\t\tblockHashes = append(blockHashes, highHash)\n\t}\n\n\treturn blockHashes, highHash, nil\n}", "title": "" }, { "docid": "b624a66b2bb28c292343cd807ac1373b", "score": "0.5358613", "text": "func (client *client) switchHashMatchedBlock(currSeq int64) (int64, []byte, error) {\n\tlastBlock, err := client.RequestLastBlock()\n\tif err != nil {\n\t\tplog.Error(\"Parachain RequestLastBlock fail\", \"err\", err)\n\t\treturn -2, nil, err\n\t}\n\n\tif lastBlock.Height == 0 {\n\t\treturn client.syncFromGenesisBlock()\n\t}\n\n\tdepth := searchHashMatchDepth\n\tfor height := lastBlock.Height; height > 0 && depth > 0; height-- {\n\t\tblock, err := client.GetBlockByHeight(height)\n\t\tif err != nil {\n\t\t\treturn -2, nil, err\n\t\t}\n\t\tminer, err := getMinerTxInfo(block)\n\t\tif err != nil {\n\t\t\treturn -2, nil, err\n\t\t}\n\t\tplog.Info(\"switchHashMatchedBlock\", \"lastParaBlock height\", miner.Height, \"mainHeight\",\n\t\t\tminer.MainBlockHeight, \"mainHash\", common.Bytes2Hex(miner.MainBlockHash))\n\t\tmainSeq, err := client.GetSeqByHashOnMainChain(miner.MainBlockHash)\n\t\tif err != nil {\n\t\t\tdepth--\n\t\t\tif depth == 0 {\n\t\t\t\tplog.Error(\"switchHashMatchedBlock depth overflow\", \"last info:mainHeight\", miner.MainBlockHeight,\n\t\t\t\t\t\"mainHash\", common.Bytes2Hex(miner.MainBlockHash), \"search startHeight\", lastBlock.Height, \"curHeight\", miner.Height,\n\t\t\t\t\t\"search depth\", searchHashMatchDepth)\n\t\t\t\tpanic(\"search HashMatchedBlock overflow, re-setting search depth and restart to try\")\n\t\t\t}\n\t\t\tif height == 1 {\n\t\t\t\tplog.Error(\"switchHashMatchedBlock search to height=1 not found\", \"lastBlockHeight\", lastBlock.Height,\n\t\t\t\t\t\"height1 mainHash\", common.Bytes2Hex(miner.MainBlockHash))\n\t\t\t\terr = 
client.removeBlocks(0)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn currSeq, nil, nil\n\t\t\t\t}\n\t\t\t\treturn client.syncFromGenesisBlock()\n\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\t//remove fail, the para chain may be remove part, set the preMainBlockHash to nil, to match nothing, force to search from last\n\t\terr = client.removeBlocks(height)\n\t\tif err != nil {\n\t\t\treturn currSeq, nil, nil\n\t\t}\n\n\t\tplog.Info(\"switchHashMatchedBlock succ\", \"currHeight\", height, \"initHeight\", lastBlock.Height,\n\t\t\t\"new currSeq\", mainSeq+1, \"new preMainBlockHash\", common.Bytes2Hex(miner.MainBlockHash))\n\t\treturn mainSeq + 1, miner.MainBlockHash, nil\n\t}\n\treturn -2, nil, paracross.ErrParaCurHashNotMatch\n}", "title": "" }, { "docid": "5d53c4bf3b3c368481dee5efb76fa6aa", "score": "0.5221709", "text": "func (o *OrphanManage) GetPrevOrphans(hash *bc.Hash) ([]*bc.Hash, bool) {\n\to.mtx.RLock()\n\tprevOrphans, ok := o.prevOrphans[*hash]\n\to.mtx.RUnlock()\n\treturn prevOrphans, ok\n}", "title": "" }, { "docid": "78ae3df485c486c2010e3cd54f807ad1", "score": "0.5210137", "text": "func _findPrevTestNetDifficulty(startNode *BlockNode, params *BitCloutParams) *BlockHash {\n\tpowLimitHash := _difficultyBitsToHash(params.BitcoinPowLimitBits)\n\n\t// Search backwards through the chain for the last block without\n\t// the special rule applied.\n\titerNode := startNode\n\t// The node stores a difficulty block hash.\n\t// Convert it to bigint.\n\t// Convert the bigint to bits.\n\tfor iterNode != nil && iterNode.Height%params.BitcoinBlocksPerRetarget != 0 &&\n\t\t*iterNode.DifficultyTarget == *powLimitHash {\n\n\t\titerNode = iterNode.Parent\n\t}\n\n\t// Return the found difficulty or the minimum difficulty if no\n\t// appropriate block was found.\n\tlastDiffHash := powLimitHash\n\tif iterNode != nil {\n\t\tlastDiffHash = iterNode.DifficultyTarget\n\t}\n\treturn lastDiffHash\n}", "title": "" }, { "docid": "0dbfa84f7d9e90e6c5f9f7411ec8ac52", "score": "0.5183821", "text": "func FindCommonAncestor(db DatabaseReader, a, b *protocol.Header) *protocol.Header {\n\tfor bn := b.Number.Uint64(); a.Number.Uint64() > bn; {\n\t\ta = GetHeader(db, a.UncleHash, a.Number.Uint64()-1)\n\t\tif a == nil {\n\t\t\treturn nil\n\t\t}\n\t}\n\tfor an := a.Number.Uint64(); an < b.Number.Uint64(); {\n\t\tb = GetHeader(db, b.UncleHash, b.Number.Uint64()-1)\n\t\tif b == nil {\n\t\t\treturn nil\n\t\t}\n\t}\n\tfor a.Hash() != b.Hash() {\n\t\ta = GetHeader(db, a.UncleHash, a.Number.Uint64()-1)\n\t\tif a == nil {\n\t\t\treturn nil\n\t\t}\n\t\tb = GetHeader(db, b.UncleHash, b.Number.Uint64()-1)\n\t\tif b == nil {\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn a\n}", "title": "" }, { "docid": "02c49f8424c15a48714aec0850542d62", "score": "0.5179811", "text": "func (m *MergingIter) prevEntry() {\n\tl := &m.levels[m.heap.items[0].index]\n\tl.prev()\n\tif !l.heapKey.valid() {\n\t\t// l.iter is exhausted.\n\t\tm.err = l.iter.Error()\n\t\tif m.err == nil {\n\t\t\tm.heap.pop()\n\t\t}\n\t\treturn\n\t}\n\n\tif m.heap.len() > 1 {\n\t\tm.heap.fix(0)\n\t}\n}", "title": "" }, { "docid": "b7cb7ec7c65899f90f1806765c0dba75", "score": "0.51556545", "text": "func (m *MergingIter) Prev() *Span {\n\tif m.err != nil {\n\t\treturn nil\n\t}\n\tif m.dir == -1 && (m.end == nil || m.start == nil) {\n\t\treturn nil\n\t}\n\tif m.dir != -1 {\n\t\tm.switchToMaxHeap()\n\t}\n\treturn m.findPrevFragmentSet()\n}", "title": "" }, { "docid": "874d3e3a2f297074112a9226c572d25a", "score": "0.51427466", "text": "func (n *NullPersister) LastBlockHash(eventType string, 
contractAddress common.Address) common.Hash {\n\treturn common.Hash{}\n}", "title": "" }, { "docid": "a915ea962f802cf5e85a483c35f5ecb5", "score": "0.5131202", "text": "func recalcDepth(peers *pslice.PSlice) uint8 {\n\t// handle edge case separately\n\tif peers.Length() <= nnLowWatermark {\n\t\treturn 0\n\t}\n\tvar (\n\t\tpeersCtr = uint(0)\n\t\tcandidate = uint8(0)\n\t\tshallowestEmpty, noEmptyBins = peers.ShallowestEmpty()\n\t)\n\n\t_ = peers.EachBin(func(_ swarm.Address, po uint8) (bool, bool, error) {\n\t\tpeersCtr++\n\t\tif peersCtr >= nnLowWatermark {\n\t\t\tcandidate = po\n\t\t\treturn true, false, nil\n\t\t}\n\t\treturn false, false, nil\n\t})\n\n\tif noEmptyBins || shallowestEmpty > candidate {\n\t\treturn candidate\n\t}\n\n\treturn shallowestEmpty\n}", "title": "" }, { "docid": "67da3321f2760852dec3ab1be943924c", "score": "0.51186115", "text": "func (b *Block) ParentHash() common.Hash {\n\treturn b.header.ParentHash\n}", "title": "" }, { "docid": "8ad7712fd9c6f512053c7b5e7db87525", "score": "0.5116598", "text": "func (chain *BlockChain) FindParent(bh *BlockHandle) (*BlockHandle, error) {\n\tparent_hash := bh.Header.PrevBlock\n\tparent, err := chain.Get(&parent_hash)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn parent, nil\n}", "title": "" }, { "docid": "d178df6993552683107d953203c13047", "score": "0.5098729", "text": "func (s *segment) prev() *segment {\n\tif s == nil || s.parent == nil || len(s.parent.children) == 0 || s.parent.children[0] == s {\n\t\treturn nil\n\t}\n\tfor i, t := range s.parent.children {\n\t\tif t == s {\n\t\t\treturn s.parent.children[i-1]\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "781cd05f4676a07a9dd4977ab590901b", "score": "0.5052287", "text": "func (m *Monitor) IngestPrevBlock(height int64) error {\n\tprevHeight := height - 1\n\n\t// Get validator set for the block\n\tvalidators, err := m.client.Validators(&prevHeight)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Query the previous block\n\tblock, err := m.client.Block(&prevHeight)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Query the next block to access the commits\n\tnextBlock, err := m.client.Block(&height)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Parse blockinfo\n\tblockInfo := new(ctypes.BlockInfo)\n\tblockInfo.ID = block.BlockMeta.BlockID.String()\n\tblockInfo.Height = height\n\tblockInfo.Time = block.BlockMeta.Header.Time\n\tblockInfo.Proposer = block.Block.ProposerAddress.String()\n\n\t// Identify missed validators\n\tmissedValidators := make([]*ctypes.MissInfo, 0)\n\n\tfor i, validator := range validators.Validators {\n\t\tif nextBlock.Block.LastCommit.Precommits[i] == nil {\n\t\t\tmissed := &ctypes.MissInfo{\n\t\t\t\tHeight: block.BlockMeta.Header.Height,\n\t\t\t\tAddress: validator.Address.String(),\n\t\t\t\tAlerted: false,\n\t\t\t\tTime: block.BlockMeta.Header.Time,\n\t\t\t\tProposer: block.BlockMeta.Header.ProposerAddress.String(),\n\t\t\t}\n\t\t\tmissedValidators = append(missedValidators, missed)\n\t\t\tcontinue\n\t\t}\n\t}\n\n\t// Collect evidence\n\tevidenceInfo := make([]*ctypes.EvidenceInfo, 0)\n\tfor _, evidence := range nextBlock.Block.Evidence.Evidence {\n\t\tevInfo := &ctypes.EvidenceInfo{}\n\t\tevInfo.Address = types.Address(evidence.Address()).String()\n\t\tevInfo.Height = evidence.Height()\n\t\tevidenceInfo = append(evidenceInfo, evInfo)\n\t}\n\n\t// Insert in DB\n\terr = m.db.RunInTransaction(func(tx *pg.Tx) error {\n\t\t// Insert blockinfo\n\t\terr = tx.Insert(blockInfo)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Insert 
evidence\n\t\tif len(evidenceInfo) > 0 {\n\t\t\terr = tx.Insert(&evidenceInfo)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\t// Insert missed signatures\n\t\tif len(missedValidators) > 0 {\n\t\t\terr = tx.Insert(&missedValidators)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "4d6865c0ebc50a4d165cbd7da89b55da", "score": "0.50254023", "text": "func (set *HashSet[T, U]) Remove(hash HashValue, ptr T) U {\n\tidx := uint(hash) % set.size\n\n\tbin := set.table[idx]\n\tprevPtr := &set.table[idx]\n\n\t// Find the bin\n\tfor bin != nil && !set.isEqual(ptr, bin.elt) {\n\t\tprevPtr = &bin.next\n\t\tbin = bin.next\n\t}\n\n\t// Remove the bin if it exists\n\tif bin != nil {\n\t\t// Update the previous linked list pointer\n\t\t*prevPtr = bin.next\n\t\tset.entries--\n\n\t\telt := bin.elt\n\t\tset.recycle(bin)\n\n\t\treturn elt\n\t}\n\n\tvar zero U\n\treturn zero\n}", "title": "" }, { "docid": "dd44409e62a34b90d45d780fdb164d90", "score": "0.50220877", "text": "func (b *blockManager) removeOrphanBlock(orphan *orphanBlock) {\n\t// Protect concurrent access.\n\tb.orphanLock.Lock()\n\tdefer b.orphanLock.Unlock()\n\n\t// Remove the orphan block from the orphan pool.\n\torphanHash := orphan.block.Hash()\n\tdelete(b.orphans, *orphanHash)\n\n\t// Remove the reference from the previous orphan index too. An indexing\n\t// for loop is intentionally used over a range here as range does not\n\t// reevaluate the slice on each iteration nor does it adjust the index\n\t// for the modified slice.\n\tprevHash := &orphan.block.MsgBlock().Header.PrevBlock\n\torphans := b.prevOrphans[*prevHash]\n\tfor i := 0; i < len(orphans); i++ {\n\t\thash := orphans[i].block.Hash()\n\t\tif hash.IsEqual(orphanHash) {\n\t\t\tcopy(orphans[i:], orphans[i+1:])\n\t\t\torphans[len(orphans)-1] = nil\n\t\t\torphans = orphans[:len(orphans)-1]\n\t\t\ti--\n\t\t}\n\t}\n\tb.prevOrphans[*prevHash] = orphans\n\n\t// Remove the map entry altogether if there are no longer any orphans\n\t// which depend on the parent hash.\n\tif len(b.prevOrphans[*prevHash]) == 0 {\n\t\tdelete(b.prevOrphans, *prevHash)\n\t}\n}", "title": "" }, { "docid": "7c6e5297f329fbac16cefa2054ee42e3", "score": "0.50183415", "text": "func (n *Neo) GetBlockAtHeight(height uint64) (WrkChainBlockHeader, error) {\n\n\tqueryUrl := viper.GetString(types.FlagWrkchainRpc)\n\n\tvar jsonStr []byte\n\tatHeight, err := n.getLatestBlockHash()\n\tif err != nil {\n\t\treturn WrkChainBlockHeader{}, err\n\t}\n\n\tif height > 0 {\n\t\tatHeight = strconv.FormatUint(height, 10)\n\t\tjsonStr = []byte(`{\"jsonrpc\":\"2.0\",\"method\":\"getblock\",\"params\":[` + atHeight + `,1], \"id\": 1}`)\n\t} else {\n\t\tjsonStr = []byte(`{\"jsonrpc\":\"2.0\",\"method\":\"getblock\",\"params\":[\"` + atHeight + `\",1], \"id\": 1}`)\n\t}\n\n\tresp, err := http.Post(queryUrl, \"application/json\", bytes.NewBuffer(jsonStr))\n\tif err != nil {\n\t\treturn WrkChainBlockHeader{}, err\n\t}\n\tdefer resp.Body.Close()\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn WrkChainBlockHeader{}, err\n\t}\n\n\tvar res NeoBlockHeaderResult\n\terr = json.Unmarshal(body, &res)\n\tif err != nil {\n\t\treturn WrkChainBlockHeader{}, err\n\t}\n\n\theader := res.Result\n\n\tif err != nil {\n\t\treturn WrkChainBlockHeader{}, err\n\t}\n\n\tblockHash := header.Hash\n\tparentHash := \"\"\n\thash1 := \"\"\n\thash2 := \"\"\n\thash3 := \"\"\n\tblockHeight := header.Index\n\n\tif height == 
0 {\n\t\tn.lastHeight = blockHeight\n\t}\n\n\tif viper.GetBool(types.FlagParentHash) {\n\t\tparentHash = header.PreviousHash\n\t}\n\n\thash1Ref := viper.GetString(types.FlagHash1)\n\thash2Ref := viper.GetString(types.FlagHash2)\n\thash3Ref := viper.GetString(types.FlagHash3)\n\n\tif len(hash1Ref) > 0 {\n\t\thash1 = n.gethash(header, hash1Ref)\n\t}\n\n\tif len(hash2Ref) > 0 {\n\t\thash2 = n.gethash(header, hash2Ref)\n\t}\n\n\tif len(hash3Ref) > 0 {\n\t\thash3 = n.gethash(header, hash3Ref)\n\t}\n\n\twrkchainBlock := NewWrkChainBlockHeader(blockHeight, blockHash, parentHash, hash1, hash2, hash3)\n\n\treturn wrkchainBlock, nil\n}", "title": "" }, { "docid": "d9444d1cb87426cdbc2fc4cfb3d06d8a", "score": "0.50150245", "text": "func DeleteDownToBlock(finalBlockNumber uint64) error {\n\tglobalData.Lock()\n\tdefer globalData.Unlock()\n\n\tlog := globalData.log\n\n\tlog.Infof(\"Delete down to block: %d\", finalBlockNumber)\n\n\tlast, ok := storage.Pool.Blocks.LastElement()\n\tif !ok {\n\t\treturn nil // block store is already empty\n\t}\n\n\treservoir.Disable()\n\tdefer reservoir.Enable()\n\n\tpackedBlock := last.Value\n\tbr := blockrecord.Get()\n\nouter_loop:\n\tfor {\n\t\theader, digest, data, err := br.ExtractHeader(packedBlock, 0, false)\n\t\tif nil != err {\n\t\t\tlog.Criticalf(\"failed to unpack block: %d from storage error: %s\", binary.BigEndian.Uint64(last.Key), err)\n\t\t\treturn err\n\t\t}\n\n\t\t// finished\n\t\tif header.Number < finalBlockNumber {\n\t\t\tblockheader.Set(header.Number, digest, header.Version, header.Timestamp)\n\t\t\tlog.Infof(\"finish: _NOT_ Deleting: %d\", header.Number)\n\n\t\t\tif blockrecord.IsDifficultyAppliedVersion(header.Version) {\n\t\t\t\tif header.Number >= difficulty.AdjustTimespanInBlocks {\n\t\t\t\t\tnextDifficulty, prevDifficulty, err := blockrecord.AdjustDifficultyAtBlock(header.Number)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tlog.Errorf(\"failed to adjust difficulty with error: %s\", err)\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t\tlog.Infof(\"set new difficulty to %f, previous difficulty %f\", nextDifficulty, prevDifficulty)\n\t\t\t\t} else {\n\t\t\t\t\t// in case fork happens around first difficulty adjust, difficulty might be changed and\n\t\t\t\t\t// delete down blocks below adjustment block, leave difficulty different than other nodes\n\t\t\t\t\tblockrecord.ResetDifficulty()\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn nil\n\t\t}\n\n\t\tlog.Infof(\"Delete block: %d transactions: %d\", header.Number, header.TransactionCount)\n\n\t\t// record block owner\n\t\tvar blockOwner *account.Account\n\n\t\t// handle packed transactions\n\tinner_loop:\n\t\tfor i := 1; true; i += 1 {\n\t\t\t// start db transaction by block & index db\n\t\t\ttrx, err := storage.NewDBTransaction()\n\t\t\tif nil != err {\n\t\t\t\tlog.Errorf(\"cannot create transaction: error: %s\", i, err)\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\ttransaction, n, err := transactionrecord.Packed(data).Unpack(mode.IsTesting())\n\t\t\tif nil != err {\n\t\t\t\ttrx.Abort()\n\t\t\t\tlog.Warnf(\"invalid tx[%d]: error: %s\", i, err)\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tpackedTransaction := transactionrecord.Packed(data[:n])\n\t\t\tswitch tx := transaction.(type) {\n\t\t\tcase *transactionrecord.OldBaseData:\n\t\t\t\tif nil == blockOwner {\n\t\t\t\t\tblockOwner = tx.Owner\n\t\t\t\t}\n\t\t\t\t// delete later\n\n\t\t\tcase *transactionrecord.AssetData:\n\t\t\t\tassetId := tx.AssetId()\n\t\t\t\ttrx.Delete(storage.Pool.Assets, assetId[:])\n\t\t\t\tasset.Delete(assetId)\n\n\t\t\tcase 
*transactionrecord.BitmarkIssue:\n\t\t\t\ttxId := packedTransaction.MakeLink()\n\t\t\t\treservoir.DeleteByTxId(txId)\n\t\t\t\tif storage.Pool.Transactions.Has(txId[:]) {\n\t\t\t\t\ttrx.Delete(storage.Pool.Transactions, txId[:])\n\t\t\t\t\townership.Transfer(trx, txId, txId, 0, tx.Owner, nil)\n\t\t\t\t}\n\n\t\t\tcase *transactionrecord.BitmarkTransferUnratified, *transactionrecord.BitmarkTransferCountersigned:\n\t\t\t\ttr := tx.(transactionrecord.BitmarkTransfer)\n\t\t\t\ttxId := packedTransaction.MakeLink()\n\t\t\t\ttrx.Delete(storage.Pool.Transactions, txId[:])\n\t\t\t\treservoir.DeleteByTxId(txId)\n\t\t\t\tlink := tr.GetLink()\n\t\t\t\tblockNumber, linkOwner := ownership.OwnerOf(trx, link)\n\t\t\t\tif nil == linkOwner {\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlog.Criticalf(\"missing transaction record for: %v\", link)\n\t\t\t\t\tlogger.Panic(\"Transactions database is corrupt\")\n\t\t\t\t}\n\t\t\t\townership.Transfer(trx, txId, link, blockNumber, tr.GetOwner(), linkOwner)\n\n\t\t\tcase *transactionrecord.BlockFoundation:\n\t\t\t\tif nil == blockOwner {\n\t\t\t\t\tblockOwner = tx.Owner\n\t\t\t\t}\n\t\t\t\t// delete later\n\n\t\t\tcase *transactionrecord.BlockOwnerTransfer:\n\t\t\t\ttxId := packedTransaction.MakeLink()\n\t\t\t\ttrx.Delete(storage.Pool.Transactions, txId[:])\n\t\t\t\treservoir.DeleteByTxId(txId)\n\t\t\t\tblockNumber, linkOwner := ownership.OwnerOf(trx, tx.Link)\n\t\t\t\tif nil == linkOwner {\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlog.Criticalf(\"missing transaction record for: %v\", tx.Link)\n\t\t\t\t\tlogger.Panic(\"Transactions database is corrupt\")\n\t\t\t\t}\n\t\t\t\townerdata, err := ownership.GetOwnerDataB(trx, txId[:], storage.Pool.OwnerData)\n\t\t\t\tif nil != err {\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlog.Criticalf(\"missing ownership for: %s\", txId)\n\t\t\t\t\tlogger.Panic(\"Ownership database is corrupt\")\n\t\t\t\t}\n\t\t\t\tblockOwnerdata, ok := ownerdata.(*ownership.BlockOwnerData)\n\t\t\t\tif !ok {\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlog.Criticalf(\"expected block ownership but read: %+v\", ownerdata)\n\t\t\t\t\tlogger.Panic(\"Ownership database is corrupt\")\n\t\t\t\t}\n\n\t\t\t\townership.Transfer(trx, txId, tx.Link, blockNumber, tx.Owner, linkOwner)\n\n\t\t\t\tblockNumberKey := make([]byte, 8)\n\t\t\t\tbinary.BigEndian.PutUint64(blockNumberKey, blockOwnerdata.IssueBlockNumber())\n\n\t\t\t\t// put block ownership back\n\t\t\t\t_, previous := trx.GetNB(storage.Pool.Transactions, tx.Link[:])\n\n\t\t\t\tblockTransaction, _, err := transactionrecord.Packed(previous).Unpack(mode.IsTesting())\n\t\t\t\tif nil != err {\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlogger.Criticalf(\"invalid error: %s\", txId, err)\n\t\t\t\t\tlogger.Panic(\"Transaction database is corrupt\")\n\t\t\t\t}\n\t\t\t\tswitch prevTx := blockTransaction.(type) {\n\t\t\t\tcase *transactionrecord.BlockFoundation:\n\t\t\t\t\terr := transactionrecord.CheckPayments(prevTx.Version, mode.IsTesting(), prevTx.Payments)\n\t\t\t\t\tif nil != err {\n\t\t\t\t\t\ttrx.Abort()\n\t\t\t\t\t\tlogger.Criticalf(\"invalid tx id: %s error: %s\", txId, err)\n\t\t\t\t\t\tlogger.Panic(\"Transaction database is corrupt\")\n\t\t\t\t\t}\n\t\t\t\t\tpackedPayments, err := prevTx.Payments.Pack(mode.IsTesting())\n\t\t\t\t\tif nil != err {\n\t\t\t\t\t\ttrx.Abort()\n\t\t\t\t\t\tlogger.Criticalf(\"invalid tx id: %s error: %s\", txId, err)\n\t\t\t\t\t\tlogger.Panic(\"Transaction database is corrupt\")\n\t\t\t\t\t}\n\t\t\t\t\t// payment 
data\n\t\t\t\t\ttrx.Put(\n\t\t\t\t\t\tstorage.Pool.BlockOwnerPayment,\n\t\t\t\t\t\tblockNumberKey,\n\t\t\t\t\t\tpackedPayments,\n\t\t\t\t\t\t[]byte{},\n\t\t\t\t\t)\n\t\t\t\t\ttrx.Put(\n\t\t\t\t\t\tstorage.Pool.BlockOwnerTxIndex,\n\t\t\t\t\t\ttx.Link[:],\n\t\t\t\t\t\tblockNumberKey,\n\t\t\t\t\t\t[]byte{},\n\t\t\t\t\t)\n\t\t\t\t\ttrx.Delete(storage.Pool.BlockOwnerTxIndex, txId[:])\n\n\t\t\t\tcase *transactionrecord.BlockOwnerTransfer:\n\t\t\t\t\terr := transactionrecord.CheckPayments(prevTx.Version, mode.IsTesting(), prevTx.Payments)\n\t\t\t\t\tif nil != err {\n\t\t\t\t\t\ttrx.Abort()\n\t\t\t\t\t\tlogger.Criticalf(\"invalid tx id: %s error: %s\", txId, err)\n\t\t\t\t\t\tlogger.Panic(\"Transaction database is corrupt\")\n\t\t\t\t\t}\n\t\t\t\t\tpackedPayments, err := prevTx.Payments.Pack(mode.IsTesting())\n\t\t\t\t\tif nil != err {\n\t\t\t\t\t\ttrx.Abort()\n\t\t\t\t\t\tlogger.Criticalf(\"invalid tx id: %s error: %s\", txId, err)\n\t\t\t\t\t\tlogger.Panic(\"Transaction database is corrupt\")\n\t\t\t\t\t}\n\t\t\t\t\t// payment data\n\t\t\t\t\ttrx.Put(storage.Pool.BlockOwnerPayment, blockNumberKey, packedPayments, []byte{})\n\t\t\t\t\ttrx.Put(storage.Pool.BlockOwnerTxIndex, tx.Link[:], blockNumberKey, []byte{})\n\t\t\t\t\ttrx.Delete(storage.Pool.BlockOwnerTxIndex, txId[:])\n\n\t\t\t\tdefault:\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlogger.Criticalf(\"invalid block transfer link: %+v\", prevTx)\n\t\t\t\t\tlogger.Panic(\"Transaction database is corrupt\")\n\t\t\t\t}\n\n\t\t\tcase *transactionrecord.BitmarkShare:\n\t\t\t\ttxId := packedTransaction.MakeLink()\n\t\t\t\tblockNumber, linkOwner := ownership.OwnerOf(trx, tx.Link)\n\t\t\t\tif nil == linkOwner {\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlog.Criticalf(\"missing transaction record for: %v\", tx.Link)\n\t\t\t\t\tlogger.Panic(\"Transactions database is corrupt\")\n\t\t\t\t}\n\n\t\t\t\townerData, err := ownership.GetOwnerData(trx, txId, storage.Pool.OwnerData)\n\t\t\t\tif nil != err {\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlogger.Criticalf(\"invalid ownerData for tx id: %s\", txId)\n\t\t\t\t\tlogger.Panic(\"Ownership database is corrupt\")\n\t\t\t\t}\n\t\t\t\tshareData, ok := ownerData.(*ownership.ShareOwnerData)\n\t\t\t\tif !ok {\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlogger.Criticalf(\"invalid ownerData: %+v for tx id: %s\", ownerData, txId)\n\t\t\t\t\tlogger.Panic(\"Ownership database is corrupt\")\n\t\t\t\t}\n\n\t\t\t\ttrx.Delete(storage.Pool.Transactions, txId[:])\n\t\t\t\treservoir.DeleteByTxId(txId)\n\n\t\t\t\tshareId := shareData.IssueTxId()\n\n\t\t\t\tfKey := append(linkOwner.Bytes(), shareId[:]...)\n\t\t\t\ttrx.Delete(storage.Pool.Shares, shareId[:])\n\t\t\t\ttrx.Delete(storage.Pool.ShareQuantity, fKey)\n\n\t\t\t\townership.Transfer(trx, txId, tx.Link, blockNumber, linkOwner, linkOwner)\n\n\t\t\tcase *transactionrecord.ShareGrant:\n\n\t\t\t\ttxId := packedTransaction.MakeLink()\n\n\t\t\t\ttrx.Delete(storage.Pool.Transactions, txId[:])\n\t\t\t\treservoir.DeleteByTxId(txId)\n\n\t\t\t\toKey := append(tx.Owner.Bytes(), tx.ShareId[:]...)\n\t\t\t\trKey := append(tx.Recipient.Bytes(), tx.ShareId[:]...)\n\n\t\t\t\t// this could be zero\n\t\t\t\toAccountBalance, _ := trx.GetN(storage.Pool.ShareQuantity, oKey)\n\n\t\t\t\t// this cannot be zero\n\t\t\t\trAccountBalance, ok := trx.GetN(storage.Pool.ShareQuantity, rKey)\n\t\t\t\tif !ok {\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlog.Criticalf(\"missing balance record for: %v share id: %x\", tx.Recipient, tx.ShareId)\n\t\t\t\t\tlogger.Panic(\"ShareQuantity database is corrupt\")\n\t\t\t\t}\n\n\t\t\t\t// owner, share ← 
recipient\n\t\t\t\trAccountBalance -= tx.Quantity\n\t\t\t\toAccountBalance += tx.Quantity\n\n\t\t\t\t// update balances\n\t\t\t\tif 0 == rAccountBalance {\n\t\t\t\t\ttrx.Delete(storage.Pool.ShareQuantity, rKey)\n\t\t\t\t} else {\n\t\t\t\t\ttrx.PutN(storage.Pool.ShareQuantity, rKey, rAccountBalance)\n\t\t\t\t}\n\t\t\t\ttrx.PutN(storage.Pool.ShareQuantity, oKey, oAccountBalance)\n\n\t\t\tcase *transactionrecord.ShareSwap:\n\n\t\t\t\ttxId := packedTransaction.MakeLink()\n\n\t\t\t\ttrx.Delete(storage.Pool.Transactions, txId[:])\n\t\t\t\treservoir.DeleteByTxId(txId)\n\n\t\t\t\townerOneShareOneKey := append(tx.OwnerOne.Bytes(), tx.ShareIdOne[:]...)\n\t\t\t\townerOneShareTwoKey := append(tx.OwnerOne.Bytes(), tx.ShareIdTwo[:]...)\n\t\t\t\townerTwoShareOneKey := append(tx.OwnerTwo.Bytes(), tx.ShareIdOne[:]...)\n\t\t\t\townerTwoShareTwoKey := append(tx.OwnerTwo.Bytes(), tx.ShareIdTwo[:]...)\n\n\t\t\t\t// either of these balances could be zero\n\t\t\t\townerOneShareOneAccountBalance, _ := trx.GetN(storage.Pool.ShareQuantity, ownerOneShareOneKey)\n\t\t\t\townerTwoShareTwoAccountBalance, _ := trx.GetN(storage.Pool.ShareQuantity, ownerTwoShareTwoKey)\n\n\t\t\t\t// these balances cannot be zero\n\t\t\t\townerOneShareTwoAccountBalance, ok := trx.GetN(storage.Pool.ShareQuantity, ownerOneShareTwoKey)\n\t\t\t\tif !ok {\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlog.Criticalf(\"missing balance record for owner 1: %v share id 2: %x\", tx.OwnerOne, tx.ShareIdTwo)\n\t\t\t\t\tlogger.Panic(\"ShareQuantity database is corrupt\")\n\t\t\t\t}\n\t\t\t\townerTwoShareOneAccountBalance, ok := trx.GetN(storage.Pool.ShareQuantity, ownerTwoShareOneKey)\n\t\t\t\tif !ok {\n\t\t\t\t\ttrx.Abort()\n\t\t\t\t\tlog.Criticalf(\"missing balance record for owner 2: %v share id 1: %x\", tx.OwnerTwo, tx.ShareIdOne)\n\t\t\t\t\tlogger.Panic(\"ShareQuantity database is corrupt\")\n\t\t\t\t}\n\n\t\t\t\t// owner 1, share 1 ← owner 2\n\t\t\t\townerTwoShareOneAccountBalance -= tx.QuantityOne\n\t\t\t\townerOneShareOneAccountBalance += tx.QuantityOne\n\n\t\t\t\t// owner 2, share 2 ← owner 1\n\t\t\t\townerOneShareTwoAccountBalance -= tx.QuantityTwo\n\t\t\t\townerTwoShareTwoAccountBalance += tx.QuantityTwo\n\n\t\t\t\t// update database share one\n\t\t\t\tif 0 == ownerTwoShareOneAccountBalance {\n\t\t\t\t\ttrx.Delete(storage.Pool.ShareQuantity, ownerTwoShareOneKey)\n\t\t\t\t} else {\n\t\t\t\t\ttrx.PutN(storage.Pool.ShareQuantity, ownerTwoShareOneKey, ownerTwoShareOneAccountBalance)\n\t\t\t\t}\n\t\t\t\ttrx.PutN(storage.Pool.ShareQuantity, ownerOneShareOneKey, ownerOneShareOneAccountBalance)\n\n\t\t\t\t// update database share two\n\t\t\t\tif 0 == ownerOneShareTwoAccountBalance {\n\t\t\t\t\ttrx.Delete(storage.Pool.ShareQuantity, ownerOneShareTwoKey)\n\t\t\t\t} else {\n\t\t\t\t\ttrx.PutN(storage.Pool.ShareQuantity, ownerOneShareTwoKey, ownerOneShareTwoAccountBalance)\n\t\t\t\t}\n\t\t\t\ttrx.PutN(storage.Pool.ShareQuantity, ownerTwoShareTwoKey, ownerTwoShareTwoAccountBalance)\n\n\t\t\tdefault:\n\t\t\t\ttrx.Abort()\n\t\t\t\tlogger.Panicf(\"unexpected transaction: %v\", transaction)\n\t\t\t}\n\n\t\t\tdata = data[n:]\n\t\t\tif 0 == len(data) {\n\t\t\t\ttrx.Commit()\n\t\t\t\tbreak inner_loop\n\t\t\t}\n\n\t\t\t// commit db transactions\n\t\t\ttrx.Commit()\n\t\t}\n\n\t\t// start db transaction by block & index db\n\t\ttrx, err := storage.NewDBTransaction()\n\t\tif nil != err {\n\t\t\treturn err\n\t\t}\n\n\t\t// block number key for deletion\n\t\tblockNumberKey := make([]byte, 8)\n\t\tbinary.BigEndian.PutUint64(blockNumberKey, header.Number)\n\n\t\t// block ownership 
remove\n\t\tfoundationTxId := blockrecord.FoundationTxId(header.Number, digest)\n\t\ttrx.Delete(storage.Pool.Transactions, foundationTxId[:])\n\t\tif nil == blockOwner {\n\t\t\ttrx.Abort()\n\t\t\tlog.Criticalf(\"nil block owner for block: %d\", header.Number)\n\t\t} else {\n\t\t\townership.Transfer(trx, foundationTxId, foundationTxId, 0, blockOwner, nil)\n\t\t}\n\t\t// remove remaining block data\n\t\ttrx.Delete(storage.Pool.BlockOwnerTxIndex, foundationTxId[:])\n\t\ttrx.Delete(storage.Pool.BlockOwnerPayment, blockNumberKey)\n\t\ttrx.Delete(storage.Pool.Blocks, blockNumberKey)\n\n\t\t// and delete its hash\n\t\ttrx.Delete(storage.Pool.BlockHeaderHash, blockNumberKey)\n\n\t\t// fetch previous block number\n\t\tbinary.BigEndian.PutUint64(blockNumberKey, header.Number-1)\n\t\tpackedBlock = storage.Pool.Blocks.Get(blockNumberKey)\n\n\t\tif nil == packedBlock {\n\t\t\t// all blocks deleted\n\t\t\tblockheader.SetGenesis()\n\t\t\ttrx.Commit()\n\t\t\tbreak outer_loop\n\t\t}\n\n\t\t// commit db transactions\n\t\ttrx.Commit()\n\n\t\t// ensure this block's hash is completely erased\n\t\tblockheader.ClearCache()\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "fdbbaffa95a85521e0f296be9a1e9833", "score": "0.50039047", "text": "func (c *Converter) PrevBlock() *notionapi.Block {\n\tif c.CurrBlockIdx == 0 {\n\t\treturn nil\n\t}\n\treturn c.CurrBlocks[c.CurrBlockIdx-1]\n}", "title": "" }, { "docid": "b001dfc56c838101b232a0d53dfe3ed7", "score": "0.49808308", "text": "func (cache *blockCache) tipHash() chainhash.Hash {\n\tcache.mtx.RLock()\n\tdefer cache.mtx.RUnlock()\n\treturn cache.best.hash\n}", "title": "" }, { "docid": "7fab4d595e12211645e51568b68cb15a", "score": "0.49733704", "text": "func dbFetchHashByHeight(dbTx database.Tx, height int32) (*chainhash.Hash, error) {\n\tvar serializedHeight [4]byte\n\tbyteOrder.PutUint32(serializedHeight[:], uint32(height))\n\n\tmeta := dbTx.Metadata()\n\theightIndex := meta.Bucket(heightIndexBucketName)\n\thashBytes := heightIndex.Get(serializedHeight[:])\n\tif hashBytes == nil {\n\t\tstr := fmt.Sprintf(\"no block at height %d exists\", height)\n\t\treturn nil, errNotInMainChain(str)\n\t}\n\n\tvar hash chainhash.Hash\n\tcopy(hash[:], hashBytes)\n\treturn &hash, nil\n}", "title": "" }, { "docid": "f4e623fb21ee7a76393804dcf6475782", "score": "0.4963846", "text": "func (b *blockManager) orphanRoot(hash *chainhash.Hash) *chainhash.Hash {\n\t// Protect concurrent access. Using a read lock only so multiple\n\t// readers can query without blocking each other.\n\tb.orphanLock.RLock()\n\tdefer b.orphanLock.RUnlock()\n\n\t// Keep looping while the parent of each orphaned block is known and is an\n\t// orphan itself.\n\torphanRoot := hash\n\tprevHash := hash\n\tfor {\n\t\torphan, exists := b.orphans[*prevHash]\n\t\tif !exists {\n\t\t\tbreak\n\t\t}\n\t\torphanRoot = prevHash\n\t\tprevHash = &orphan.block.MsgBlock().Header.PrevBlock\n\t}\n\n\treturn orphanRoot\n}", "title": "" }, { "docid": "06a965cdf971bfbb518ebbccfba14343", "score": "0.4962344", "text": "func getSkipHeight(height int32) int32 {\n\tif height < 2 {\n\t\treturn 0\n\t}\n\n\t// Determine which height to jump back to. 
Any number strictly lower than\n\t// height is acceptable, but the following expression seems to perform well\n\t// in simulations (max 110 steps to go back up to 2**18 blocks).\n\tif (height & 1) > 0 {\n\t\treturn invertLowestOne(invertLowestOne(height-1)) + 1\n\t}\n\treturn invertLowestOne(height)\n}", "title": "" }, { "docid": "1141a11498161440fe266d29ac8aa3ba", "score": "0.49562454", "text": "func GetLastBlock(coin string) (uint64, string, error) {\n\trow := struct {\n\t\tHeight uint64 `db:\"height\"`\n\t\tBlockHash string `db:\"hash\"`\n\t}{}\n\n\tif err := GetDB().Get(&row, \"SELECT height, hash FROM blocks WHERE coin = ? ORDER BY height DESC LIMIT 1\", coin); err != nil {\n\t\tif err.Error() == \"sql: no rows in result set\" {\n\t\t\t// assume db is fresh, return a block from before the fork\n\t\t\treturn 0, \"000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f\", nil\n\t\t}\n\t\treturn 0, \"\", err\n\t}\n\n\treturn row.Height, row.BlockHash, nil\n}", "title": "" }, { "docid": "377d849f6b34a9672a53ad085f8ba9d1", "score": "0.49476996", "text": "func (serBlock *SerBlock2) GetPreviousBlockHash() ([]byte, error) {\n\tbuf := proto.NewBuffer(serBlock.blockBytes)\n\treturn serBlock.extractPreviousBlockHash(buf)\n}", "title": "" }, { "docid": "72c23f09f80de198daeaadb474c17c90", "score": "0.49454945", "text": "func (b *dbBlock) prev() DatabaseBlock {\n\treturn b.listState.prev\n}", "title": "" }, { "docid": "e06f99702599fbe9e50d7c9c913907f4", "score": "0.49339458", "text": "func (h Hash) Parent() (Hash, error) {\n\tparent := Hash{prec: h.prec - 1}\n\tif parent.prec < PrecisionMin {\n\t\treturn parent, ErrInvalidPrecision\n\t}\n\tparent.bits = h.bits >> 2\n\treturn parent, nil\n}", "title": "" }, { "docid": "676b76ef98b24228cc6f2627d3f44d60", "score": "0.49273404", "text": "func (tree *ByPrev) UpperBound(key common.BlockIdType) IteratorByPrev {\n\tresult := tree.End()\n\tnode := tree.Root\n\n\tif node == nil {\n\t\treturn result\n\t}\n\n\tfor {\n\t\tif ByPrevCompare(key, node.Key) >= 0 {\n\t\t\tif node.Right != nil {\n\t\t\t\tnode = node.Right\n\t\t\t} else {\n\t\t\t\treturn result\n\t\t\t}\n\t\t} else {\n\t\t\tresult.node = node\n\t\t\tresult.position = betweenByPrev\n\t\t\tif node.Left != nil {\n\t\t\t\tnode = node.Left\n\t\t\t} else {\n\t\t\t\treturn result\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "af5b44e16ec78d40fcccdc7a0b75da8b", "score": "0.49262846", "text": "func (s lockNodes) PrevIndex(index int) int {\n\tsort.Sort(s)\n\n\tvar prevIndex int\n\tfor _, node := range s.Nodes {\n\t\tidx, _ := strconv.Atoi(path.Base(node.Key))\n\t\tif index == idx {\n\t\t\treturn prevIndex\n\t\t}\n\t\tprevIndex = idx\n\t}\n\treturn 0\n}", "title": "" }, { "docid": "f98c249fc3164d7318d20518efd70955", "score": "0.49196133", "text": "func GetHashFn(ref *model.Header, chain ChainContext) func(n uint64) common.Hash {\n\t// Cache will initially contain [refHash.parent],\n\t// Then fill up with [refHash.p, refHash.pp, refHash.ppp, ...]\n\tvar cache []common.Hash\n\n\treturn func(n uint64) common.Hash {\n\t\tif ref.Number.Uint64() <= n {\n\t\t\t// This situation can happen if we're doing tracing and using\n\t\t\t// block overrides.\n\t\t\treturn common.Hash{}\n\t\t}\n\t\t// If there's no hash cache yet, make one\n\t\tif len(cache) == 0 {\n\t\t\tcache = append(cache, ref.ParentHash)\n\t\t}\n\t\tif idx := ref.Number.Uint64() - n - 1; idx < uint64(len(cache)) {\n\t\t\treturn cache[idx]\n\t\t}\n\t\t// No luck in the cache, but we can start iterating from the last element we already 
know\n\t\tlastKnownHash := cache[len(cache)-1]\n\t\tlastKnownNumber := ref.Number.Uint64() - uint64(len(cache))\n\n\t\tfor {\n\t\t\theader := chain.GetHeader(lastKnownHash, lastKnownNumber)\n\t\t\tif header == nil {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tcache = append(cache, header.ParentHash)\n\t\t\tlastKnownHash = header.ParentHash\n\t\t\tlastKnownNumber = header.Number.Uint64() - 1\n\t\t\tif n == lastKnownNumber {\n\t\t\t\treturn lastKnownHash\n\t\t\t}\n\t\t}\n\t\treturn common.Hash{}\n\t}\n}", "title": "" }, { "docid": "55ce766ad75b4098e523d36bccf49d97", "score": "0.48959634", "text": "func (b *Block) DeriveHash() {\n\tinfo := bytes.Join([][]byte{b.Data, b.PrevHash}, []byte{})\n\tfmt.Println(\"info: \", info)\n\thash := sha256.Sum256(info)\n\tfmt.Println(\"hash: \", hash)\n\tb.Hash = hash[:]\n}", "title": "" }, { "docid": "c1de076e598b187101e5786ee99f2162", "score": "0.48679167", "text": "func (node *blockNode) Ancestor(height int32) *blockNode {\n\tif height < 0 || height > node.height {\n\t\treturn nil\n\t}\n\n\tn := node\n\tfor ; n != nil && n.height != height; n = n.parent {\n\t\t// Intentionally left blank\n\t}\n\n\treturn n\n}", "title": "" }, { "docid": "59bed426646d93d8fe11012b71175d83", "score": "0.48507085", "text": "func (s *State) invertRecentBlock() (diffs []OutputDiff) {\n\t// Remove the output for the miner subsidy.\n\t//\n\t// TODO: Update this for incentive stuff - miner doesn't get subsidy until\n\t// 2000 or 5000 or 10000 blocks later.\n\tsubsidyID := s.currentBlock().SubsidyID()\n\tsubsidy, err := s.output(subsidyID)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tdiff := OutputDiff{New: false, ID: subsidyID, Output: subsidy}\n\tdiffs = append(diffs, diff)\n\tdelete(s.unspentOutputs, subsidyID)\n\n\t// Perform inverse contract maintenance.\n\tdiffSet := s.invertContractMaintenance()\n\tdiffs = append(diffs, diffSet...)\n\n\t// Reverse each transaction in the block, in reverse order from how\n\t// they appear in the block.\n\tfor i := len(s.currentBlock().Transactions) - 1; i >= 0; i-- {\n\t\tdiffSet := s.invertTransaction(s.currentBlock().Transactions[i])\n\t\tdiffs = append(diffs, diffSet...)\n\t}\n\n\t// Update the CurrentBlock and CurrentPath variables of the longest fork.\n\tdelete(s.currentPath, s.height())\n\ts.currentBlockID = s.currentBlock().ParentBlockID\n\treturn\n}", "title": "" }, { "docid": "b8b1f7b65fbd5547ae4623119da5763f", "score": "0.48268506", "text": "func blockHistory(tx *bolt.Tx) (blockIDs [32]types.BlockID) {\n\theight := blockHeight(tx)\n\tstep := types.BlockHeight(1)\n\t// The final step is to include the genesis block, which is why the final\n\t// element is skipped during iteration.\n\tfor i := 0; i < 31; i++ {\n\t\t// Include the next block.\n\t\tblockID, err := getPath(tx, height)\n\t\tif build.DEBUG && err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tblockIDs[i] = blockID\n\n\t\t// Determine the height of the next block to include and then increase\n\t\t// the step size. 
The height must be decreased first to prevent\n\t\t// underflow.\n\t\t//\n\t\t// `i >= 9` means that the first 10 blocks will be included, and then\n\t\t// skipping will start.\n\t\tif i >= 9 {\n\t\t\tstep *= 2\n\t\t}\n\t\tif height <= step {\n\t\t\tbreak\n\t\t}\n\t\theight -= step\n\t}\n\t// Include the genesis block as the last element\n\tblockID, err := getPath(tx, 0)\n\tif build.DEBUG && err != nil {\n\t\tpanic(err)\n\t}\n\tblockIDs[31] = blockID\n\treturn blockIDs\n}", "title": "" }, { "docid": "99d243b639f8a938b3f6bf4c21315db6", "score": "0.4824166", "text": "func (b *BlockChain) getSuitableBlock(lastNode *blockNode) *blockNode {\n\t// In order to avoid a block is a very skewed timestamp to have too much\n\t// influence, we select the median of the 3 top most nodes as a starting\n\t// point.\n\tnodes := make([]*blockNode, 3)\n\tnodes[2] = lastNode\n\tnodes[1] = lastNode.parent\n\n\tnodes[0] = nodes[1].parent\n\n\t// Sorting network.\n\tif nodes[0].timestamp > nodes[2].timestamp {\n\t\tnodes[0], nodes[2] = nodes[2], nodes[0]\n\t}\n\n\tif nodes[0].timestamp > nodes[1].timestamp {\n\t\tnodes[0], nodes[1] = nodes[1], nodes[0]\n\t}\n\n\tif nodes[1].timestamp > nodes[2].timestamp {\n\t\tnodes[1], nodes[2] = nodes[2], nodes[1]\n\t}\n\n\t// We should have our candidate in the middle now.\n\treturn nodes[1]\n}", "title": "" }, { "docid": "176e89b381388c58a443dbb5e77c1c83", "score": "0.4816802", "text": "func (iter *RbTreeIterator[K, V]) Prev() iterator.ConstBidIterator[V] {\n\tif iter.IsValid() {\n\t\titer.node = iter.node.Prev()\n\t}\n\treturn iter\n}", "title": "" }, { "docid": "9d7f67523e711a87f9459cfa1c600359", "score": "0.48152208", "text": "func (log *Log) BlockUntilPresent(ctx context.Context, leaf []byte) (*pb.LogTreeHashResponse, error) {\n\tlastHead := int64(-1)\n\ttimeToSleep := time.Second\n\tfor {\n\t\tlth, err := log.TreeHead(ctx, Head)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif lth.TreeSize > lastHead {\n\t\t\tlastHead = lth.TreeSize\n\t\t\terr = log.VerifyInclusion(ctx, lth, leaf)\n\t\t\tswitch err {\n\t\t\tcase nil: // we found it\n\t\t\t\treturn lth, nil\n\t\t\tcase ErrNotFound:\n\t\t\t\t// no good, continue\n\t\t\tdefault:\n\t\t\t\t// Should return error, but we're struggling to differentiate not found vs other errors\n\t\t\t\t//return nil, err\n\t\t\t}\n\t\t\t// since we got a new tree head, reset sleep time\n\t\t\ttimeToSleep = time.Second\n\t\t} else {\n\t\t\t// no luck, snooze a bit longer\n\t\t\ttimeToSleep *= 2\n\t\t}\n\t\ttime.Sleep(timeToSleep)\n\t}\n}", "title": "" }, { "docid": "d51c6cdc605e966b1f874d9a47ba4f7a", "score": "0.48101443", "text": "func (bi *BlockIndices) GetMainChain() BlockIndexChain {\n\tblocksAtHeight := make(map[int]BlockIndices)\n\tfor _, b := range *bi {\n\t\th := int(b.Height)\n\t\tif blocksAtHeight[h] == nil {\n\t\t\tblocksAtHeight[h] = make(BlockIndices, 0)\n\t\t}\n\t\tblocksAtHeight[h] = append(blocksAtHeight[h], b)\n\t}\n\n\t// create a list of heights starting that the tip and going\n\t// down to genesis\n\tkeys := make([]int, 0, len(blocksAtHeight))\n\tfor k := range blocksAtHeight {\n\t\tkeys = append(keys, k)\n\t}\n\tsort.Sort(sort.Reverse(sort.IntSlice(keys)))\n\n\t// create a list of block indexes representing the main or most-work\n\t// chain.\n\t// the list is created in reverse. starting with the tip and then adding\n\t// the block at the next lower hight. 
if there are multiple blocks at a\n\t// lower hight then pick the one that the previous one refereed to.\n\tbic := BlockIndexChain{}\n\tfor _, height := range keys {\n\t\tblocksAtCurHeight := blocksAtHeight[height]\n\t\tif len(blocksAtCurHeight) == 1 {\n\t\t\tbic = append(bic, blocksAtCurHeight[0])\n\t\t} else {\n\t\t\t// if there are two or more blocks for a given height we choose\n\t\t\t// the block the previous block refereed to\n\t\t\tif len(bic) > 0 {\n\t\t\t\tdidAppend := false\n\t\t\t\tfor _, b := range blocksAtCurHeight {\n\t\t\t\t\tif bytes.Equal(b.Hash[:], bic[len(bic)-1].PreviousHash[:]) {\n\t\t\t\t\t\tbic = append(bic, b)\n\t\t\t\t\t\tdidAppend = true\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// check that we had something to append. this only ever should be\n\t\t\t\t// true if there is an error in the database or our deserialization\n\t\t\t\t// of the data block index in the database.\n\t\t\t\tif !didAppend {\n\t\t\t\t\tpanic(fmt.Sprintf(\"No previous Block found with the hash %s at height %d!\", bic[len(bic)-1].PreviousHash, height))\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// just choose the first block we have at the highest height as tip\n\t\t\t\t// FIXME: use chainwork\n\t\t\t\tbic = append(bic, blocksAtCurHeight[0])\n\t\t\t}\n\t\t}\n\t}\n\t// reverse bic to start at genesis -> tip\n\tfor i := len(bic)/2 - 1; i >= 0; i-- {\n\t\topp := len(bic) - 1 - i\n\t\tbic[i], bic[opp] = bic[opp], bic[i]\n\t}\n\n\treturn bic\n}", "title": "" }, { "docid": "c8fd98a22c53829e1b414185aab96dcd", "score": "0.48015368", "text": "func (b *Block) DeriveHash() {\n\tinfo := bytes.Join([][]byte{b.Data, b.PrevHash}, []byte{})\n\thash := sha256.Sum256(info)\n\tb.Hash = hash[:]\n}", "title": "" }, { "docid": "ba18a9611244a86de7d36aa1a32fc547", "score": "0.479562", "text": "func (e *PublicEthAPI) GetUncleByBlockHashAndIndex(hash common.Hash, idx hexutil.Uint) map[string]interface{} {\n\treturn nil\n}", "title": "" }, { "docid": "e88cf5940a55911c900570eff119acfe", "score": "0.47945082", "text": "func (localNode *LocalNode) getNeighborsMajorityBlockByHeight(height uint32, neighbors []*node.RemoteNode) *block.Header {\n\tfor i := 0; i < 3; i++ {\n\t\tallHeaders, err := localNode.getNeighborsBlockHeaderByHeight(height, neighbors)\n\t\tif err != nil {\n\t\t\tlog.Warningf(\"Get neighbors block header at height %d error: %v\", height, err)\n\t\t\tcontinue\n\t\t}\n\n\t\tcounter := make(map[common.Uint256]int)\n\t\theaders := make(map[common.Uint256]*block.Header)\n\t\ttotalCount := 0\n\t\tallHeaders.Range(func(key, value interface{}) bool {\n\t\t\tif header, ok := value.(*block.Header); ok && header != nil {\n\t\t\t\tcounter[header.Hash()]++\n\t\t\t\tif _, ok := headers[header.Hash()]; !ok {\n\t\t\t\t\th := value.(*block.Header)\n\t\t\t\t\tif h.UnsignedHeader.Height > 0 {\n\t\t\t\t\t\terr = h.VerifySignature()\n\t\t\t\t\t}\n\t\t\t\t\tif err == nil {\n\t\t\t\t\t\theaders[header.Hash()] = h\n\t\t\t\t\t} else {\n\t\t\t\t\t\tlog.Infof(\"Received header with invalid signature from neighbor %s\", key)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\ttotalCount++\n\t\t\t}\n\t\t\treturn true\n\t\t})\n\n\t\tif totalCount == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tfor blockHash, count := range counter {\n\t\t\tif count > int(rollbackMinRelativeWeight*float32(totalCount)) {\n\t\t\t\treturn headers[blockHash]\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "18beb39aee7df1f66bcf5c8d55eaec0e", "score": "0.47721326", "text": "func (localNode *LocalNode) getNeighborsMajorityBlockHashByHeight(height uint32, neighbors []*node.RemoteNode) 
common.Uint256 {\n\theader := localNode.getNeighborsMajorityBlockByHeight(height, neighbors)\n\tif header == nil {\n\t\treturn common.EmptyUint256\n\t}\n\treturn header.Hash()\n}", "title": "" }, { "docid": "c47eba05d2f4e4602ac411eebd21f991", "score": "0.47656193", "text": "func (p *Path) Prev() *Cell {\n\n\tp.CurrentIndex--\n\tif p.CurrentIndex < 0 {\n\t\tp.CurrentIndex = 0\n\t}\n\n\treturn p.Cells[p.CurrentIndex]\n}", "title": "" }, { "docid": "74c68a146a50fa7916657521c312a87b", "score": "0.47589454", "text": "func GetHashFn(ref *types.Header, chain ChainContext) func(n uint64) common.Hash {\n\tvar cache map[uint64]common.Hash\n\n\treturn func(n uint64) common.Hash {\n\t\t// If there's no hash cache yet, make one\n\t\tif cache == nil {\n\t\t\tcache = map[uint64]common.Hash{\n\t\t\t\tref.Height - 1: ref.ParentHash,\n\t\t\t}\n\t\t}\n\t\t// Try to fulfill the request from the cache\n\t\tif hash, ok := cache[n]; ok {\n\t\t\treturn hash\n\t\t}\n\t\t// Not cached, iterate the blocks and cache the hashes\n\t\tfor header := chain.GetHeader(ref.Height - 1); header != nil; header = chain.GetHeader(header.Height - 1) {\n\t\t\tcache[header.Height-1] = header.ParentHash\n\t\t\tif n == header.Height-1 {\n\t\t\t\treturn header.ParentHash\n\t\t\t}\n\n\t\t\tif header.Height == 0 {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\treturn common.EmptyHash\n\t}\n}", "title": "" }, { "docid": "7fe58810ea36e9fb0433b086d2a3b1ec", "score": "0.47492602", "text": "func FindPreviousUtreexoRootHint(height int32, roots []UtreexoRootHint) *UtreexoRootHint {\n\tif len(roots) == 0 {\n\t\treturn nil\n\t}\n\n\t// There is no previous root if the height is already after the first\n\t// root.\n\tfirstRoot := &roots[0]\n\tif height <= firstRoot.Height {\n\t\treturn nil\n\t}\n\n\t// Find the previous root.\n\tpreviousRoot := firstRoot\n\tfor i := 1; i < len(roots); i++ {\n\t\tif height <= roots[i].Height {\n\t\t\tbreak\n\t\t}\n\t\tpreviousRoot = &roots[i]\n\t}\n\n\treturn previousRoot\n}", "title": "" }, { "docid": "4d1313a5f0889ca7903e29089cfe3c51", "score": "0.47361606", "text": "func (n *Node) GetBadNode(sibling int) (*Node, int) {\n\t// if leaf node, there are is no bad node. 
But this won't be reached, because I know there's a bad node.\n\tif len(n.ChildNodes) == 0 {\n\t\treturn nil, -1\n\t}\n\n\t// scan children for the one node that doesn't match.\n\tmaj := n.getMajorityWeight()\n\tvar bad *Node\n\tfor _, c := range n.ChildNodes {\n\t\tif c.GetWeight() != maj {\n\t\t\tbad = c\n\t\t\tbreak\n\t\t}\n\t}\n\t// if no bad node found, the current node is guilty.\n\tif bad == nil {\n\t\treturn n, sibling\n\t}\n\t// if bad node's children are balanced, bad node is guilty.\n\tif bad.isBalanced() {\n\t\treturn bad, maj\n\t}\n\t// otherwise, keep looking\n\treturn bad.GetBadNode(maj)\n}", "title": "" }, { "docid": "50113c21cab7a6fbaef552c81747483f", "score": "0.47336188", "text": "func getParent(state int, at map[int]map[uint8]int) (uint8, int) {\n\tfor beginState, transitions := range at {\n\t\tfor c, endState := range transitions {\n\t\t\tif endState == state {\n\t\t\t\treturn c, beginState\n\t\t\t}\n\t\t}\n\t}\n\treturn 0, 0 //unreachable\n}", "title": "" }, { "docid": "4d19d88317a7cd8119186440cb741bc3", "score": "0.47154635", "text": "func (bl *Block) Hash() common.Hash {\n\tif !bl.built {\n\t\treturn common.Hash{}\n\t}\n\treturn bl.tree.Root()\n}", "title": "" }, { "docid": "49abba3ffa98389a02718771368ba169", "score": "0.46985713", "text": "func (tree *MutableTree) recursiveRemove(node *Node, key []byte, orphans *[]*Node) (newHash []byte, newSelf *Node, newKey []byte, newValue []byte, err error) {\n\tversion := tree.version + 1\n\n\tif node.isLeaf() {\n\t\tif bytes.Equal(key, node.key) {\n\t\t\t*orphans = append(*orphans, node)\n\t\t\treturn nil, nil, nil, node.value, nil\n\t\t}\n\t\treturn node.hash, node, nil, nil, nil\n\t}\n\n\t// node.key < key; we go to the left to find the key:\n\tif bytes.Compare(key, node.key) < 0 {\n\t\tleftNode, err := node.getLeftNode(tree.ImmutableTree)\n\t\tif err != nil {\n\t\t\treturn nil, nil, nil, nil, err\n\t\t}\n\t\tnewLeftHash, newLeftNode, newKey, value, err := tree.recursiveRemove(leftNode, key, orphans)\n\t\tif err != nil {\n\t\t\treturn nil, nil, nil, nil, err\n\t\t}\n\n\t\tif len(*orphans) == 0 {\n\t\t\treturn node.hash, node, nil, value, nil\n\t\t}\n\t\t*orphans = append(*orphans, node)\n\t\tif newLeftHash == nil && newLeftNode == nil { // left node held value, was removed\n\t\t\treturn node.rightHash, node.rightNode, node.key, value, nil\n\t\t}\n\n\t\tnewNode, err := node.clone(version)\n\t\tif err != nil {\n\t\t\treturn nil, nil, nil, nil, err\n\t\t}\n\n\t\tnewNode.leftHash, newNode.leftNode = newLeftHash, newLeftNode\n\t\terr = newNode.calcHeightAndSize(tree.ImmutableTree)\n\t\tif err != nil {\n\t\t\treturn nil, nil, nil, nil, err\n\t\t}\n\t\tnewNode, err = tree.balance(newNode, orphans)\n\t\tif err != nil {\n\t\t\treturn nil, nil, nil, nil, err\n\t\t}\n\n\t\treturn newNode.hash, newNode, newKey, value, nil\n\t}\n\t// node.key >= key; either found or look to the right:\n\trightNode, err := node.getRightNode(tree.ImmutableTree)\n\tif err != nil {\n\t\treturn nil, nil, nil, nil, err\n\t}\n\tnewRightHash, newRightNode, newKey, value, err := tree.recursiveRemove(rightNode, key, orphans)\n\tif err != nil {\n\t\treturn nil, nil, nil, nil, err\n\t}\n\tif len(*orphans) == 0 {\n\t\treturn node.hash, node, nil, value, nil\n\t}\n\t*orphans = append(*orphans, node)\n\tif newRightHash == nil && newRightNode == nil { // right node held value, was removed\n\t\treturn node.leftHash, node.leftNode, nil, value, nil\n\t}\n\n\tnewNode, err := node.clone(version)\n\tif err != nil {\n\t\treturn nil, nil, nil, nil, 
err\n\t}\n\n\tnewNode.rightHash, newNode.rightNode = newRightHash, newRightNode\n\tif newKey != nil {\n\t\tnewNode.key = newKey\n\t}\n\terr = newNode.calcHeightAndSize(tree.ImmutableTree)\n\tif err != nil {\n\t\treturn nil, nil, nil, nil, err\n\t}\n\n\tnewNode, err = tree.balance(newNode, orphans)\n\tif err != nil {\n\t\treturn nil, nil, nil, nil, err\n\t}\n\n\treturn newNode.hash, newNode, nil, value, nil\n}", "title": "" }, { "docid": "3a52c964c57463c465bd15b58b9c0eb7", "score": "0.46967736", "text": "func (cache *blockCache) tipHeight() uint32 {\n\tcache.mtx.Lock()\n\tdefer cache.mtx.Unlock()\n\treturn cache.best.height\n}", "title": "" }, { "docid": "c67f8b1b3a2be0620a811b2a7f69294c", "score": "0.46904", "text": "func (db *LevelDb) DropAfterBlockBySha(sha *btcwire.ShaHash) (rerr error) {\n\tdb.dbLock.Lock()\n\tdefer db.dbLock.Unlock()\n\tdefer func() {\n\t\tif rerr == nil {\n\t\t\trerr = db.processBatches()\n\t\t} else {\n\t\t\tdb.lBatch().Reset()\n\t\t}\n\t}()\n\n\tstartheight := db.nextBlock - 1\n\n\tkeepidx, err := db.getBlkLoc(sha)\n\tif err != nil {\n\t\t// should the error here be normalized ?\n\t\tlog.Tracef(\"block loc failed %v \", sha)\n\t\treturn err\n\t}\n\n\tfor height := startheight; height > keepidx; height = height - 1 {\n\t\tvar blk *btcutil.Block\n\t\tblksha, buf, err := db.getBlkByHeight(height)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tblk, err = btcutil.NewBlockFromBytes(buf)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n err = db.undoNames(height)\n if err != nil {\n log.Warnf(\"Cannot undo names at height %d: %v\", height, err)\n return err\n }\n\n\t\tfor _, tx := range blk.MsgBlock().Transactions {\n\t\t\terr = db.unSpend(tx)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\t// rather than iterate the list of tx backward, do it twice.\n\t\tfor _, tx := range blk.Transactions() {\n\t\t\tvar txUo txUpdateObj\n\t\t\ttxUo.delete = true\n\t\t\tdb.txUpdateMap[*tx.Sha()] = &txUo\n\t\t}\n\t\tdb.lBatch().Delete(shaBlkToKey(blksha))\n\t\tdb.lBatch().Delete(int64ToKey(height))\n\n _, err = db.expireNames(height+1, true)\n if err != nil {\n log.Warnf(\"Cannot unexpire names at height %d: %v\", height, err)\n return err\n }\n\t}\n\n\tdb.nextBlock = keepidx + 1\n\n\treturn nil\n}", "title": "" }, { "docid": "2243d23a2030b3ecceb13085e562d04d", "score": "0.46861377", "text": "func (e *ByteIntervalElement) Prev() *ByteIntervalElement {\n\tif p := e.prev; e.list != nil && p != &e.list.root {\n\t\treturn p\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "29f63659c1b94f679188d2aeb5ee0e00", "score": "0.46749425", "text": "func (reorg *reorganizer) gatherChainInfo() error {\n\t//find branch root block , gather rollforward Target block\n\tcdb := reorg.cs.cdb\n\n\tbrBlock := reorg.brTopBlock\n\tbrBlockNo := brBlock.GetHeader().GetBlockNo()\n\tbrBlockHash := brBlock.BlockHash()\n\n\tlatestNo := cdb.latest\n\n\tfor {\n\t\tmainBlockHash, err := cdb.getHashByNo(brBlockNo)\n\n\t\tif latestNo < brBlockNo {\n\t\t\t//must not exist (no, hash) record\n\t\t\tif err == nil {\n\t\t\t\treturn fmt.Errorf(\"block of main chain can't be higher than latest. 
no=%d, latest=%d\",\n\t\t\t\t\tbrBlockNo, latestNo)\n\t\t\t}\n\t\t} else {\n\t\t\t//must exist\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tif bytes.Equal(brBlock.Hash, mainBlockHash) {\n\t\t\t\tif latestNo == brBlockNo {\n\t\t\t\t\treturn fmt.Errorf(\"best block can't be branch root block\")\n\t\t\t\t}\n\t\t\t\treorg.brRootBlock = brBlock\n\n\t\t\t\tlogger.Debug().Str(\"hash\", brBlock.ID()).Uint64(\"blockNo\", brBlockNo).\n\t\t\t\t\tMsg(\"found branch root block\")\n\n\t\t\t\treturn nil\n\t\t\t}\n\n\t\t\t//gather rollback target\n\n\t\t\tlogger.Debug().Str(\"hash\", enc.ToString(mainBlockHash)).Uint64(\"blockNo\", brBlockNo).\n\t\t\t\tMsg(\"gather rollback target\")\n\t\t\treorg.rbBlocks = append(reorg.rbBlocks, &reorgBlock{brBlockNo, mainBlockHash})\n\t\t}\n\n\t\tif brBlockNo <= 0 {\n\t\t\tbreak\n\t\t}\n\n\t\t//gather rollforward target\n\t\tlogger.Debug().Str(\"hash\", enc.ToString(brBlockHash)).Uint64(\"blockNo\", brBlockNo).\n\t\t\tMsg(\"gather rollforward target\")\n\t\treorg.rfBlocks = append(reorg.rfBlocks, &reorgBlock{brBlockNo, brBlockHash})\n\n\t\t//get prev block from branch\n\t\tif brBlock, err = cdb.getBlock(brBlock.GetHeader().GetPrevBlockHash()); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tprevBrBlockNo := brBlock.GetHeader().GetBlockNo()\n\t\tif brBlockNo-1 != prevBrBlockNo {\n\t\t\treturn fmt.Errorf(\"rollback target is not valid. block(%v), blockno(exp=%d,res=%d)\",\n\t\t\t\tbrBlock.ID(), brBlockNo-1, prevBrBlockNo)\n\t\t}\n\t\tbrBlockNo = prevBrBlockNo\n\t\tbrBlockHash = brBlock.BlockHash()\n\t}\n\n\treturn fmt.Errorf(\"branch root block(%v) doesn't exist\", reorg.brTopBlock.ID())\n}", "title": "" }, { "docid": "883c9758ac9db2519cf2f968b14627d4", "score": "0.46745867", "text": "func (h Head) EarliestInChain() Head {\n\tfor {\n\t\tif h.Parent != nil {\n\t\t\th = *h.Parent\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn h\n}", "title": "" }, { "docid": "49ae9e143c28c66b35cf8543fa9ee019", "score": "0.46714935", "text": "func (obj *state) Previous() hash.Hash {\n\treturn obj.previous\n}", "title": "" }, { "docid": "88a69664e3524ad1fcf1f7e9ef3efabc", "score": "0.46633518", "text": "func (evt *BlockMove) PrevParentId() (ret string) {\n\tif p := evt.Get(\"oldParentId\"); p.Bool() {\n\t\tret = p.String()\n\t}\n\treturn\n}", "title": "" }, { "docid": "7b8836571027cbaf247924b07a135d5c", "score": "0.46625793", "text": "func (n *Node) Prev() *Node {\n\tif n.left != nil {\n\t\treturn n.left.Max()\n\t}\n\tp := n.parent\n\tfor p != nil && n == p.left {\n\t\tn = p\n\t\tp = p.parent\n\t}\n\treturn p\n}", "title": "" }, { "docid": "2ee2c352c895b44abda1efc208948ca5", "score": "0.4662078", "text": "func (cache *blockCache) tip() dcrBlock {\n\tcache.mtx.RLock()\n\tdefer cache.mtx.RUnlock()\n\treturn cache.best\n}", "title": "" }, { "docid": "7a237137e1cd424d0752c4c752514ecd", "score": "0.46513352", "text": "func (commit *NestedCommit) Height() (int64, error) {\n\tif len(commit.Precommits) == 0 {\n\t\treturn 0, nil\n\t}\n\n\treturn commit.FirstPrecommit().Height, nil\n}", "title": "" }, { "docid": "e7a791cd4a17725a6befe21a533e0d56", "score": "0.4645766", "text": "func (r *Ring) Remove(hex string) {\n\tif !validateInput(hex) {\n\t\tlogging.Error(\"Ring invalid hex: %v\", hex)\n\t\treturn\n\t}\n\tif r.size == 0 {\n\t\treturn\n\t}\n\tif r.size == 1 {\n\t\tr.head = nil\n\t\tr.size = 0\n\t\treturn\n\t}\n\tif r.size == 2 {\n\t\tif r.head.val == hex {\n\t\t\tr.head = r.head.next\n\t\t\tr.head.next = nil\n\t\t\tr.head.prv = nil\n\t\t\tr.head.distPrv = 
big.NewInt(0)\n\t\t\tr.head.distNext = big.NewInt(0)\n\t\t\tr.size = 1\n\t\t} else if r.head.next.val == hex {\n\t\t\tr.head.next = nil\n\t\t\tr.head.prv = nil\n\t\t\tr.head.distPrv = big.NewInt(0)\n\t\t\tr.head.distNext = big.NewInt(0)\n\t\t\tr.size = 1\n\t\t}\n\t\treturn\n\t}\n\tif r.head.val == hex {\n\t\toldHead := r.head\n\t\t// first becomes the head\n\t\tr.head = r.head.next\n\t\t// tail links to the new head\n\t\toldHead.prv = r.head\n\t\t// update distances\n\t\ttailToNewHeadDistance := getDist(r.head.prv.key, r.head.key)\n\t\tr.head.distPrv = tailToNewHeadDistance\n\t\tr.head.prv.distNext = tailToNewHeadDistance\n\t\t// help GC\n\t\toldHead = nil\n\t\tr.size--\n\t\treturn\n\t}\n\thexKey, _ := new(big.Int).SetString(hex, 16)\n\tcurrent := r.head.next\n\tfor current != nil && current.val != r.head.val {\n\t\t// Loop until we reach nil or we go back to head\n\t\tif current.val == hex {\n\t\t\t// We need to remove current\n\t\t\tprv := current.prv\n\t\t\tnext := current.next\n\t\t\tif prv != nil && next != nil {\n\t\t\t\tif prv.val == next.val {\n\t\t\t\t\tnext.prv = nil\n\t\t\t\t\tnext.distPrv = nil\n\t\t\t\t\tnext.next = nil\n\t\t\t\t\tnext.distNext = nil\n\t\t\t\t} else {\n\t\t\t\t\tprv.next = next\n\t\t\t\t\tprv.distNext = getDist(prv.key, next.key)\n\t\t\t\t\tnext.prv = prv\n\t\t\t\t\tnext.distPrv = getDist(prv.key, next.key)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif r.head.val == current.val {\n\t\t\t\tr.head = next\n\t\t\t}\n\t\t\tr.size--\n\t\t\treturn\n\t\t}\n\t\t// Not equal\n\t\tif current.key.Cmp(hexKey) > 0 {\n\t\t\t// Return now\n\t\t\treturn\n\t\t}\n\t\tcurrent = current.next\n\t}\n}", "title": "" }, { "docid": "fa3aaa6d01508961c4acbf794e7d67bc", "score": "0.46429682", "text": "func (b *CommonBlock) Height() uint64 { return b.Hght }", "title": "" }, { "docid": "63ef3ed144acc8c316f497ae3a162dda", "score": "0.46377766", "text": "func (r Root) FindPrevSibling() Node {\n\treturn Root{r.Pointer.PrevSibling.PrevSibling}\n}", "title": "" }, { "docid": "e1fd7700adaf70514a1c00bac5a191ad", "score": "0.46351358", "text": "func (g *Hash) RemoveBi(v, w int) {\n\n\t//use the Remove function above to delete edges in both directions\n\n\tg.Remove(w, v)\n\tg.Remove(v, w)\n\n}", "title": "" }, { "docid": "6a9bc9b61fa46e11371f2f0f849e3fb0", "score": "0.4634024", "text": "func (d *driver) lastRef(file *pfs.File, shard uint64) *pfs.Commit {\n\tcommit := file.Commit\n\tfor commit != nil {\n\t\tdiffInfo, _ := d.finished.get(&drive.Diff{\n\t\t\tCommit: commit,\n\t\t\tShard: shard,\n\t\t})\n\t\tif _, ok := diffInfo.Appends[path.Clean(file.Path)]; ok {\n\t\t\treturn commit\n\t\t}\n\t\tcommit = diffInfo.ParentCommit\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "f4b36de201f761c34bbb2c1f9e57a1f2", "score": "0.46339363", "text": "func (b *BlockChain) getLastBlockIndex(last *blockNode, proofOfStake bool) (block *blockNode) {\n\n\tif last == nil {\n\t\tdefer timeTrack(now(), fmt.Sprintf(\"GetLastBlockIndex\"))\n\t} else {\n\t\tdefer timeTrack(now(), fmt.Sprintf(\"GetLastBlockIndex(%v)\", last.hash))\n\t}\n\n\tblock = last\n\tfor true {\n\t\tif block == nil {\n\t\t\tbreak\n\t\t}\n\t\t// TODO dirty workaround, ppcoin doesn't point to genesis block\n\t\tif block.height == 0 {\n\t\t\tbreak\n\t\t}\n\t\tif block.parentHash == nil {\n\t\t\tbreak\n\t\t}\n\t\tif block.isProofOfStake() == proofOfStake {\n\t\t\tbreak\n\t\t}\n\t\tblock, _ = b.getPrevNodeFromNode(block)\n\t}\n\treturn block\n}", "title": "" }, { "docid": "c60bc90326a647e470f79e0f243009e7", "score": "0.46330297", "text": "func removeChildNode(children 
[]*blockNode, node *blockNode) []*blockNode {\n\tif node == nil {\n\t\treturn children\n\t}\n\n\tfor i := 0; i < len(children); i++ {\n\t\tif children[i].hash.IsEqual(node.hash) {\n\t\t\tcopy(children[i:], children[i+1:])\n\t\t\tchildren[len(children)-1] = nil\n\t\t\treturn children[:len(children)-1]\n\t\t}\n\t}\n\treturn children\n}", "title": "" }, { "docid": "60687b1774c218864ba790f7c364b25c", "score": "0.46312782", "text": "func (s *ConsensusSet) blockHistory() (blockIDs [32]types.BlockID) {\n\tknownBlocks := make([]types.BlockID, 0, 32)\n\tstep := types.BlockHeight(1)\n\tfor height := s.height(); ; height -= step {\n\t\t// after 12, start doubling\n\t\tknownBlocks = append(knownBlocks, s.db.getPath(height))\n\t\tif len(knownBlocks) >= 12 {\n\t\t\tstep *= 2\n\t\t}\n\n\t\t// this check has to come before height -= step;\n\t\t// otherwise we might underflow\n\t\tif height <= step {\n\t\t\tbreak\n\t\t}\n\t}\n\t// always include the genesis block\n\tknownBlocks = append(knownBlocks, s.db.getPath(0))\n\n\tcopy(blockIDs[:], knownBlocks)\n\treturn\n}", "title": "" }, { "docid": "cc5e705cadb75df6f7fb37ef24161bb2", "score": "0.46291852", "text": "func (m *monitor) getOtherNodesBlockByHeight(hi uint64) error {\n\tif len(m.peers) <= 0 {\n\t\treturn fmt.Errorf(\"the length of peers <= 0\")\n\t}\n\tfor i := 0; i < m.peersLen; i++ {\n\t\tif len(m.peers[i]) <= 0 {\n\t\t\tcontinue\n\t\t} else {\n\t\t\tresb, err := m.getBlockByHeight(hi, m.peers[i])\n\t\t\tif err != nil {\n\t\t\t\tnodesNum-- //Prevent dead nodes from being counted\n\t\t\t\tlogger.Error(\"Call getBlockByHeight error:\", zap.String(\"node address\", m.peers[i]), zap.Uint64(\"height\", hi), zap.Int(\"nodesNum\", nodesNum), zap.Error(err))\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tm.mBlocks[m.peers[i]] = resb\n\t\t}\n\t}\n\tif len(m.mBlocks) == 0 {\n\t\treturn fmt.Errorf(\"the length of map mBlocks <= 0\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "2912316b221b7e9a3a987bea40425564", "score": "0.4622603", "text": "func (l *ByteIntervalList) Back() *ByteIntervalElement {\n\tif l.len == 0 {\n\t\treturn nil\n\t}\n\treturn l.root.prev\n}", "title": "" }, { "docid": "7d948971700b50ba2555f90725728554", "score": "0.4620949", "text": "func (f *fetcherTester) chainHeight() uint64 {\n\tf.lock.RLock()\n\tdefer f.lock.RUnlock()\n\n\treturn f.blocks[f.hashes[len(f.hashes)-1]].NumberU64()\n}", "title": "" }, { "docid": "4a1a0a6f5590cfa3184470f68879af0b", "score": "0.46180463", "text": "func (p *path) backtrack(explored map[common.Coordinate]Survey) (common.Coordinate, error) {\n\tfor i := p.size - 2; i >= 0; i -= 1 {\n\t\tc := p.coordinates[i]\n\t\tif _, _, found := pickNeighbor(c, explored); found {\n\t\t\tp.size = i + 1 // shrink\n\t\t\treturn c, nil\n\t\t}\n\t}\n\treturn common.Coordinate{}, fmt.Errorf(\"Couldn't find a coordinate, which is not fully explored, in the path\")\n}", "title": "" }, { "docid": "5c780cb6b6f1670fe630fade0b390df7", "score": "0.46101177", "text": "func (s *IndexSubscriber) CatchUp(ctx context.Context, _ database.DB, queryer ChainQueryer) error {\n\tlowestHeight, bestHeight, err := s.findLowestIndexTipHeight(queryer)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Nothing to do if all indexes are synced.\n\tif bestHeight == lowestHeight {\n\t\treturn nil\n\t}\n\n\t// Create a progress logger for the indexing process below.\n\tprogressLogger := progresslog.NewBlockProgressLogger(\"Indexed\", log)\n\n\t// tip and need to be caught up, so log the details and loop through\n\t// each block that needs to be 
indexed.\n\tlog.Infof(\"Catching up from height %d to %d\", lowestHeight,\n\t\tbestHeight)\n\n\tvar cachedParent *dcrutil.Block\n\tfor height := lowestHeight + 1; height <= bestHeight; height++ {\n\t\tif interruptRequested(ctx) {\n\t\t\treturn indexerError(ErrInterruptRequested, interruptMsg)\n\t\t}\n\n\t\thash, err := queryer.BlockHashByHeight(height)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Ensure the next tip hash is on the main chain.\n\t\tif !queryer.MainChainHasBlock(hash) {\n\t\t\tmsg := fmt.Sprintf(\"the next block being synced to (%s) \"+\n\t\t\t\t\"at height %d is not on the main chain\", hash, height)\n\t\t\treturn indexerError(ErrBlockNotOnMainChain, msg)\n\t\t}\n\n\t\tvar parent *dcrutil.Block\n\t\tif cachedParent == nil && height > 0 {\n\t\t\tparentHash, err := queryer.BlockHashByHeight(height - 1)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tparent, err = queryer.BlockByHash(parentHash)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\tparent = cachedParent\n\t\t}\n\n\t\tchild, err := queryer.BlockByHash(hash)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Construct and send the index notification.\n\t\tisTreasuryEnabled, err := queryer.IsTreasuryAgendaActive(parent.Hash())\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tntfn := &IndexNtfn{\n\t\t\tNtfnType: ConnectNtfn,\n\t\t\tBlock: child,\n\t\t\tParent: parent,\n\t\t\tIsTreasuryEnabled: isTreasuryEnabled,\n\t\t}\n\n\t\t// Relay the index update to subscribed indexes.\n\t\tfor _, sub := range s.subscriptions {\n\t\t\terr := updateIndex(ctx, sub.idx, ntfn)\n\t\t\tif err != nil {\n\t\t\t\ts.cancel()\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\tcachedParent = child\n\n\t\tprogressLogger.LogBlockHeight(child.MsgBlock())\n\t}\n\n\tlog.Infof(\"Caught up to height %d\", bestHeight)\n\n\treturn nil\n}", "title": "" }, { "docid": "cd0abbcb69f396d2ee23cd69ed441c80", "score": "0.4609535", "text": "func (tree *ByPrev) Find(key common.BlockIdType) IteratorByPrev {\n\tif true {\n\t\tlower := tree.LowerBound(key)\n\t\tif !lower.IsEnd() && ByPrevCompare(key, lower.Key()) == 0 {\n\t\t\treturn lower\n\t\t}\n\t\treturn tree.End()\n\t} else {\n\t\tif node := tree.lookup(key); node != nil {\n\t\t\treturn IteratorByPrev{tree, node, betweenByPrev}\n\t\t}\n\t\treturn tree.End()\n\t}\n}", "title": "" }, { "docid": "b38bcc33dce38ad02eb0a4de637d1a0e", "score": "0.45998305", "text": "func (b *BlockChain) getPrevNodeFromBlock(block *massutil.Block) (*blockNode, error) {\n\tprevHash := &block.MsgBlock().Header.Previous\n\tif prevHash.IsEqual(zeroHash) {\n\t\treturn nil, nil\n\t}\n\n\tif bn, ok := b.index[*prevHash]; ok {\n\t\treturn bn, nil\n\t}\n\n\tprevBlockNode, err := b.loadBlockNode(prevHash)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn prevBlockNode, nil\n}", "title": "" }, { "docid": "d3ec5cec0508edf6a0d22f35c0acf316", "score": "0.4597852", "text": "func (it *Iterator) Prev() *base.InternalKey {\n\tit.nd = it.list.getPrev(it.nd, 0)\n\tif it.nd == it.list.head {\n\t\treturn nil\n\t}\n\tnodeKey := it.list.getKey(it.nd)\n\tif it.lower != nil && it.list.cmp(it.lower, nodeKey.UserKey) > 0 {\n\t\tit.nd = it.list.head\n\t\treturn nil\n\t}\n\tit.key = nodeKey\n\treturn &it.key\n}", "title": "" }, { "docid": "37ba9adb08957585f379046370b8bfa2", "score": "0.45942846", "text": "func (b *Block) LastCommitHash() crypto.Hash { return b.header.LastCommitHash }", "title": "" }, { "docid": "f0af3a416c26d83e32831f01336b18a8", "score": "0.4580277", "text": "func (api *PublicEthereumAPI) 
GetUncleByBlockHashAndIndex(hash common.Hash, idx hexutil.Uint) map[string]interface{} {\n\treturn nil\n}", "title": "" }, { "docid": "5e0a2aeffd40c2a3b396d6e77e80f428", "score": "0.45721477", "text": "func (b *Board) Topple(r, c int) {\n if b.cell[r][c] >= 4 {\n if b.Contains(r - 1, c) {\n b.Set(r - 1, c, b.cell[r - 1][c] + 1)\n b.Set(r, c, b.cell[r][c] - 1)\n } else {\n b.Set(r, c, b.cell[r][c] - 1)\n }\n if b.Contains(r, c - 1) {\n b.Set(r, c - 1, b.cell[r][c - 1] + 1)\n b.Set(r, c, b.cell[r][c] - 1)\n } else {\n b.Set(r, c, b.cell[r][c] - 1)\n }\n if b.Contains(r, c + 1) {\n b.Set(r, c + 1, b.cell[r][c + 1] + 1)\n b.Set(r, c, b.cell[r][c] - 1)\n } else {\n b.Set(r, c, b.cell[r][c] - 1)\n }\n if b.Contains(r + 1, c) {\n b.Set(r + 1, c, b.cell[r + 1][c] + 1)\n b.Set(r, c, b.cell[r][c] - 1)\n } else {\n b.Set(r, c, b.cell[r][c] - 1)\n }\n }\n}", "title": "" }, { "docid": "dd3e4de109c61d738e393a87801f2ea3", "score": "0.4563295", "text": "func (s *SMT) interiorHash(left, right []byte, height, iBatch int, oldRoot []byte, shortcut, store bool, keys, values, batch [][]byte) []byte {\n\tvar h []byte\n\tif (len(left) == 0) && (len(right)) == 0 {\n\t\t// if a key was deleted, the node becomes default\n\t\tbatch[2*iBatch+1] = left\n\t\tbatch[2*iBatch+2] = right\n\t\ts.deleteOldNode(oldRoot, height)\n\t\treturn nil\n\t} else if len(left) == 0 {\n\t\th = s.hash(s.defaultHashes[height-1], right[:HashLength])\n\t} else if len(right) == 0 {\n\t\th = s.hash(left[:HashLength], s.defaultHashes[height-1])\n\t} else {\n\t\th = s.hash(left[:HashLength], right[:HashLength])\n\t}\n\tif !store {\n\t\t// a shortcut node cannot move up\n\t\treturn append(h, byte(0))\n\t}\n\tif !shortcut {\n\t\th = append(h, byte(0))\n\t} else {\n\t\t// store the value at the shortcut node instead of height 0.\n\t\th = append(h, byte(1))\n\t\tleft = append(keys[0], byte(2))\n\t\tright = append(values[0], byte(2))\n\t}\n\tbatch[2*iBatch+2] = right\n\tbatch[2*iBatch+1] = left\n\n\t/*\n\t\tif len(batch[2*iBatch+2]) == 0 {\n\t\t\tif len(right) != 0 {\n\t\t\t\tbatch[2*iBatch+2] = append(batch[2*iBatch+2], right...)\n\t\t\t}\n\t\t} else {\n\t\t\tif len(right) == 0 {\n\t\t\t\tbatch[2*iBatch+2] = nil\n\t\t\t} else {\n\t\t\t\tcopy(batch[2*iBatch+2], right)\n\t\t\t}\n\t\t}\n\t\tif len(batch[2*iBatch+1]) == 0 {\n\t\t\tif len(left) != 0 {\n\t\t\t\tbatch[2*iBatch+1] = append(batch[2*iBatch+1], left...)\n\t\t\t}\n\t\t} else {\n\t\t\tif len(left) == 0 {\n\t\t\t\tbatch[2*iBatch+1] = nil\n\t\t\t} else {\n\t\t\t\tcopy(batch[2*iBatch+1], left)\n\t\t\t}\n\t\t}\n\t*/\n\n\t// maybe store batch node\n\tif (height)%4 == 0 {\n\t\tif shortcut {\n\t\t\tbatch[0] = []byte{1}\n\t\t} else {\n\t\t\tbatch[0] = []byte{0}\n\t\t}\n\n\t\ts.storeNode(batch, h, oldRoot, height)\n\t}\n\treturn h\n}", "title": "" }, { "docid": "e369a9316660c0f6328ef4362ebb21e6", "score": "0.45607767", "text": "func (t tile) hash(n compact.NodeID) []byte {\n\tr := rf.NewEmptyRange(0)\n\n\tleft := n.Index << uint64(n.Level)\n\tright := (n.Index + 1) << uint64(n.Level)\n\n\tif len(t.leaves) < int(right) {\n\t\tpanic(fmt.Sprintf(\"index %d out of range of %d leaves\", right, len(t.leaves)))\n\t}\n\tfor _, l := range t.leaves[left:right] {\n\t\tr.Append(l[:], nil)\n\t}\n\troot, err := r.GetRootHash(nil)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn root\n}", "title": "" }, { "docid": "d6f4945ec65ee05496b2811b1ff85ac9", "score": "0.45604274", "text": "func (b *blockManager) findNextHeaderCheckpoint(height int64) *chaincfg.Checkpoint {\n\tcheckpoints := b.cfg.Chain.Checkpoints()\n\tif 
len(checkpoints) == 0 {\n\t\treturn nil\n\t}\n\n\t// There is no next checkpoint if the height is already after the final\n\t// checkpoint.\n\tfinalCheckpoint := &checkpoints[len(checkpoints)-1]\n\tif height >= finalCheckpoint.Height {\n\t\treturn nil\n\t}\n\n\t// Find the next checkpoint.\n\tnextCheckpoint := finalCheckpoint\n\tfor i := len(checkpoints) - 2; i >= 0; i-- {\n\t\tif height >= checkpoints[i].Height {\n\t\t\tbreak\n\t\t}\n\t\tnextCheckpoint = &checkpoints[i]\n\t}\n\treturn nextCheckpoint\n}", "title": "" }, { "docid": "a9a5fe8641724ec5df1f36908d943fc2", "score": "0.45599246", "text": "func findPath(graph *channeldb.ChannelGraph, sourceNode *channeldb.LightningNode,\n\ttarget *btcec.PublicKey, ignoredNodes map[vertex]struct{},\n\tignoredEdges map[uint64]struct{}, amt lnwire.MilliSatoshi) ([]*ChannelHop, error) {\n\n\t// First we'll initialize an empty heap which'll help us to quickly\n\t// locate the next edge we should visit next during our graph\n\t// traversal.\n\tvar nodeHeap distanceHeap\n\n\t// For each node/vertex the graph we create an entry in the distance\n\t// map for the node set with a distance of \"infinity\". We also mark\n\t// add the node to our set of unvisited nodes.\n\tdistance := make(map[vertex]nodeWithDist)\n\tif err := graph.ForEachNode(nil, func(_ *bolt.Tx, node *channeldb.LightningNode) error {\n\t\t// TODO(roasbeef): with larger graph can just use disk seeks\n\t\t// with a visited map\n\t\tdistance[newVertex(node.PubKey)] = nodeWithDist{\n\t\t\tdist: infinity,\n\t\t\tnode: node,\n\t\t}\n\t\treturn nil\n\t}); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// To start, we add the source of our path finding attempt to the\n\t// distance map with with a distance of 0. This indicates our starting\n\t// point in the graph traversal.\n\tsourceVertex := newVertex(sourceNode.PubKey)\n\tdistance[sourceVertex] = nodeWithDist{\n\t\tdist: 0,\n\t\tnode: sourceNode,\n\t}\n\n\t// To start, our source node will the sole item within our distance\n\t// heap.\n\theap.Push(&nodeHeap, distance[sourceVertex])\n\n\t// We'll use this map as a series of \"previous\" hop pointers. 
So to get\n\t// to `vertex` we'll take the edge that it's mapped to within `prev`.\n\tprev := make(map[vertex]edgeWithPrev)\n\n\tfor nodeHeap.Len() != 0 {\n\t\t// Fetch the node within the smallest distance from our source\n\t\t// from the heap.\n\t\tpartialPath := heap.Pop(&nodeHeap).(nodeWithDist)\n\t\tbestNode := partialPath.node\n\n\t\t// If we've reached our target (or we don't have any outgoing\n\t\t// edges), then we're done here and can exit the graph\n\t\t// traversal early.\n\t\tif bestNode.PubKey.IsEqual(target) {\n\t\t\tbreak\n\t\t}\n\n\t\t// Now that we've found the next potential step to take we'll\n\t\t// examine all the outgoing edge (channels) from this node to\n\t\t// further our graph traversal.\n\t\tpivot := newVertex(bestNode.PubKey)\n\t\terr := bestNode.ForEachChannel(nil, func(tx *bolt.Tx,\n\t\t\tedgeInfo *channeldb.ChannelEdgeInfo,\n\t\t\toutEdge, inEdge *channeldb.ChannelEdgePolicy) error {\n\n\t\t\tv := newVertex(outEdge.Node.PubKey)\n\n\t\t\t// TODO(roasbeef): skip if disabled\n\n\t\t\t// If this vertex or edge has been black listed, then\n\t\t\t// we'll skip exploring this edge during this\n\t\t\t// iteration.\n\t\t\tif _, ok := ignoredNodes[v]; ok {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tif _, ok := ignoredEdges[outEdge.ChannelID]; ok {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tif inEdge == nil {\n\t\t\t\treturn nil\n\t\t\t}\n\n\t\t\t// Compute the tentative distance to this new\n\t\t\t// channel/edge which is the distance to our current\n\t\t\t// pivot node plus the weight of this edge.\n\t\t\ttempDist := distance[pivot].dist + edgeWeight(inEdge)\n\n\t\t\t// If this new tentative distance is better than the\n\t\t\t// current best known distance to this node, then we\n\t\t\t// record the new better distance, and also populate\n\t\t\t// our \"next hop\" map with this edge. We'll also shave\n\t\t\t// off irrelevant edges by adding the sufficient\n\t\t\t// capacity of an edge to our relaxation condition.\n\t\t\tif tempDist < distance[v].dist &&\n\t\t\t\tedgeInfo.Capacity >= amt.ToSatoshis() {\n\n\t\t\t\t// TODO(roasbeef): need to also account\n\t\t\t\t// for min HTLC\n\n\t\t\t\tdistance[v] = nodeWithDist{\n\t\t\t\t\tdist: tempDist,\n\t\t\t\t\tnode: outEdge.Node,\n\t\t\t\t}\n\t\t\t\tprev[v] = edgeWithPrev{\n\t\t\t\t\t// We'll use the *incoming* edge here\n\t\t\t\t\t// as we need to use the routing policy\n\t\t\t\t\t// specified by the node this channel\n\t\t\t\t\t// connects to.\n\t\t\t\t\tedge: &ChannelHop{\n\t\t\t\t\t\tChannelEdgePolicy: inEdge,\n\t\t\t\t\t\tCapacity: edgeInfo.Capacity,\n\t\t\t\t\t},\n\t\t\t\t\tprevNode: bestNode.PubKey,\n\t\t\t\t}\n\n\t\t\t\t// In order for the path unwinding to work\n\t\t\t\t// properly, we'll ensure that this edge\n\t\t\t\t// properly points to the outgoing node.\n\t\t\t\t//\n\t\t\t\t// TODO(roasbeef): revisit, possibly switch db\n\t\t\t\t// format?\n\t\t\t\tprev[v].edge.Node = outEdge.Node\n\n\t\t\t\t// Add this new node to our heap as we'd like\n\t\t\t\t// to further explore down this edge.\n\t\t\t\theap.Push(&nodeHeap, distance[v])\n\t\t\t}\n\t\t\treturn nil\n\t\t})\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// If the target node isn't found in the prev hop map, then a path\n\t// doesn't exist, so we terminate in an error.\n\tif _, ok := prev[newVertex(target)]; !ok {\n\t\treturn nil, newErrf(ErrNoPathFound, \"unable to find a path to \"+\n\t\t\t\"destination\")\n\t}\n\n\t// If the potential route if below the max hop limit, then we'll use\n\t// the prevHop map to unravel the path. 
We end up with a list of edges\n\t// in the reverse direction which we'll use to properly calculate the\n\t// timelock and fee values.\n\tpathEdges := make([]*ChannelHop, 0, len(prev))\n\tprevNode := newVertex(target)\n\tfor prevNode != sourceVertex { // TODO(roasbeef): assumes no cycles\n\t\t// Add the current hop to the limit of path edges then walk\n\t\t// backwards from this hop via the prev pointer for this hop\n\t\t// within the prevHop map.\n\t\tpathEdges = append(pathEdges, prev[prevNode].edge)\n\t\tprev[prevNode].edge.Node.PubKey.Curve = nil\n\n\t\tprevNode = newVertex(prev[prevNode].prevNode)\n\t}\n\n\t// The route is invalid if it spans more than 20 hops. The current\n\t// Sphinx (onion routing) implementation can only encode up to 20 hops\n\t// as the entire packet is fixed size. If this route is more than 20\n\t// hops, then it's invalid.\n\tnumEdges := len(pathEdges)\n\tif numEdges > HopLimit {\n\t\treturn nil, newErr(ErrMaxHopsExceeded, \"potential path has \"+\n\t\t\t\"too many hops\")\n\t}\n\n\t// As our traversal of the prev map above walked backwards from the\n\t// target to the source in the route, we need to reverse it before\n\t// returning the final route.\n\tfor i := 0; i < numEdges/2; i++ {\n\t\tpathEdges[i], pathEdges[numEdges-i-1] = pathEdges[numEdges-i-1], pathEdges[i]\n\t}\n\n\treturn pathEdges, nil\n}", "title": "" }, { "docid": "10bb4518eee54b115fece608db2f8635", "score": "0.4556615", "text": "func (i *rawBlockIter) Prev() bool {\n\tif n := len(i.cached) - 1; n > 0 && i.cached[n].offset == i.offset {\n\t\ti.nextOffset = i.offset\n\t\te := &i.cached[n-1]\n\t\ti.offset = e.offset\n\t\ti.val = getBytes(unsafe.Pointer(uintptr(i.ptr)+uintptr(e.valStart)), int(e.valSize))\n\t\ti.ikey.UserKey = i.cachedBuf[e.keyStart:e.keyEnd]\n\t\ti.cached = i.cached[:n]\n\t\treturn true\n\t}\n\n\tif i.offset == 0 {\n\t\ti.offset = -1\n\t\ti.nextOffset = 0\n\t\treturn false\n\t}\n\n\ttargetOffset := i.offset\n\tindex := sort.Search(int(i.numRestarts), func(j int) bool {\n\t\toffset := int32(binary.LittleEndian.Uint32(i.data[int(i.restarts)+4*j:]))\n\t\treturn offset >= targetOffset\n\t})\n\ti.offset = 0\n\tif index > 0 {\n\t\ti.offset = int32(binary.LittleEndian.Uint32(i.data[int(i.restarts)+4*(index-1):]))\n\t}\n\n\ti.readEntry()\n\ti.clearCache()\n\ti.cacheEntry()\n\n\tfor i.nextOffset < targetOffset {\n\t\ti.offset = i.nextOffset\n\t\ti.readEntry()\n\t\ti.cacheEntry()\n\t}\n\n\ti.ikey.UserKey = i.key\n\treturn true\n}", "title": "" }, { "docid": "2fe6238e9f6fdc8d5822f6e676a2ce3d", "score": "0.45530182", "text": "func (b *Block) LastCommit() *Commit { return CopyCommit(b.lastCommit) }", "title": "" }, { "docid": "65d7d0411f7d7828041dd43aba983fbc", "score": "0.45520085", "text": "func ExtractPrevBlockHashBE(header btcspv.RawHeader) btcspv.Hash256Digest {\n\treturn btcspv.ReverseHash256Endianness(btcspv.ExtractPrevBlockHashLE(header))\n}", "title": "" }, { "docid": "d40f95574e5ac6e1e0316f3696a602bf", "score": "0.4551357", "text": "func (s *BitSet) PrevUnset(current uint64) uint64 {\n\tfor i := current; i > 0; i-- {\n\t\tif !s.IsSet(i) && i <= s.len {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn 0\n}", "title": "" }, { "docid": "af4ee20dcfe7f7853d054db89776943d", "score": "0.45501822", "text": "func (cache *blockCache) reorg(from int64) {\n\tcache.mtx.Lock()\n\tdefer cache.mtx.Unlock()\n\tif from < 0 {\n\t\treturn\n\t}\n\tfor height := uint32(from); height <= cache.best.height; height++ {\n\t\tblock, found := cache.mainchain[height]\n\t\tif !found {\n\t\t\tcache.log.Errorf(\"reorg block 
not found on mainchain at height %d for a reorg from %d to %d\", height, from, cache.best.height)\n\t\t\tcontinue\n\t\t}\n\t\t// Delete the block from mainchain.\n\t\tdelete(cache.mainchain, block.height)\n\t\t// Store an orphaned block in the blocks cache.\n\t\tcache.blocks[block.hash] = &dcrBlock{\n\t\t\thash: block.hash,\n\t\t\theight: block.height,\n\t\t\torphaned: true,\n\t\t\tvote: block.vote,\n\t\t}\n\t}\n\t// Set this to a zero block so that the new block will replace it even if\n\t// it is of the same height as the previous best block.\n\tcache.best = dcrBlock{}\n}", "title": "" }, { "docid": "eee5b841bcafbbe9d97f08045e1ea798", "score": "0.45501414", "text": "func (iterator *IteratorByPrev) Prev() bool {\n\tif iterator.position == beginByPrev {\n\t\tgoto begin\n\t}\n\tif iterator.position == endByPrev {\n\t\tright := iterator.tree.Right()\n\t\tif right == nil {\n\t\t\tgoto begin\n\t\t}\n\t\titerator.node = right\n\t\tgoto between\n\t}\n\tif iterator.node.Left != nil {\n\t\titerator.node = iterator.node.Left\n\t\tfor iterator.node.Right != nil {\n\t\t\titerator.node = iterator.node.Right\n\t\t}\n\t\tgoto between\n\t}\n\tif iterator.node.Parent != nil {\n\t\tnode := iterator.node\n\t\tfor iterator.node.Parent != nil {\n\t\t\titerator.node = iterator.node.Parent\n\t\t\tif node == iterator.node.Right {\n\t\t\t\tgoto between\n\t\t\t}\n\t\t\tnode = iterator.node\n\t\t\t//if iterator.tree.Comparator(node.Key, iterator.node.Key) >= 0 {\n\t\t\t//\tgoto between\n\t\t\t//}\n\t\t}\n\t}\n\nbegin:\n\titerator.node = nil\n\titerator.position = beginByPrev\n\treturn false\n\nbetween:\n\titerator.position = betweenByPrev\n\treturn true\n}", "title": "" }, { "docid": "bbd901ca610d62efc5edacfb8553b545", "score": "0.4549449", "text": "func (f Finder) FindPrevSibling(pred pred.Predicate) (r Finder) {\n\tif f.Node == nil {\n\t\treturn\n\t}\n\n\tfor c := f.Node.PrevSibling; c != nil; c = c.PrevSibling {\n\t\tif pred(c) {\n\t\t\treturn Finder{c}\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "c472f76297cd06e8a9235944618f6297", "score": "0.45344627", "text": "func (c *Node) Uncover() {\n\t// log.Println(\"Uncover col\", c.Name)\n\tfor i := c.Up; i != c; i = i.Up {\n\t\tfor j := i.Left; j != i; j = j.Left {\n\t\t\tj.Col.Size++\n\t\t\tj.Down.Up = j\n\t\t\tj.Up.Down = j\n\t\t}\n\t}\n\tc.Right.Left = c\n\tc.Left.Right = c\n}", "title": "" }, { "docid": "917706e0f1bfa6e3aa13d660f8705057", "score": "0.4532595", "text": "func GetLastBlockHeader(db database.Db, lastSha *chainhash.Hash, proofOfStake bool) (\n\theader *wire.BlockHeader, meta *wire.Meta, err error) {\n\tsha := lastSha\n\tfor true {\n\t\theader, meta, err = db.FetchBlockHeaderBySha(sha)\n\t\tif err != nil {\n\t\t\tbreak\n\t\t}\n\t\tif header.PrevBlock.IsEqual(zeroHash) {\n\t\t\tbreak\n\t\t}\n\t\tif isProofOfStake(meta) == proofOfStake {\n\t\t\tbreak\n\t\t}\n\t\tsha = &header.PrevBlock\n\t}\n\treturn\n}", "title": "" }, { "docid": "8f73883c0232bbc8cc4d4a1f477610bd", "score": "0.4530001", "text": "func GetLastBlockHashes(lookback int) ([]*types.BlockHashNumber, error) {\n\tvar hashes []*types.BlockHashNumber\n\terr := DB.Select(&hashes, \"SELECT statehash, canonical, previousstatehash, height FROM blocks ORDER BY height DESC limit $1\", lookback)\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error retrieving last block hashes: %w\", err)\n\t}\n\n\treturn hashes, nil\n}", "title": "" }, { "docid": "a4c4b738a966adc8e4768b0de276e423", "score": "0.45291707", "text": "func (bs *BlockStore) PruneBlocks(height int64) (uint64, error) {\n\tif height 
<= 0 {\n\t\treturn 0, fmt.Errorf(\"height must be greater than 0\")\n\t}\n\tbs.mtx.RLock()\n\tif height > bs.height {\n\t\tbs.mtx.RUnlock()\n\t\treturn 0, fmt.Errorf(\"cannot prune beyond the latest height %v\", bs.height)\n\t}\n\tbase := bs.base\n\tbs.mtx.RUnlock()\n\tif height < base {\n\t\treturn 0, fmt.Errorf(\"cannot prune to height %v, it is lower than base height %v\",\n\t\t\theight, base)\n\t}\n\n\tpruned := uint64(0)\n\tbatch := bs.db.NewBatch()\n\tdefer batch.Close()\n\tflush := func(batch dbm.Batch, base int64) error {\n\t\t// We can't trust batches to be atomic, so update base first to make sure noone\n\t\t// tries to access missing blocks.\n\t\tbs.mtx.Lock()\n\t\tbs.base = base\n\t\tbs.mtx.Unlock()\n\t\tbs.saveState()\n\n\t\terr := batch.WriteSync()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to prune up to height %v: %w\", base, err)\n\t\t}\n\t\tbatch.Close()\n\t\treturn nil\n\t}\n\n\tfor h := base; h < height; h++ {\n\t\tmeta := bs.LoadBlockMeta(h)\n\t\tif meta == nil { // assume already deleted\n\t\t\tcontinue\n\t\t}\n\t\tif err := batch.Delete(calcBlockMetaKey(h)); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\tif err := batch.Delete(calcBlockHashKey(meta.BlockID.Hash)); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\tif err := batch.Delete(calcBlockCommitKey(h)); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\tif err := batch.Delete(calcSeenCommitKey(h)); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\tfor p := 0; p < int(meta.BlockID.PartSetHeader.Total); p++ {\n\t\t\tif err := batch.Delete(calcBlockPartKey(h, p)); err != nil {\n\t\t\t\treturn 0, err\n\t\t\t}\n\t\t}\n\t\tpruned++\n\n\t\t// flush every 1000 blocks to avoid batches becoming too large\n\t\tif pruned%1000 == 0 && pruned > 0 {\n\t\t\terr := flush(batch, h)\n\t\t\tif err != nil {\n\t\t\t\treturn 0, err\n\t\t\t}\n\t\t\tbatch = bs.db.NewBatch()\n\t\t\tdefer batch.Close()\n\t\t}\n\t}\n\n\terr := flush(batch, height)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn pruned, nil\n}", "title": "" }, { "docid": "10ae04ea767f9e5a597354f4fb012efa", "score": "0.45280245", "text": "func (this *shortestPathQueue) popNearest() *shortestPathVertex {\n return heap.Pop(this).(*shortestPathVertex)\n}", "title": "" } ]
b8e2adf7dc66c50c32bd24f8ca6100ca
StopSpan ends the span specified
[ { "docid": "dbfe8ebbc375c3f8c0c275539ab1c6b4", "score": "0.86177105", "text": "func StopSpan(span trace.Span) {\n\tspan.End()\n}", "title": "" } ]
[ { "docid": "7f49b3d3ba58f2798badbf4fb59631e3", "score": "0.73980534", "text": "func EndSpan(span interface{}, status int) error {\n\topenlogging.Debug(\"EndSpan \" + strconv.Itoa(status))\n\tapm.EndSpan(span, status, troption)\n\treturn nil\n}", "title": "" }, { "docid": "d40248cfc29fbabbb9a24ffd61366eb5", "score": "0.69898224", "text": "func endSpan(s oteltrace.Span, err error) {\n\tif err != nil {\n\t\ts.SetStatus(codes.Error, err.Error())\n\t}\n\ts.End()\n}", "title": "" }, { "docid": "d75eeb39a5fda1a44a6a7b7e43c98238", "score": "0.6917989", "text": "func (tracerObj *traceHandler) stopNodeSpan(node string) {\n\n\ttracerObj.nodeSpans[node].Finish()\n}", "title": "" }, { "docid": "4b19d243c4e8c184b8a21dfd69b7fb26", "score": "0.6889687", "text": "func (tracerObj *traceHandler) stopOperationSpan(node string, operationId string) {\n\n\tif tracerObj.nodeSpans[node] == nil {\n\t\treturn\n\t}\n\n\toperationSpans := tracerObj.operationSpans[node]\n\toperationSpans[operationId].Finish()\n}", "title": "" }, { "docid": "287b6f9b59c8374ec2bbd2614d33de38", "score": "0.6775037", "text": "func (c *SpanCollector) Stop() {\n\tif swapSpan(&c.check, donePointer) != donePointer {\n\t\tclose(c.done)\n\t}\n}", "title": "" }, { "docid": "b682e6a5890ce33dbeb1363d371017c5", "score": "0.64648265", "text": "func (s *noopSpan) End(options *EndOptions) {\n\t// noop\n}", "title": "" }, { "docid": "ea808a6211410e564120ea42bca2402e", "score": "0.6438027", "text": "func FinishSpan(span opentracing.Span, err error) {\n\tif span == nil {\n\t\treturn\n\t}\n\tdefer span.Finish()\n\n\tif err != nil {\n\t\text.LogError(span, err)\n\t} else {\n\t\text.Error.Set(span, false)\n\t}\n}", "title": "" }, { "docid": "697ada16d679e7ea466270b9847dac29", "score": "0.64351064", "text": "func (os otelSpan) End(options ...trace.EndOption) {\n\treturn\n}", "title": "" }, { "docid": "ac7920c0aa196e9ad236e56bc071c4f6", "score": "0.6429452", "text": "func stopSpanning()string{\n return \"</span>\"\n}", "title": "" }, { "docid": "b89063a454031f692c96b5326732822c", "score": "0.62725365", "text": "func (tracerObj *traceHandler) stopReqSpan() {\n\tif tracerObj.reqSpan == nil {\n\t\treturn\n\t}\n\n\ttracerObj.reqSpan.Finish()\n}", "title": "" }, { "docid": "60fbc71549d9604a7d3ae65c35fcc4f0", "score": "0.6054315", "text": "func End(tag string, spanID uint64) error {\n\ttracer := []byte(\"<:\" + strconv.FormatUint(spanID, 10) + \":\" + tag + \"::\")\n\n\treturn writeTracer(tracer)\n}", "title": "" }, { "docid": "d9cea602b6eb60bd9b128771e16de477", "score": "0.59251326", "text": "func (s *BaseFoxySheepListener) ExitSpanB(ctx *SpanBContext) {}", "title": "" }, { "docid": "bf34c6470fe4e079d14d51537d3dee03", "score": "0.58865243", "text": "func (s *Span) Finish() {\n\ts.finish(now())\n}", "title": "" }, { "docid": "eefe769683e9af0ce75f7c5733e05536", "score": "0.5843401", "text": "func (hct *httpClientTracer) finishSpan() {\n\thct.mutex.Lock()\n\tdefer hct.mutex.Unlock()\n\thct.currentSpan.ClientFinish(hct.traceClient)\n}", "title": "" }, { "docid": "5f3bc8544478c3e5c05d45a47701f415", "score": "0.5799249", "text": "func (s *spanImpl) Finish() {\n\ts.FinishWithOptions(opentracing.FinishOptions{})\n}", "title": "" }, { "docid": "db1ff87930ef0a22dbe1c42c6280b132", "score": "0.5742006", "text": "func (s *Span) Finish() {\n\ts.span.Finish(tracer.WithError(s.err))\n}", "title": "" }, { "docid": "008118ae6f7a806ed02873e8c39888b3", "score": "0.57143205", "text": "func (s *BasevbaListener) ExitStopStmt(ctx *StopStmtContext) {}", "title": "" }, { "docid": 
"66ca1c48eb411c35a3d7f065bc86234c", "score": "0.569777", "text": "func (this *spanner) span(ambit *Ambit) []*spanT {\n tokens := this.tokenizer.Tokenize(ambit)\n var spans []*spanT\n for len(tokens) > 0 {\n spans, tokens = this.span2(spans, tokens)\n if len(tokens) > 0 {\n // stray closing\n var token *Token\n token, tokens = tokens[0], tokens[1:]\n spans = append(spans, &spanT{ Cat: \"ERR\", Err: fmt.Sprintf(\"unexpected closing bracket: '%s'\", token.Ambit.ToString()), Ambit: token.Ambit })\n }\n }\n return spans\n}", "title": "" }, { "docid": "9dc645a499fd7593db0d8daafc61aff9", "score": "0.56371623", "text": "func (s *Span) Done(err error) error {\n\tif !s.end.IsZero() {\n\t\treturn err\n\t}\n\tt1 := time.Now()\n\ts.end = t1\n\ttd := t1.Sub(s.start)\n\tvar text bytes.Buffer\n\tfmt.Fprintf(&text, \"after %s\", friendlyDuration(td))\n\tif err != nil {\n\t\tfmt.Fprintf(&text, \"; err=%v\", err)\n\t}\n\tif s.optText != \"\" {\n\t\tfmt.Fprintf(&text, \"; %v\", s.optText)\n\t}\n\tif st, ok := s.el.(Spanner); ok {\n\t\tpool.CoordinatorProcess().PutSpanRecord(st.SpanRecord(s, err))\n\t}\n\ts.el.LogEventTime(\"finish_\"+s.event, text.String())\n\treturn err\n}", "title": "" }, { "docid": "09a367bc6fe8968049e84d5a6482d5ef", "score": "0.5611326", "text": "func (s *Span) Finish(opts ...FinishOption) {\n\tif s == nil {\n\t\treturn\n\t}\n\tif !s.Traced() {\n\t\treturn\n\t}\n\ts.trace.finish(s, opts...)\n}", "title": "" }, { "docid": "3106ef6728fa239b42e59845058bb242", "score": "0.5593392", "text": "func (s *BaseFoxySheepListener) ExitSpanA(ctx *SpanAContext) {}", "title": "" }, { "docid": "011645529826c3488cfef65ee82738ab", "score": "0.5592174", "text": "func (ssm *SpanProcessor) OnEnd(span sdktrace.ReadOnlySpan) {\n\tsc := span.SpanContext()\n\tif sc.IsValid() {\n\t\tssm.activeSpansStore.Delete(spanKey(sc))\n\t}\n\n\tname := span.Name()\n\tvalue, ok := ssm.spanSampleStores.Load(name)\n\tif !ok {\n\t\tvalue, _ = ssm.spanSampleStores.LoadOrStore(name, newSampleStore(defaultBucketCapacity, defaultBucketCapacity))\n\t}\n\tvalue.(*sampleStore).sampleSpan(span)\n}", "title": "" }, { "docid": "5210393aaf261035b21fd39ed6e53b30", "score": "0.55269635", "text": "func FinishAnySpan(span opentracing.Span, kvs ...interface{}) {\n\tspan = TagAnySpan(span, kvs...)\n\tif span != nil {\n\t\tspan.Finish()\n\t}\n}", "title": "" }, { "docid": "e5814d4fe91ebe7a6c6e18659d7904d3", "score": "0.5511394", "text": "func (s *MockSpan) Finish() {\n\ts.FinishTime = time.Now()\n\ts.tracer.FinishedSpans = append(s.tracer.FinishedSpans, s)\n}", "title": "" }, { "docid": "a03f9a959dbb1dea9c37ad92d190c8f2", "score": "0.54790026", "text": "func (t *trace) finish(s *Span, opts ...FinishOption) error {\n\tfor _, o := range opts {\n\t\to.modifySpan(s)\n\t}\n\ts.end = time.Now()\n\ts.construct()\n\treturn t.client.collector.Collect(s)\n}", "title": "" }, { "docid": "623bcb490bddc48c8b37f6622ccb9280", "score": "0.5421048", "text": "func (s *BaseVisualBasic6ParserListener) ExitStopStmt(ctx *StopStmtContext) {}", "title": "" }, { "docid": "cd8f2055405fb04db82f885a025e64ac", "score": "0.53984654", "text": "func freeSpan(span C.Span) {\n\tC.free(unsafe.Pointer(span.ptr))\n}", "title": "" }, { "docid": "9fd75b5c042a024cc460ed1ae3d29dc2", "score": "0.5383136", "text": "func (s *SchedulerTracerBase) FinishActiveSpan() {\n\terr := s.context.FinishActiveSpan()\n\tif err != nil {\n\t\tlog.Logger().Error(\"finishing active span fail\", zap.Error(err))\n\t}\n}", "title": "" }, { "docid": "7a5ad9fd40f3545968b0c2297ad1f052", "score": "0.534824", "text": 
"func (x *TimeSampler) Stop() {\n\tt1 := time.Now()\n\tdiff := t1.Sub(*x.t0)\n\tx.t0 = nil\n\tx.m.Add(float64(diff))\n}", "title": "" }, { "docid": "fb0aa71fdff2aa451c870c4a18f9ff29", "score": "0.53108764", "text": "func (s *Sentences) Stop(ctx *core.Context) error {\n\treturn nil\n}", "title": "" }, { "docid": "4fef10ef3aaefcda3405b2f0157ac32c", "score": "0.5276531", "text": "func (s *Span) End() time.Time {\n\treturn s.end\n}", "title": "" }, { "docid": "a3fcb497763ef65b4f78b99e88bf525f", "score": "0.52691245", "text": "func (n *Normalizer) Span(s *cpb.Span) *cpb.Span {\n\tif s == nil {\n\t\treturn nil\n\t}\n\treturn &cpb.Span{\n\t\tStart: n.Point(s.Start),\n\t\tEnd: n.Point(s.End),\n\t}\n}", "title": "" }, { "docid": "a3fcb497763ef65b4f78b99e88bf525f", "score": "0.52691245", "text": "func (n *Normalizer) Span(s *cpb.Span) *cpb.Span {\n\tif s == nil {\n\t\treturn nil\n\t}\n\treturn &cpb.Span{\n\t\tStart: n.Point(s.Start),\n\t\tEnd: n.Point(s.End),\n\t}\n}", "title": "" }, { "docid": "e2f65e28adda154c5776aaed5fc66982", "score": "0.5257372", "text": "func (s *Span) FinishWithErr(err error) {\n\tif s == nil {\n\t\treturn\n\t}\n\ts.SetError(err)\n\ts.Finish()\n}", "title": "" }, { "docid": "5cf490dadf6c7c99c366f70625be8ffd", "score": "0.52572924", "text": "func (x RVec) span(y RVec) (int, int) {\n\txL, xU := x.Ix()\n\tyL, yU := y.Ix()\n\treturn imin(xL, yL), imax(xU, yU)\n}", "title": "" }, { "docid": "52d02a47dce3c5974061bf7c0d53a9af", "score": "0.5248709", "text": "func LabeledStmtEnd(s *ast.LabeledStmt,) token.Pos", "title": "" }, { "docid": "5aca12a6fc11a9f31d3c03b0c3f51f29", "score": "0.5244831", "text": "func (r Region) End() {\n\tif r.Span != nil {\n\t\tr.Span.Finish()\n\t}\n\tr.Region.End()\n}", "title": "" }, { "docid": "2ff1734627a61a2fb9fcf3c8563e4480", "score": "0.5230695", "text": "func (s *RemotelyControlledSampler) OnFinishSpan(span *Span) SamplingDecision {\n\ts.RLock()\n\tdefer s.RUnlock()\n\treturn s.sampler.OnFinishSpan(span)\n}", "title": "" }, { "docid": "1d904e5597b29aa254d72d01e0cdca06", "score": "0.52275395", "text": "func (s *Spinner) Stop() {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\n\ts.clear()\n\ts.RestoreCursor()\n\tif len(s.StopMsg) > 0 {\n\t\tfmt.Fprintf(s.writer, s.StopMsg)\n\t}\n\ts.stopChan <- struct{}{}\n}", "title": "" }, { "docid": "7de296b7326f89847131cac4883ced3a", "score": "0.5210725", "text": "func (ssp *simpleSpanProcessor) OnEnd(s ReadOnlySpan) {\n\tssp.exporterMu.RLock()\n\tdefer ssp.exporterMu.RUnlock()\n\n\tif ssp.exporter != nil && s.SpanContext().TraceFlags().IsSampled() {\n\t\tif err := ssp.exporter.ExportSpans(context.Background(), []ReadOnlySpan{s}); err != nil {\n\t\t\totel.Handle(err)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "edbea08643becf77f283b73f36a6f855", "score": "0.5200522", "text": "func (is *Spinner) DoneStop(message string, opts ...interface{}) {\n\tis.FinalMSG = fmt.Sprintln(append([]interface{}{OkPrefix, message}, opts...)...)\n\tis.Stop()\n}", "title": "" }, { "docid": "8ca401df7581ceef10cb0551b29567eb", "score": "0.5189499", "text": "func FinishRequestSpan(s tracer.Span, status int, opts ...tracer.FinishOption) {\n\tvar statusStr string\n\tif status == 0 {\n\t\tstatusStr = \"200\"\n\t} else {\n\t\tstatusStr = strconv.Itoa(status)\n\t}\n\ts.SetTag(ext.HTTPCode, statusStr)\n\tif status >= 500 && status < 600 {\n\t\ts.SetTag(ext.Error, fmt.Errorf(\"%s: %s\", statusStr, http.StatusText(status)))\n\t}\n\ts.Finish(opts...)\n}", "title": "" }, { "docid": "5117ae9cdfc36f820a5cd1c6cd35c082", "score": "0.5188063", "text": "func 
GcpFinishSpan(span spans.Factory, resp *http.Response) time.Duration {\n\tif nil == span || span.GetStart().IsZero() {\n\t\treturn time.Duration(0)\n\t}\n\tspan.SetStatusCode(int64(resp.StatusCode))\n\tif \"\" != resp.Status {\n\t\tspan.SetStatusMessage(resp.Status)\n\t}\n\treturn span.Finish()\n}", "title": "" }, { "docid": "0fd5c61ae29ff55fdd6543fe93bd30de", "score": "0.51848984", "text": "func (rec *Receiver) EndTracesOp(\n\treceiverCtx context.Context,\n\tformat string,\n\tnumReceivedSpans int,\n\terr error,\n) {\n\trec.endOp(receiverCtx, format, numReceivedSpans, err, config.TracesDataType)\n}", "title": "" }, { "docid": "f03354d9a690a4b63f4f65d360475676", "score": "0.51542264", "text": "func (p *Patcher) PatchSpan(span *cpb.Span) (newStart, newEnd int32, exists bool) {\n\treturn p.Patch(span.GetStart().GetByteOffset(), span.GetEnd().GetByteOffset())\n}", "title": "" }, { "docid": "e51ba4f3517d6f5cd90c9fe3f8fe26ba", "score": "0.5141976", "text": "func (s *Sampler) Stop() {\n\ts.samplerEngine.Stop()\n}", "title": "" }, { "docid": "30e7e2fb75befa182a5e886ad725a77e", "score": "0.51095825", "text": "func (w *TraceWriter) Stop() {\n\tclose(w.exit)\n\tw.exitWG.Wait()\n\tw.BaseWriter.Stop()\n}", "title": "" }, { "docid": "48295d45e1ffb8faed699982af1da905", "score": "0.51043826", "text": "func (c *SpanCollector) Finish(s *monkit.Span, err error, panicked bool,\n\tfinish time.Time) {\n\texisting := loadSpan(&c.check)\n\tif existing == donePointer || existing == nil ||\n\t\texisting.Trace() != s.Trace() {\n\t\treturn\n\t}\n\tfs := &FinishedSpan{Span: s, Err: err, Panicked: panicked, Finish: finish}\n\tc.mtx.Lock()\n\tif c.root != nil {\n\t\tc.mtx.Unlock()\n\t\treturn\n\t}\n\tif existing == s {\n\t\tc.root = fs\n\t\tc.mtx.Unlock()\n\t\tc.Stop()\n\t} else {\n\t\tid, ok := s.ParentId()\n\t\tkey := spanParent{id, ok}\n\t\tc.spansByParent[key] = append(c.spansByParent[key], fs)\n\t\tc.mtx.Unlock()\n\t}\n}", "title": "" }, { "docid": "d7d77c5023d659833ef04438801fc454", "score": "0.50856763", "text": "func (m *timeView) Stop() {\n}", "title": "" }, { "docid": "e39e4c8019ed422381933efcfaa79dda", "score": "0.5073803", "text": "func getSpan(t *testing.T, d *datadriven.TestData) Span {\n\tvar str string\n\td.ScanArgs(t, \"span\", &str)\n\tparts := strings.Split(str, \",\")\n\tif len(parts) > 2 {\n\t\td.Fatalf(t, \"incorrect span format: %s\", str)\n\t\treturn Span{}\n\t} else if len(parts) == 2 {\n\t\treturn Span{Start: []byte(parts[0]), End: []byte(parts[1])}\n\t} else {\n\t\treturn MakeSingleValSpan([]byte(parts[0]))\n\t}\n}", "title": "" }, { "docid": "76ab396d0d12e0a63d534e28bd2ee3e1", "score": "0.5062688", "text": "func StopTrace(name string, guid windows.GUID) error {\n\treturn ControlTrace(TraceHandle(0), name, guid, Stop)\n}", "title": "" }, { "docid": "2f5e71bce606e54146f02fe0039b833b", "score": "0.5059629", "text": "func (t *Timings) Stop(labels ...string) {\n\tt.append(labels, false)\n}", "title": "" }, { "docid": "18c011eade270a0512e5471b2df1de0e", "score": "0.50332695", "text": "func (to *gRPCtraceObserver) consumeSpan(span *spanEvent) {\n\tif to.isAppShutdownComplete() {\n\t\treturn\n\t}\n\n\tto.supportability.increment <- observerSeen\n\n\tif to.isShutdownInitiated() {\n\t\treturn\n\t}\n\n\tselect {\n\tcase to.messages <- span:\n\tdefault:\n\t\tif to.log.DebugEnabled() {\n\t\t\tto.log.Debug(\"could not send span to trace observer because channel is full\", map[string]interface{}{\n\t\t\t\t\"channel size\": to.queueSize,\n\t\t\t})\n\t\t}\n\t}\n\n\treturn\n}", "title": "" }, { "docid": 
"1274e3d36a64b889d77d6588e6fe6dbc", "score": "0.50230604", "text": "func (recv *Spinner) Stop() {\n\tC.gtk_spinner_stop((*C.GtkSpinner)(recv.native))\n\n\treturn\n}", "title": "" }, { "docid": "ea5a76adb0bde1e9cf51123d748104c3", "score": "0.50172937", "text": "func (spinner *Spinner) Stop() error {\n\tspinner.stop <- true\n\treturn spinner.resetLine()\n}", "title": "" }, { "docid": "bd3271b8f1f1775b5096e43fd6240c86", "score": "0.50150543", "text": "func (b *Bar) Stop(msg, extMsg string) {\n\tb.mut.Lock()\n\tdefer b.mut.Unlock()\n\n\tif !b.stopped {\n\t\tb.stopped = true\n\t\tb.lastRender = true\n\t\tif msg != \"\" {\n\t\t\tb.msg = b.colors.StopMsg(msg)\n\t\t}\n\t\tif extMsg != \"\" {\n\t\t\tb.extMsg = b.colors.StopExtMsg(extMsg)\n\t\t\t// Only prepend the key name when rendered below the bars\n\t\t\tif !b.p.InlineExtMsg {\n\t\t\t\tb.extMsg = b.colors.Key(b.key) + b.colors.KeyDiv(\": \") + b.extMsg\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "143a48efb6745c1906276f87f37bf4bd", "score": "0.50112265", "text": "func (v *UintValue) span() interval.U64Span {\n\tif len(v.Ranges) == 0 {\n\t\treturn interval.U64Span{}\n\t}\n\treturn interval.U64Span{\n\t\tStart: v.Ranges[0].Start,\n\t\tEnd: v.Ranges[len(v.Ranges)-1].End,\n\t}\n}", "title": "" }, { "docid": "21d51c1f59ac18c439f46450464f2237", "score": "0.50045556", "text": "func StopTracer(provider *sdktrace.TracerProvider) {\n\tif provider != nil {\n\t\tctx, cancel := context.WithTimeout(context.Background(), time.Second*5)\n\t\tdefer cancel()\n\t\t_ = provider.Shutdown(ctx)\n\t}\n}", "title": "" }, { "docid": "3fa4dc4db792cc8165fa2c0cbc5378a3", "score": "0.5002826", "text": "func (e *Ed) Stop() {\n\te.Newline()\n\te.term.ClearLine()\n\te.term.Stop()\n}", "title": "" }, { "docid": "c0f393d1db4c09ec49455f62fb794475", "score": "0.4998891", "text": "func RangeStmtEnd(s *ast.RangeStmt,) token.Pos", "title": "" }, { "docid": "bcaf20ef94a70d501ab95fe6ee0a21d5", "score": "0.49829495", "text": "func (s *Spinner) Stop() {\n\ts.lock.Lock()\n\tdefer s.lock.Unlock()\n\tif s.active {\n\t\ts.active = false\n\t\ts.erase()\n\t\tif s.FinalMSG != \"\" {\n\t\t\tfmt.Fprintf(s.Writer, s.FinalMSG)\n\t\t}\n\t\ts.stopChan <- struct{}{}\n\t}\n}", "title": "" }, { "docid": "a5113c9ab6af05912056d391b886825d", "score": "0.49687073", "text": "func (s *BaseLuceneListener) ExitSep(ctx *SepContext) {}", "title": "" }, { "docid": "2872d339b039c9b39ad368d758efa3c4", "score": "0.49662834", "text": "func (is *Spinner) ErrStop(message string, opts ...interface{}) {\n\tis.FinalMSG = fmt.Sprintln(append([]interface{}{ErrPrefix, message}, opts...)...)\n\tis.Stop()\n}", "title": "" }, { "docid": "2cf0e04b28016663f6af7bdb1de11b1b", "score": "0.49642086", "text": "func Span(markup ...vecty.MarkupOrChild) *vecty.HTML {\n\treturn vecty.Tag(\"span\", markup...)\n}", "title": "" }, { "docid": "098a3bafbe7cfc03625ba3ebd0331412", "score": "0.49625525", "text": "func (b *Buffer) StopGroupUndo() {\n\tb.txt.EndEdit()\n}", "title": "" }, { "docid": "37879dd05ff6b408f5765ec47721edf1", "score": "0.49613705", "text": "func StopTrace() {}", "title": "" }, { "docid": "b4252b67a6a8345ed7abefcd79db3f86", "score": "0.49470952", "text": "func (this *spanner) spanUndent(src *Source) *Syntax {\n return Undent(src).mapUnparsedAmbits(func(a *Ambit)string { return fmt.Sprintf(\"%v\", this.span(a)) })\n}", "title": "" }, { "docid": "0929df9bd873dc689a39d30d4c095727", "score": "0.49435422", "text": "func (r Rect2) Span(i int) int {\n\treturn r.Max[i] - r.Min[i]\n}", "title": "" }, { "docid": 
"16ded45a56d8d8fe1cd823d49a6edf05", "score": "0.49400786", "text": "func (c *CalendarDataSource) addSpan(span timespan.Span) {\n\tfor _, existingSpan := range c.CalendarSpans {\n\t\tif trimmedSpan, overlap := span.TrimIfOverlaps(existingSpan); overlap {\n\t\t\tif trimmedSpan.IsZero() {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tspan = trimmedSpan\n\t\t}\n\t}\n\tc.CalendarSpans = append(c.CalendarSpans, span)\n}", "title": "" }, { "docid": "dd3b145e325c5cfe48f2598396878ce0", "score": "0.4936449", "text": "func (s *BasevbaListener) ExitMidStmt(ctx *MidStmtContext) {}", "title": "" }, { "docid": "278b1f60c4abedd0b6bc71315c41ba0a", "score": "0.4924795", "text": "func (r Range) Span() (Span, error) {\n\tf := r.FileSet.File(r.Start)\n\tif f == nil {\n\t\treturn Span{}, fmt.Errorf(\"file not found in FileSet\")\n\t}\n\ts := Span{v: span{URI: FileURI(f.Name())}}\n\tvar err error\n\ts.v.Start.Offset, err = offset(f, r.Start)\n\tif err != nil {\n\t\treturn Span{}, err\n\t}\n\tif r.End.IsValid() {\n\t\ts.v.End.Offset, err = offset(f, r.End)\n\t\tif err != nil {\n\t\t\treturn Span{}, err\n\t\t}\n\t}\n\ts.v.Start.clean()\n\ts.v.End.clean()\n\ts.v.clean()\n\tconverter := NewTokenConverter(r.FileSet, f)\n\treturn s.WithPosition(converter)\n}", "title": "" }, { "docid": "6746a7ee450a55de3ec10a93c5329edc", "score": "0.49204642", "text": "func (e *Exporter) ExportSpan(vd *trace.SpanData) {\n\ttraceID := hex.EncodeToString(vd.SpanContext.TraceID[:])\n\tspanID := hex.EncodeToString(vd.SpanContext.SpanID[:])\n\tparentSpanID := hex.EncodeToString(vd.ParentSpanID[:])\n\tl := e.log()\n\tl = l.Str(\"traceId\", traceID).Str(\"spanId\", spanID)\n\tif !reZero.MatchString(parentSpanID) {\n\t\tl = l.Str(\"parentSpanId\", parentSpanID)\n\t}\n\n\tl = l.Str(\"span\", vd.Name).\n\t\tStr(\"statusMessage\", vd.Status.Message).\n\t\tInt32(\"statusCode\", vd.Status.Code).\n\t\tDur(\"elapsed\", vd.EndTime.Sub(vd.StartTime))\n\n\tfor _, item := range vd.Annotations {\n\t\tfor k, v := range item.Attributes {\n\t\t\tl = l.Interface(\n\t\t\t\tfmt.Sprintf(\"annotations.%s.%s\", item.Message, k), v)\n\t\t}\n\t}\n\n\tfor k, v := range vd.Attributes {\n\t\tl = l.Interface(fmt.Sprintf(\"attributes.%s\", k), v)\n\t}\n\tl.Msg(\"trace\")\n}", "title": "" }, { "docid": "11dc55acfe2fdbd723368dd6195eb113", "score": "0.49149656", "text": "func (bls *BinlogStreamer) Stop() {\n\tbls.svm.Stop()\n}", "title": "" }, { "docid": "df736de1755b449d137063dc90c7404f", "score": "0.49100715", "text": "func (l *Listener) Stop(exit bool) {\n\n\t// lock list\n\tdefer l.thingsLock.Unlock()\n\tl.thingsLock.Lock()\n\n\tfor i, t := range l.thingList {\n\t\tlog.Debugf(\"removeSlice[%d] id[%d]\\n\", i, t.ShortD().CidNumber)\n\t\t// pop item from list\n\t\tl.thingList = l.thingList[1:]\n\t\tt.Stop()\n\t}\n\tl.waitGroup.Wait() // waiting for semaphore to hit zero\n\tlog.Debug(\"WaitGroup returned\")\n\n\tif exit {\n\t\t// wait for eventC to be flushed by our io writer\n\t\tfor len(l.eventC) > 0 {\n\t\t\ttime.Sleep(time.Millisecond * 10)\n\t\t}\n\n\t\t// send interrupt to the running listener loop\n\t\tl.waitGroup.Add(1)\n\t\tl.stopC <- things.ZeroStruct\n\t\tl.waitGroup.Wait()\n\t}\n}", "title": "" }, { "docid": "c70772ad6560c2e55f678c1a12d9ee9a", "score": "0.4907856", "text": "func (t *Tracer) Stop() {\n\tt.close()\n}", "title": "" }, { "docid": "940c4fd8b4f03e8562e60611416129c1", "score": "0.48970053", "text": "func (s *BaseJava8ParserListener) ExitBreakStatement(ctx *BreakStatementContext) {}", "title": "" }, { "docid": "83d084cec56c47ddf586fdce7b11634e", "score": 
"0.48951563", "text": "func (t *Trace) Stop() error {\n\tif t.err != nil {\n\t\treturn t.err\n\t}\n\n\tclose(t.stop)\n\terr := <-t.stopped\n\t// TODO(fg) does the trace format support writing error messages? if yes,\n\t// we should probably attempt to write the error to the file as well.\n\tif finishErr := t.enc.Finish(); finishErr != nil && err == nil {\n\t\terr = finishErr\n\t}\n\n\tif err != nil {\n\t\tt.err = err\n\t} else {\n\t\t// To be returned if Stop() is called more than once.\n\t\tt.err = errors.New(\"tracer is already stopped\")\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "ac624abdd06d418d8423c518a01707db", "score": "0.4885035", "text": "func (s *BaseECMAScriptListener) ExitBreakStatement(ctx *BreakStatementContext) {}", "title": "" }, { "docid": "587ef6aa00ea895741c29afa0668d11c", "score": "0.4877181", "text": "func (a *Annotator) endScope() {\n\ta.symbols.leave()\n}", "title": "" }, { "docid": "86e9e774971abf3dd3d8b0c72135e930", "score": "0.4872515", "text": "func (c *TraceTiming) EndTrace() {\n\tif c.tracer != nil {\n\t\telapsed := time.Now().UTC().UnixNano() - c.start\n\t\tc.tracer.Trace(c.correlationId, c.component, c.operation, elapsed/int64(time.Millisecond))\n\t}\n}", "title": "" }, { "docid": "0205e1e7f413e119f5ae51cdeae361db", "score": "0.4872005", "text": "func (s *Set) Remove(start, end utc.UTC) {\n\tif s.iset == nil {\n\t\tpanic(\"timespan.Set not initialised\")\n\t}\n\tif end < start {\n\t\tpanic(fmt.Errorf(\"start %s before end %s\", start, end))\n\t}\n\ts.iset.Remove(&Span{start, end})\n}", "title": "" }, { "docid": "265e197626d622445179a8f45176acdf", "score": "0.4871091", "text": "func (dm *DurationMetric) Stop() {\n\tdm.duration = time.Since(dm.startTime)\n}", "title": "" }, { "docid": "1ee63ef9351a2080da20fc256f3a11c8", "score": "0.48684147", "text": "func (t *Tracer) Stop() {\n\tclose(t.exit)\n\tt.exitWG.Wait()\n}", "title": "" }, { "docid": "7b71150bd5f5ab4f4c37ca776ae3c068", "score": "0.48668596", "text": "func (r *Runner) Stop() {\n\tr.ticker.Stop()\n}", "title": "" }, { "docid": "baab303be0b46f3e78e3ac44518fd9f5", "score": "0.48627585", "text": "func (rec *Receiver) endOp(\n\treceiverCtx context.Context,\n\tformat string,\n\tnumReceivedItems int,\n\terr error,\n\tdataType config.DataType,\n) {\n\tnumAccepted := numReceivedItems\n\tnumRefused := 0\n\tif err != nil {\n\t\tnumAccepted = 0\n\t\tnumRefused = numReceivedItems\n\t}\n\n\tspan := trace.SpanFromContext(receiverCtx)\n\n\tif obsreportconfig.Level != configtelemetry.LevelNone {\n\t\tvar acceptedMeasure, refusedMeasure *stats.Int64Measure\n\t\tswitch dataType {\n\t\tcase config.TracesDataType:\n\t\t\tacceptedMeasure = obsmetrics.ReceiverAcceptedSpans\n\t\t\trefusedMeasure = obsmetrics.ReceiverRefusedSpans\n\t\tcase config.MetricsDataType:\n\t\t\tacceptedMeasure = obsmetrics.ReceiverAcceptedMetricPoints\n\t\t\trefusedMeasure = obsmetrics.ReceiverRefusedMetricPoints\n\t\tcase config.LogsDataType:\n\t\t\tacceptedMeasure = obsmetrics.ReceiverAcceptedLogRecords\n\t\t\trefusedMeasure = obsmetrics.ReceiverRefusedLogRecords\n\t\t}\n\n\t\tstats.Record(\n\t\t\treceiverCtx,\n\t\t\tacceptedMeasure.M(int64(numAccepted)),\n\t\t\trefusedMeasure.M(int64(numRefused)))\n\t}\n\n\t// end span according to errors\n\tif span.IsRecording() {\n\t\tvar acceptedItemsKey, refusedItemsKey string\n\t\tswitch dataType {\n\t\tcase config.TracesDataType:\n\t\t\tacceptedItemsKey = obsmetrics.AcceptedSpansKey\n\t\t\trefusedItemsKey = obsmetrics.RefusedSpansKey\n\t\tcase config.MetricsDataType:\n\t\t\tacceptedItemsKey = 
obsmetrics.AcceptedMetricPointsKey\n\t\t\trefusedItemsKey = obsmetrics.RefusedMetricPointsKey\n\t\tcase config.LogsDataType:\n\t\t\tacceptedItemsKey = obsmetrics.AcceptedLogRecordsKey\n\t\t\trefusedItemsKey = obsmetrics.RefusedLogRecordsKey\n\t\t}\n\n\t\tspan.SetAttributes(\n\t\t\tattribute.String(obsmetrics.FormatKey, format),\n\t\t\tattribute.Int64(acceptedItemsKey, int64(numAccepted)),\n\t\t\tattribute.Int64(refusedItemsKey, int64(numRefused)),\n\t\t)\n\t\trecordError(span, err)\n\t}\n\tspan.End()\n}", "title": "" }, { "docid": "1aa2b2b14b65f987bbf8eab91e21485b", "score": "0.48607418", "text": "func (s *Spooler) Stop() {\n}", "title": "" }, { "docid": "7e9c7cc58d104123165ddc52f305b82d", "score": "0.4859906", "text": "func (t *jsTracer) Stop(err error) {\n\tt.vm.Interrupt(err)\n}", "title": "" }, { "docid": "7e93af484905a4ed29692f93e037ed51", "score": "0.48596188", "text": "func EndSegment(ctx context.Context, resp *http.Response) {\n\tif segment, ok := ctx.Value(segmentContextKey).(endable); ok {\n\t\tif resp != nil {\n\t\t\tif extSegment, ok := segment.(*newrelic.ExternalSegment); ok {\n\t\t\t\textSegment.Response = resp\n\t\t\t}\n\t\t\tif requestID := GetRequestID(resp.Header); requestID != \"\" {\n\t\t\t\ttxn := newrelic.FromContext(ctx)\n\t\t\t\tintegrationsupport.AddAgentSpanAttribute(txn, newrelic.SpanAttributeAWSRequestID, requestID)\n\t\t\t}\n\t\t}\n\t\tsegment.End()\n\t}\n}", "title": "" }, { "docid": "a07dac0e587e95bffee17819a82f61d9", "score": "0.48568386", "text": "func (i *Index) Stop(ctx context.Context) {\n\tclose(i.stop)\n}", "title": "" }, { "docid": "5caa98b93ea616de8bcc6c269eb4b5be", "score": "0.48453197", "text": "func (s *SampleBuilder) Stop() {\n\tif s.stop {\n\t\treturn\n\t}\n\ts.stop = true\n}", "title": "" }, { "docid": "726a99895de01c60ae23e501888f72f4", "score": "0.48407048", "text": "func (ts TimeSpan) End() time.Time {\n\tif ts.duration < 0 {\n\t\treturn ts.mark\n\t}\n\treturn ts.mark.Add(ts.duration)\n}", "title": "" }, { "docid": "f472c54903070bb1e6ba8230343f6edb", "score": "0.4839011", "text": "func (ssp *simpleSpanProcessor) Shutdown(ctx context.Context) error {\n\tvar err error\n\tssp.stopOnce.Do(func() {\n\t\tstopFunc := func(exp SpanExporter) (<-chan error, func()) {\n\t\t\tdone := make(chan error)\n\t\t\treturn done, func() { done <- exp.Shutdown(ctx) }\n\t\t}\n\n\t\t// The exporter field of the simpleSpanProcessor needs to be zeroed to\n\t\t// signal it is shut down, meaning all subsequent calls to OnEnd will\n\t\t// be gracefully ignored. This needs to be done synchronously to avoid\n\t\t// any race condition.\n\t\t//\n\t\t// A closure is used to keep reference to the exporter and then the\n\t\t// field is zeroed. This ensures the simpleSpanProcessor is shut down\n\t\t// before the exporter. This order is important as it avoids a\n\t\t// potential deadlock. If the exporter shut down operation generates a\n\t\t// span, that span would need to be exported. Meaning, OnEnd would be\n\t\t// called and try acquiring the lock that is held here.\n\t\tssp.exporterMu.Lock()\n\t\tdone, shutdown := stopFunc(ssp.exporter)\n\t\tssp.exporter = nil\n\t\tssp.exporterMu.Unlock()\n\n\t\tgo shutdown()\n\n\t\t// Wait for the exporter to shut down or the deadline to expire.\n\t\tselect {\n\t\tcase err = <-done:\n\t\tcase <-ctx.Done():\n\t\t\t// It is possible for the exporter to have immediately shut down\n\t\t\t// and the context to be done simultaneously. In that case this\n\t\t\t// outer select statement will randomly choose a case. 
This will\n\t\t\t// result in a different returned error for similar scenarios.\n\t\t\t// Instead, double check if the exporter shut down at the same\n\t\t\t// time and return that error if so. This will ensure consistency\n\t\t\t// as well as ensure the caller knows the exporter shut down\n\t\t\t// successfully (they can already determine if the deadline is\n\t\t\t// expired given they passed the context).\n\t\t\tselect {\n\t\t\tcase err = <-done:\n\t\t\tdefault:\n\t\t\t\terr = ctx.Err()\n\t\t\t}\n\t\t}\n\t})\n\treturn err\n}", "title": "" }, { "docid": "816e906932fe57bce748f81a1b005f69", "score": "0.483487", "text": "func (s *IncrementStat) Stop() {\n\ts.m.Lock()\n\tdefer s.m.Unlock()\n\ts.isStarted = true\n}", "title": "" }, { "docid": "8f90a65de1f2743e027d3107bf4e7f9b", "score": "0.483268", "text": "func (w *Writer) Stop() {\n\tw.done <- true\n\t<-w.stopDone\n}", "title": "" }, { "docid": "f57b4966cab96d8b6bd3a54b24fcd7dc", "score": "0.48198798", "text": "func (a *PulseAnimation) Stop() {\n\ta.running = false\n}", "title": "" }, { "docid": "b78d02c0c6dd02a9093d9c899cbfa3f7", "score": "0.48174503", "text": "func (mr MappedRange) Span() span.Span {\n\tspn, err := mr.Mapper.OffsetSpan(mr.start, mr.end)\n\tif err != nil {\n\t\tpanic(err) // can't happen\n\t}\n\treturn spn\n}", "title": "" }, { "docid": "1b9e2474b1e91ff092555dfc3973880c", "score": "0.4817121", "text": "func (rb *NlpRobertaTokenizationConfigBuilder) Span(span int) *NlpRobertaTokenizationConfigBuilder {\n\trb.v.Span = &span\n\treturn rb\n}", "title": "" }, { "docid": "5d606f9c3dc404ff8256363f3e4e7616", "score": "0.48161545", "text": "func NewSpan() *Span {\n\treturn &Span{r1: RwMissing, r2: RwMissing, c1: RwMissing, c2: RwMissing, rby: RwMissing, cby: RwMissing}\n}", "title": "" }, { "docid": "cd7fee9f8824801607ec27b4ab25fbf2", "score": "0.48125967", "text": "func (p *Patcher) PatchSpan(s *cpb.Span) (span *cpb.Span, exists bool) {\n\tspanStart, spanEnd := ByteOffsets(s)\n\tif spanStart > spanEnd {\n\t\treturn nil, false\n\t} else if p == nil || s == nil {\n\t\treturn s, true\n\t}\n\n\t// Find the diff span that contains the starting offset.\n\tidx := sort.Search(len(p.spans), func(i int) bool {\n\t\treturn spanStart < p.spans[i].oldPrefix.Offset\n\t}) - 1\n\tif idx < 0 {\n\t\treturn nil, false\n\t}\n\n\td := p.spans[idx]\n\tif d.Type != eq || spanEnd > d.oldPrefix.Offset+d.Length {\n\t\treturn nil, false\n\t}\n\n\tlineDiff := d.newPrefix.Lines - d.oldPrefix.Lines\n\tcolDiff := d.newPrefix.ColumnOffset - d.oldPrefix.ColumnOffset\n\tif d.FirstNewline != -1 && spanStart-d.oldPrefix.Offset >= d.FirstNewline {\n\t\t// The given span is past the first newline so it has no column diff.\n\t\tcolDiff = 0\n\t}\n\treturn &cpb.Span{\n\t\tStart: &cpb.Point{\n\t\t\tByteOffset: d.newPrefix.Offset + (spanStart - d.oldPrefix.Offset),\n\t\t\tColumnOffset: s.GetStart().GetColumnOffset() + colDiff,\n\t\t\tLineNumber: s.GetStart().GetLineNumber() + lineDiff,\n\t\t},\n\t\tEnd: &cpb.Point{\n\t\t\tByteOffset: d.newPrefix.Offset + (spanEnd - d.oldPrefix.Offset),\n\t\t\tColumnOffset: s.GetEnd().GetColumnOffset() + colDiff,\n\t\t\tLineNumber: s.GetEnd().GetLineNumber() + lineDiff,\n\t\t},\n\t}, true\n}", "title": "" }, { "docid": "95b28900985ffe493aa7adc3acd1b4b1", "score": "0.4811107", "text": "func (s *BasevbaListener) ExitSeekStmt(ctx *SeekStmtContext) {}", "title": "" } ]
cc97d4c1a400668137cfc9a6f10e61af
EnterEveryRule is called when any rule is entered.
[ { "docid": "2bca59887ccdd4244e8121f16245e8f3", "score": "0.82351536", "text": "func (s *BasePlSqlParserListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" } ]
[ { "docid": "288d8c02a124f02f2fd109e62062c8c1", "score": "0.8551762", "text": "func (s *BasempsListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "73eae9cd9badeaeeaa89c91097bc0dc5", "score": "0.8547191", "text": "func (s *BasestellarisListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "68c6dd0b362a3e6c32aab3f06af70858", "score": "0.8540618", "text": "func (s *BaseAnzerListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "8b2d1481ab9d303978d828a59a22f5b7", "score": "0.8537819", "text": "func (s *BaseargusListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "9b4e3b6798cfeb8ed967f9c97eb632ac", "score": "0.8532177", "text": "func (s *BaseRListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "dbfe555ca1be88d3eddddf15bd6870d8", "score": "0.8509084", "text": "func (s *BasesnowballListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "aa229939faee9a57b7f5c01af40c4008", "score": "0.8503693", "text": "func (s *BaseKlListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "178f8cee6e5a63f6e4d56e431a0ed705", "score": "0.848965", "text": "func (s *BaseangelscriptListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "8f0051a773ef9a4bfd5afd167c58acb8", "score": "0.8484159", "text": "func (s *BasenewickListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "447072929b12a84836af216f46c81c42", "score": "0.8462117", "text": "func (s *BaseScimFilterListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "a9669d4c8cc6fc1a89e3efda538208ca", "score": "0.8459959", "text": "func (s *BaseSimplListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "6714896fc3f10bcc61fb348a7f65536e", "score": "0.84528166", "text": "func (s *BaseCOOLListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "35e4164d52ed771f8a2da9533a2632bc", "score": "0.84391785", "text": "func (s *Basegrulev3Listener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "d59e5c14398cdf4976dedf81474f50a9", "score": "0.84342486", "text": "func (s *BaseplucidListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "f960291ad8a8db72ba63953735cac35b", "score": "0.8431676", "text": "func (s *BaseCDLangListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "40f290151cd9feacda267bac1a585c26", "score": "0.8423297", "text": "func (s *BaseBundListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "cc5d737c997da58bc002a4b9293ea0ca", "score": "0.84178334", "text": "func (s *BaseemarkListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "f1d2acd599ba89f77cc536a6b1a0f1d8", "score": "0.84078014", "text": "func (s *BaseSolidityListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "129306c747eef06c00478acae9432d38", "score": "0.8391431", "text": "func (s *BaseNuggetListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "f6c34e05d17d0dfdcdccedf06dbae1c8", "score": "0.8388324", "text": "func (s *BasebcplListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "d84c53d91d7255f79dbc2b0f6bb24478", "score": "0.8370828", "text": "func (s *BaseSwaggableParserListener) 
EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "d2414f1e93df29d35bb0a714e6afbe3c", "score": "0.8344346", "text": "func (s *BaseSymbolanListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "2829ee5482eead72531a0ee38a9d93be", "score": "0.83402115", "text": "func (s *BaseClickHouseParserListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "93d6212550a13d1b1143c3fe698192d5", "score": "0.83391494", "text": "func (s *BaseCoAsmListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "b5be75bc5be59d32ae73d9872ca07aed", "score": "0.8290334", "text": "func (s *BaseCommandsListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "64a3d9fb915f68cd224cb7651d4eaa24", "score": "0.8273563", "text": "func (s *BaseFusionTablesSqlListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "f744096a569e7fc7ed5920e32ffc1bec", "score": "0.82683337", "text": "func (s *BaseSOQLListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "9656a5ab28596d023a6a9ec944679390", "score": "0.8257082", "text": "func (s *BaseExprListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "e88ca2de641c49d8810393ceab1f1f3b", "score": "0.82548064", "text": "func (s *BasePathListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "885e044af304cd4c4c2e38fd6e6fde66", "score": "0.8244473", "text": "func (s *BasecminusListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "222438b86a5755795f87f8e120f4f321", "score": "0.824104", "text": "func (s *BaseScssParserListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "3682fdd1cba79a7069508d36d20be2cf", "score": "0.82288545", "text": "func (s *BaseCMakeListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "600fcbdc0d4948593364f2de930cbbd6", "score": "0.82216555", "text": "func (s *BasesexpressionListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "600fcbdc0d4948593364f2de930cbbd6", "score": "0.82216555", "text": "func (s *BasesexpressionListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "61bc1ff05b380d49c77e7596259b7473", "score": "0.82161725", "text": "func (s *BaseGraphQLListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "204c84f573065b4d528838d54540408d", "score": "0.8186517", "text": "func (s *BaseChatListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "b49ec1acb6b6df82b30f14e44f42c4dc", "score": "0.8168346", "text": "func (s *BasePromQLParserListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "e3436931d6620ed155b4ce053670c2c9", "score": "0.81661105", "text": "func (s *BasearithmeticListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "a42a2d72a6ef1af5262f5b1daf8731e5", "score": "0.81108123", "text": "func (s *BasepostalcodeListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "7cca6fcf6189dd16088f75baf2badc65", "score": "0.8095686", "text": "func (s *BaseIDLListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "0c5a10b9d0422eec06b5ad81f46445d5", "score": "0.80834925", "text": "func (s *BaseASNListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": 
"1e6459f7b8fbd94b3c042feeba029ccd", "score": "0.7990649", "text": "func (s *BaseSQLiteListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "49c7b97a435db0b271420f920dabb755", "score": "0.79711324", "text": "func (s *Baseiso8601Listener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "5a05e8cba24bcd05c0300c33b9434185", "score": "0.79400474", "text": "func (s *BasepluralListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "d7c615bc588d018c41842a240c31cb53", "score": "0.78362745", "text": "func (s *BaseArrayIntListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "7b94f2c3f5c70585c54b92943bfabc92", "score": "0.77574325", "text": "func (s *BasezipListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "499331731d42e9c033cf66aa839b9316", "score": "0.7529154", "text": "func (s *BasePostgreSQLParserListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "44ac52dd1237295aca86ce273c30010b", "score": "0.6767613", "text": "func (s *BaseAnzerListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "a0d15cc6bbd8271b2c37341305964baa", "score": "0.67314243", "text": "func (s *BasestellarisListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "b2518d1a6c64b5a2b715502a8961da4c", "score": "0.66962636", "text": "func (s *BaseDeleteStatementParserListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "dc55f2519c0d3ac4d8f73bd6f4f9ce47", "score": "0.66891474", "text": "func (s *BaseangelscriptListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "3e5436d6df99add680fa7266ad217520", "score": "0.667818", "text": "func (s *BaseemarkListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "46a4d665108e5451ec43d484f280ed0f", "score": "0.66476566", "text": "func (s *BaseSolidityListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "917027743852407d49faac4df4ef0ece", "score": "0.6644352", "text": "func (s *BasearithmeticListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "d8f3e84dff648018a87505883d5dd9b5", "score": "0.6643459", "text": "func (s *BasempsListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "763d22eec48744feb36ac7c2b81b5e65", "score": "0.66421294", "text": "func (s *BaseKlListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "3e9617116b1a2713ea20010434f1139e", "score": "0.6637368", "text": "func (s *BasesnowballListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "72b176b61ed9c35354c5373808ee71bd", "score": "0.65994674", "text": "func (s *BaseCOOLListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "33b0ffb24c82866aaaf97da156e81a91", "score": "0.6594276", "text": "func (s *BaseargusListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "dec82a3baabe1490cc7c3bd676ca13f7", "score": "0.6574203", "text": "func (s *BaseSOQLListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "13bef1ddac60b4046711bf281290d96e", "score": "0.65695137", "text": "func (s *BaseplucidListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "d77e48d96a6bf536fbf5d3b655b87587", "score": "0.65656936", "text": "func (s *BasebcplListener) 
ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "6f517532b77f13e191e2b3bd255d0e68", "score": "0.6562434", "text": "func (s *Baseiso8601Listener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "ce38b8ef8945dbd058b5df1125beae8f", "score": "0.6558662", "text": "func (s *BaseScimFilterListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "baa02688ed6a40a84efa549995d8aa85", "score": "0.6542483", "text": "func (s *BaseNuggetListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "18867b7062ba41fdaffd1a6253ffe11f", "score": "0.6525932", "text": "func (s *BaseRListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "81dba6559f6e486427a5bca244ec1810", "score": "0.6524239", "text": "func (s *BaseBundListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "162885c0cfdd9632b4220e8167273474", "score": "0.6518595", "text": "func (s *BaseClickHouseParserListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "7864c425d79f4ba3760087f8879877da", "score": "0.6513698", "text": "func (s *BasePlSqlParserListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "733116e7c725d0a794d5809674ed85b1", "score": "0.6503414", "text": "func (s *BasepostalcodeListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "60a0d338a69b647b33998a380ba5ee5c", "score": "0.6501733", "text": "func (s *BaseSimplListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "a4aa9170e63b417e86c39b0fe5ee569f", "score": "0.6498632", "text": "func (s *Basegrulev3Listener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "97ad715cca4a4294578008593e8607fb", "score": "0.64978886", "text": "func (s *BaseCDLangListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "93a113935bf344efaeeb91d722949582", "score": "0.6493861", "text": "func (s *BaseFusionTablesSqlListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "81693fe69ee3aed09ac5d56a1e54d10f", "score": "0.6486766", "text": "func (s *BaseSwaggableParserListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "6ce265450cb711abc71f28c65db816c9", "score": "0.6474315", "text": "func (s *BasenewickListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "c2058cdaa49afd8794377b41c7a5f4d3", "score": "0.64710844", "text": "func (s *BaseExprListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "5359c0134c8f63c7eb83c0f4be451968", "score": "0.6468579", "text": "func (s *BaseScssParserListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "ce5ebcb73fb753d1d1554d392c94c0dc", "score": "0.6467478", "text": "func (s *BaseSymbolanListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "7b91f08eecf476e5919d6a24926e7a92", "score": "0.6465037", "text": "func (s *BaseASNListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "9483e20c553e4d6c7b1ca3cfeed6ca8c", "score": "0.6418354", "text": "func (s *BasecminusListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "13db2943f9f5b262bb7add2f7284ae47", "score": "0.6417967", "text": "func (s *BaseGraphQLListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "17eed6ced8bfb0ec25e40d5c23dc2266", 
"score": "0.64163524", "text": "func (s *BasesexpressionListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "17eed6ced8bfb0ec25e40d5c23dc2266", "score": "0.64163524", "text": "func (s *BasesexpressionListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "8d12a45f341f6004b54b9be8075f27f0", "score": "0.64120466", "text": "func (s *BaseIDLListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "e12b9717e314f074694fc55479e6709d", "score": "0.64093375", "text": "func (s *BaseCoAsmListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "d59a3862b9ebe894cb811b53a2865666", "score": "0.6401026", "text": "func (s *BasePromQLParserListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "8797b2015e8644adac21094101409d0c", "score": "0.6387418", "text": "func (s *BaseChatListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "0c8a92ae897392e403c312e14ae8765e", "score": "0.6366178", "text": "func (s *BasepluralListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "7be89e1c92e6fb91fb7f7aaeac865e24", "score": "0.6358457", "text": "func (s *BasePathListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "e41f93012719f9d9e87ec7367eb8fc50", "score": "0.6344135", "text": "func (s *BaseSQLiteListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "45a073bbc39e5c384ad9f54ada66c7fc", "score": "0.63248277", "text": "func (s *BaseCommandsListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "e4249c4fd7e5261b354de18ea1093ee2", "score": "0.63187313", "text": "func (p *BaseParser) TriggerEnterRuleEvent() {\n\tif p.parseListeners != nil {\n\t\tctx := p.ctx\n\t\tfor _, listener := range p.parseListeners {\n\t\t\tlistener.EnterEveryRule(ctx)\n\t\t\tctx.EnterRule(listener)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "4cc492f76e03c1410c7aa071de88c99b", "score": "0.6220636", "text": "func (s *BaseArrayIntListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "14b5c5dc027ba4b837777a3290b2858a", "score": "0.61962575", "text": "func (s *BaseCMakeListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "81f580072f61f6f32430017b2fc603da", "score": "0.6107775", "text": "func (s *BasezipListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" }, { "docid": "f9269274f4b61683fad2d1c25e8347ac", "score": "0.60564786", "text": "func (s *BaseScssParserListener) EnterRuleset(ctx *RulesetContext) {}", "title": "" }, { "docid": "e3857c686377b8bc2e13f0f88979ca23", "score": "0.6039831", "text": "func (s *BaseSymbolanListener) EnterRuleAtom(ctx *RuleAtomContext) {}", "title": "" }, { "docid": "1f080751b65d7c2ccec7986c0a55c156", "score": "0.6011597", "text": "func (s *BaseSymbolanListener) EnterRule_function(ctx *Rule_functionContext) {}", "title": "" }, { "docid": "a3aa66f867640d866c53d6c4d6ef422d", "score": "0.6010065", "text": "func (s *BasePostgreSQLParserListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}", "title": "" } ]
89a7a19d6297f8659ca255279b77d289
PropagateConnection information from the supplied managed resource to the supplied resource claim.
[ { "docid": "d86241a8e331f999a9910e728c716741", "score": "0.45243007", "text": "func (fn ManagedConnectionPropagatorFn) PropagateConnection(ctx context.Context, o LocalConnectionSecretOwner, mg Managed) error {\n\treturn fn(ctx, o, mg)\n}", "title": "" } ]
[ { "docid": "be854059224c4303a1ec396463b959a7", "score": "0.5565191", "text": "func (r *Reconciler) Reconcile(req reconcile.Request) (reconcile.Result, error) { // nolint:gocyclo\n\t// NOTE(negz): This method is well over our cyclomatic complexity goal.\n\t// Be wary of adding additional complexity.\n\n\tlog := r.log.WithValues(\"request\", req)\n\tlog.Debug(\"Reconciling\")\n\n\tctx, cancel := context.WithTimeout(context.Background(), claimReconcileTimeout)\n\tdefer cancel()\n\n\tclaim := r.newClaim()\n\tif err := r.client.Get(ctx, req.NamespacedName, claim); err != nil {\n\t\t// There's no need to requeue if we no longer exist. Otherwise we'll be\n\t\t// requeued implicitly because we return an error.\n\t\tlog.Debug(\"Cannot get resource claim\", \"error\", err)\n\t\treturn reconcile.Result{}, errors.Wrap(resource.IgnoreNotFound(err), errGetClaim)\n\t}\n\n\trecord := r.record.WithAnnotations(\"external-name\", meta.GetExternalName(claim))\n\tlog = log.WithValues(\n\t\t\"uid\", claim.GetUID(),\n\t\t\"version\", claim.GetResourceVersion(),\n\t\t\"external-name\", meta.GetExternalName(claim),\n\t)\n\n\tmanaged := r.newManaged()\n\tif ref := claim.GetResourceReference(); ref != nil {\n\t\trecord = record.WithAnnotations(\"managed-name\", claim.GetResourceReference().Name)\n\t\tlog = log.WithValues(\"managed-name\", claim.GetResourceReference().Name)\n\n\t\terr := r.client.Get(ctx, meta.NamespacedNameOf(ref), managed)\n\t\tif kerrors.IsNotFound(err) {\n\t\t\t// If the managed resource we explicitly reference doesn't exist yet\n\t\t\t// we want to retry after a brief wait, in case it is created. We\n\t\t\t// must explicitly requeue because our EnqueueRequestForClaim\n\t\t\t// handler can only enqueue reconciles for managed resources that\n\t\t\t// have their claim reference set, so we can't expect to be queued\n\t\t\t// implicitly when the managed resource we want to bind to appears.\n\t\t\tlog.Debug(\"Referenced managed resource not found\", \"requeue-after\", time.Now().Add(aShortWait))\n\t\t\trecord.Event(claim, event.Normal(reasonResourceNotFound, \"Referenced managed resource not found\"))\n\t\t\tclaim.SetConditions(Binding(), v1alpha1.ReconcileSuccess())\n\t\t\treturn reconcile.Result{RequeueAfter: aShortWait}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t\t}\n\t\tif err != nil {\n\t\t\t// If we didn't hit this error last time we'll be requeued\n\t\t\t// implicitly due to the status update. Otherwise we want to retry\n\t\t\t// after a brief wait, in case this was a transient error.\n\t\t\tlog.Debug(\"Cannot get referenced managed resource\", \"error\", err, \"requeue-after\", time.Now().Add(aShortWait))\n\t\t\trecord.Event(claim, event.Warning(reasonCannotGetResource, err))\n\t\t\tclaim.SetConditions(v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: aShortWait}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t\t}\n\t}\n\n\tif meta.WasDeleted(claim) {\n\t\tlog = log.WithValues(\"deletion-timestamp\", claim.GetDeletionTimestamp())\n\n\t\tif err := r.claim.Unbind(ctx, claim, managed); err != nil {\n\t\t\t// If we didn't hit this error last time we'll be requeued\n\t\t\t// implicitly due to the status update. 
Otherwise we want to retry\n\t\t\t// after a brief wait, in case this was a transient error.\n\t\t\tlog.Debug(\"Cannot unbind claim\", \"error\", err, \"requeue-after\", time.Now().Add(aShortWait))\n\t\t\trecord.Event(claim, event.Warning(reasonCannotUnbind, err))\n\t\t\tclaim.SetConditions(v1alpha1.Deleting(), v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: aShortWait}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t\t}\n\n\t\tlog.Debug(\"Successfully unbound managed resource\")\n\t\trecord.Event(claim, event.Normal(reasonUnbound, \"Successfully unbound managed resource\"))\n\n\t\tif err := r.claim.RemoveFinalizer(ctx, claim); err != nil {\n\t\t\t// If we didn't hit this error last time we'll be requeued\n\t\t\t// implicitly due to the status update. Otherwise we want to retry\n\t\t\t// after a brief wait, in case this was a transient error.\n\t\t\tlog.Debug(\"Cannot remove finalizer\", \"error\", err, \"requeue-after\", time.Now().Add(aShortWait))\n\t\t\tclaim.SetConditions(v1alpha1.Deleting(), v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: aShortWait}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t\t}\n\n\t\t// We've successfully deleted our claim and removed our finalizer. If we\n\t\t// assume we were the only controller that added a finalizer to this\n\t\t// claim then it should no longer exist and thus there is no point\n\t\t// trying to update its status.\n\t\tlog.Debug(\"Successfully deleted resource claim\")\n\t\treturn reconcile.Result{Requeue: false}, nil\n\t}\n\n\tif err := r.claim.AddFinalizer(ctx, claim); err != nil {\n\t\t// If we didn't hit this error last time we'll be requeued\n\t\t// implicitly due to the status update. Otherwise we want to retry\n\t\t// after a brief wait, in case this was a transient error.\n\t\tlog.Debug(\"Cannot add resource claim finalizer\", \"error\", err, \"requeue-after\", time.Now().Add(aShortWait))\n\t\tclaim.SetConditions(v1alpha1.Creating(), v1alpha1.ReconcileError(err))\n\t\treturn reconcile.Result{RequeueAfter: aShortWait}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t}\n\n\t// Claim reconcilers (should) watch for either claims with a resource ref,\n\t// claims with a class ref, or managed resources with a claim ref. In the\n\t// first case the managed resource always exists by the time we get here. In\n\t// the second case the class reference is set. The third case exposes us to\n\t// a pathological scenario in which a managed resource references a claim\n\t// that has no resource ref or class ref, so we can't assume the class ref\n\t// is always set at this point.\n\tif !meta.WasCreated(managed) && claim.GetClassReference() != nil {\n\t\trecord = record.WithAnnotations(\"class-name\", claim.GetClassReference().Name)\n\t\tlog = log.WithValues(\"class-name\", claim.GetClassReference().Name)\n\n\t\tclass := r.newClass()\n\t\t// Class reference should always be set by the time we get this far; we\n\t\t// set it on last reconciliation.\n\t\tif err := r.client.Get(ctx, meta.NamespacedNameOf(claim.GetClassReference()), class); err != nil {\n\t\t\t// If we didn't hit this error last time we'll be requeued\n\t\t\t// implicitly due to the status update. 
Otherwise we want to retry\n\t\t\t// after a brief wait, in case this was a transient error or the\n\t\t\t// class is (re)created.\n\t\t\tlog.Debug(\"Cannot get referenced resource class\", \"error\", err, \"requeue-after\", time.Now().Add(aShortWait))\n\t\t\trecord.Event(claim, event.Warning(reasonCannotGetClass, err))\n\t\t\tclaim.SetConditions(v1alpha1.Creating(), v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: aShortWait}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t\t}\n\n\t\tif err := r.managed.Configure(ctx, claim, class, managed); err != nil {\n\t\t\t// If we didn't hit this error last time we'll be requeued\n\t\t\t// implicitly due to the status update. Otherwise we want to retry\n\t\t\t// after a brief wait, in case this was a transient error or some\n\t\t\t// issue with the resource class was resolved.\n\t\t\tlog.Debug(\"Cannot configure managed resource\", \"error\", err, \"requeue-after\", time.Now().Add(aShortWait))\n\t\t\trecord.Event(claim, event.Warning(reasonCannotConfigureResource, err))\n\t\t\tclaim.SetConditions(v1alpha1.Creating(), v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: aShortWait}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t\t}\n\n\t\t// We'll know our managed resource's name at this point because it was\n\t\t// set by the above configure step.\n\t\trecord = record.WithAnnotations(\"managed-name\", managed.GetName())\n\t\tlog = log.WithValues(\"managed-name\", managed.GetName())\n\n\t\tif err := r.managed.Create(ctx, claim, class, managed); err != nil {\n\t\t\t// If we didn't hit this error last time we'll be requeued\n\t\t\t// implicitly due to the status update. Otherwise we want to retry\n\t\t\t// after a brief wait, in case this was a transient error.\n\t\t\tlog.Debug(\"Cannot create managed resource\", \"error\", err, \"requeue-after\", time.Now().Add(aShortWait))\n\t\t\trecord.Event(claim, event.Warning(reasonCannotCreateResource, err))\n\t\t\tclaim.SetConditions(v1alpha1.Creating(), v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: aShortWait}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t\t}\n\n\t\tlog.Debug(\"Successfully created managed resource\")\n\t\trecord.Event(claim, event.Normal(reasonCreatedResource, \"Successfully created managed resource\"))\n\t}\n\n\tif !resource.IsBindable(managed) && !resource.IsBound(managed) {\n\t\tlog.Debug(\"Managed resource is not yet bindable\")\n\t\trecord.Event(claim, event.Normal(reasonWaitingToBind, \"Managed resource is not yet bindable\"))\n\n\t\tif managed.GetClaimReference() == nil {\n\t\t\t// We're waiting to bind to a statically provisioned managed\n\t\t\t// resource. We must requeue because our EnqueueRequestForClaim\n\t\t\t// handler can only enqueue reconciles for managed resource updates\n\t\t\t// when they have their claim reference set, and that doesn't happen\n\t\t\t// until we bind to the managed resource we're waiting for.\n\t\t\tclaim.SetConditions(Binding(), v1alpha1.ReconcileSuccess())\n\t\t\treturn reconcile.Result{RequeueAfter: aShortWait}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t\t}\n\n\t\t// If this claim was not already binding we'll be requeued due to the\n\t\t// status update. Otherwise there's no need to requeue. 
We should be\n\t\t// watching both the resource claims and the resources we own, so we'll\n\t\t// be queued if anything changes.\n\t\tclaim.SetConditions(Binding(), v1alpha1.ReconcileSuccess())\n\t\treturn reconcile.Result{Requeue: false}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t}\n\n\tif resource.IsBindable(managed) {\n\t\tif err := r.managed.PropagateConnection(ctx, claim, managed); err != nil {\n\t\t\t// If we didn't hit this error last time we'll be requeued implicitly\n\t\t\t// due to the status update. Otherwise we want to retry after a brief\n\t\t\t// wait in case this was a transient error, or the resource connection\n\t\t\t// secret is created.\n\t\t\tlog.Debug(\"Cannot propagate connection details from managed resource to claim\", \"error\", err, \"requeue-after\", time.Now().Add(aShortWait))\n\t\t\trecord.Event(claim, event.Warning(reasonCannotPropagate, err))\n\t\t\tclaim.SetConditions(Binding(), v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: aShortWait}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t\t}\n\n\t\tif err := r.claim.Bind(ctx, claim, managed); err != nil {\n\t\t\t// If we didn't hit this error last time we'll be requeued implicitly\n\t\t\t// due to the status update. Otherwise we want to retry after a brief\n\t\t\t// wait, in case this was a transient error.\n\t\t\tlog.Debug(\"Cannot bind to managed resource\", \"error\", err, \"requeue-after\", time.Now().Add(aShortWait))\n\t\t\trecord.Event(claim, event.Warning(reasonCannotBind, err))\n\t\t\tclaim.SetConditions(Binding(), v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: aShortWait}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n\t\t}\n\n\t\tlog.Debug(\"Successfully bound managed resource\")\n\t\trecord.Event(claim, event.Normal(reasonBound, \"Successfully bound managed resource\"))\n\t}\n\n\t// No need to requeue. 
We should be watching both the resource claims and\n\t// the resources we own, so we'll be queued if anything changes.\n\tclaim.SetConditions(v1alpha1.Available(), v1alpha1.ReconcileSuccess())\n\treturn reconcile.Result{Requeue: false}, errors.Wrap(r.client.Status().Update(ctx, claim), errUpdateClaimStatus)\n}", "title": "" }, { "docid": "ca7228b9c899573fe65272257f5f3ef4", "score": "0.52650195", "text": "func (c *NitroClient) ActOnResource(resourceType string, resourceStruct interface{}, action string) error {\n\n\tnsResource := make(map[string]interface{})\n\tnsResource[resourceType] = resourceStruct\n\n\tresourceJSON, err := JSONMarshal(nsResource)\n\n\tc.logger.Trace(\"Resourcejson is \", \"resourceJSON\", string(resourceJSON))\n\n\t_, err = c.actOnResource(resourceType, resourceJSON, action)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[ERROR] nitro-go: Failed to apply action on resource of type %s, action=%s err=%s\", resourceType, action, err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "a5a993b3839e13ae7d1e7276f7a956cf", "score": "0.52208084", "text": "func adaptToCentralResource(reportResource *metav1.ObjectMeta, storedResource *metav1.ObjectMeta) {\n\t// The resource updated to etcd should have the same ResourceVersion of the one stored in etcd.\n\treportResource.ResourceVersion = storedResource.ResourceVersion\n\n\t// The resource from edge cluster should have the same uid of the one stored in etcd.\n\treportResource.UID = storedResource.UID\n}", "title": "" }, { "docid": "e0c42662f86f30854a498b660509d056", "score": "0.49818268", "text": "func (r *SimpleAppReconciler) ReconcileResource(app webappv1.SimpleApp, obj client.Object, state reconciler.DesiredState) (*reconcile.Result, error) {\n\t// @TODO this (along with the app) should probably live in some sort of parent reconciler struct\n\tresourceReconciler := reconciler.NewReconcilerWith(r.Client, reconciler.WithLog(r.Log))\n\n\terr := ctrl.SetControllerReference(&app, obj, r.Scheme)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn resourceReconciler.ReconcileResource(obj, state)\n}", "title": "" }, { "docid": "e4a68026c4960d2b34d1663acf3bec30", "score": "0.49507403", "text": "func (_ERC20AtomicSwapper *ERC20AtomicSwapperTransactor) Claim(opts *TransactOpts, _swapID [32]byte, _randomNumber [32]byte) (*types.Transaction, error) {\n\treturn _ERC20AtomicSwapper.contract.Transact(opts, \"claim\", _swapID, _randomNumber)\n}", "title": "" }, { "docid": "561de3ccbb0bcc7ace89694ec9fcf87f", "score": "0.49366865", "text": "func (_Incmode *IncmodeTransactor) Claim(opts *bind.TransactOpts) (*types.Transaction, error) {\n\treturn _Incmode.contract.Transact(opts, \"claim\")\n}", "title": "" }, { "docid": "67d7d9f3341e0ab84e6feacdff4762e2", "score": "0.49316704", "text": "func (syncer *Syncer) InformResource(resource string, version string) {\n\tsyncer.setVersion(resource, version)\n\n\tif syncer.isDone() {\n\t\treturn\n\t}\n\tsyncer.syncResource(resource, version)\n}", "title": "" }, { "docid": "da0bcfef4854faab2cb96f18d2e734c4", "score": "0.4901946", "text": "func (c *NitroClient) ApplyResource(resourceType string, resourceStruct interface{}) error {\n\n\tnsResource := make(map[string]interface{})\n\tnsResource[resourceType] = resourceStruct\n\n\tresourceJSON, err := JSONMarshal(nsResource)\n\n\tc.logger.Trace(\"ApplyResource \", \"resourceJSON\", string(resourceJSON))\n\n\tbody, err := c.applyResource(resourceType, resourceJSON)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[ERROR] nitro-go: Failed to apply resource of type %s, 
err=%s\", resourceType, err)\n\t}\n\t_ = body\n\n\treturn nil\n}", "title": "" }, { "docid": "af14db6eecf8a00421012cef5ed7e6a7", "score": "0.487251", "text": "func ReadResource(resource *schema.Resource, d *schema.ResourceData, m interface{}) error {\n\n\tparams := m.(map[string]interface{})\n\tclient := params[\"ibxClient\"].(*skyinfoblox.Client)\n\n\tref := d.Id()\n\tobj := make(map[string]interface{})\n\n\tattrs := GetAttrs(resource)\n\tkeys := []string{}\n\tfor _, attr := range attrs {\n\t\tkeys = append(keys, attr.Name)\n\t}\n\terr := client.Read(ref, keys, &obj)\n\tif err != nil {\n\t\td.SetId(\"\")\n\t\treturn err\n\t}\n\n\tdelete(obj, \"_ref\")\n\tfor key := range obj {\n\t\tif isScalar(obj[key]) == true {\n\t\t\tlog.Printf(\"Setting key %s to %+v\\n\", key, obj[key])\n\t\t\td.Set(key, obj[key])\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "82d0501b04855c32429597d200f7229b", "score": "0.48333633", "text": "func (s *Server) ReadResource(ctx context.Context, req *ReadResourceRequest, resp *ReadResourceResponse) {\n\tif req == nil {\n\t\treturn\n\t}\n\n\tif req.CurrentState == nil {\n\t\tresp.Diagnostics.AddError(\n\t\t\t\"Unexpected Read Request\",\n\t\t\t\"An unexpected error was encountered when reading the resource. The current state was missing.\\n\\n\"+\n\t\t\t\t\"This is always a problem with Terraform or terraform-plugin-framework. Please report this to the provider developer.\",\n\t\t)\n\n\t\treturn\n\t}\n\n\tif resourceWithConfigure, ok := req.Resource.(resource.ResourceWithConfigure); ok {\n\t\tlogging.FrameworkTrace(ctx, \"Resource implements ResourceWithConfigure\")\n\n\t\tconfigureReq := resource.ConfigureRequest{\n\t\t\tProviderData: s.ResourceConfigureData,\n\t\t}\n\t\tconfigureResp := resource.ConfigureResponse{}\n\n\t\tlogging.FrameworkDebug(ctx, \"Calling provider defined Resource Configure\")\n\t\tresourceWithConfigure.Configure(ctx, configureReq, &configureResp)\n\t\tlogging.FrameworkDebug(ctx, \"Called provider defined Resource Configure\")\n\n\t\tresp.Diagnostics.Append(configureResp.Diagnostics...)\n\n\t\tif resp.Diagnostics.HasError() {\n\t\t\treturn\n\t\t}\n\t}\n\n\treadReq := resource.ReadRequest{\n\t\tState: tfsdk.State{\n\t\t\tSchema: req.CurrentState.Schema,\n\t\t\tRaw: req.CurrentState.Raw.Copy(),\n\t\t},\n\t}\n\treadResp := resource.ReadResponse{\n\t\tState: tfsdk.State{\n\t\t\tSchema: req.CurrentState.Schema,\n\t\t\tRaw: req.CurrentState.Raw.Copy(),\n\t\t},\n\t}\n\n\tif req.ProviderMeta != nil {\n\t\treadReq.ProviderMeta = *req.ProviderMeta\n\t}\n\n\tprivateProviderData := privatestate.EmptyProviderData(ctx)\n\n\treadReq.Private = privateProviderData\n\treadResp.Private = privateProviderData\n\n\tif req.Private != nil {\n\t\tif req.Private.Provider != nil {\n\t\t\treadReq.Private = req.Private.Provider\n\t\t\treadResp.Private = req.Private.Provider\n\t\t}\n\n\t\tresp.Private = req.Private\n\t}\n\n\tlogging.FrameworkDebug(ctx, \"Calling provider defined Resource Read\")\n\treq.Resource.Read(ctx, readReq, &readResp)\n\tlogging.FrameworkDebug(ctx, \"Called provider defined Resource Read\")\n\n\tresp.Diagnostics = readResp.Diagnostics\n\tresp.NewState = &readResp.State\n\n\tif readResp.Private != nil {\n\t\tif resp.Private == nil {\n\t\t\tresp.Private = &privatestate.Data{}\n\t\t}\n\n\t\tresp.Private.Provider = readResp.Private\n\t}\n\n\tif resp.Diagnostics.HasError() {\n\t\treturn\n\t}\n\n\tsemanticEqualityReq := SchemaSemanticEqualityRequest{\n\t\tPriorData: fwschemadata.Data{\n\t\t\tDescription: 
fwschemadata.DataDescriptionState,\n\t\t\tSchema: req.CurrentState.Schema,\n\t\t\tTerraformValue: req.CurrentState.Raw.Copy(),\n\t\t},\n\t\tProposedNewData: fwschemadata.Data{\n\t\t\tDescription: fwschemadata.DataDescriptionState,\n\t\t\tSchema: resp.NewState.Schema,\n\t\t\tTerraformValue: resp.NewState.Raw.Copy(),\n\t\t},\n\t}\n\tsemanticEqualityResp := &SchemaSemanticEqualityResponse{\n\t\tNewData: semanticEqualityReq.ProposedNewData,\n\t}\n\n\tSchemaSemanticEquality(ctx, semanticEqualityReq, semanticEqualityResp)\n\n\tresp.Diagnostics.Append(semanticEqualityResp.Diagnostics...)\n\n\tif resp.Diagnostics.HasError() {\n\t\treturn\n\t}\n\n\tif semanticEqualityResp.NewData.TerraformValue.Equal(resp.NewState.Raw) {\n\t\treturn\n\t}\n\n\tlogging.FrameworkDebug(ctx, \"State updated due to semantic equality\")\n\n\tresp.NewState.Raw = semanticEqualityResp.NewData.TerraformValue\n}", "title": "" }, { "docid": "04203cd77ff3862b45494bd07e424b1f", "score": "0.4827876", "text": "func (op *bindOp) spaCanClaimOwnership(ctx context.Context) {\n\tif op.vsrSpaInUse() && !op.vsrOwnsSpa() {\n\t\top.rhs.RetryLater = true\n\t}\n}", "title": "" }, { "docid": "f9bc19d1a87d90150bc861b4940a439a", "score": "0.48006508", "text": "func (r *BlockDeviceClaimReconciler) Reconcile(ctx context.Context, request ctrl.Request) (ctrl.Result, error) {\n\t_ = context.Background()\n\t// _ = r.Log.WithValues(\"blockdeviceclaim\", request.NamespacedName)\n\n\t// your logic here\n\t// Fetch the BlockDeviceClaim instance\n\n\tinstance := &apis.BlockDeviceClaim{}\n\terr := r.Client.Get(context.TODO(), request.NamespacedName, instance)\n\tif err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\t// Request object not found, could have been deleted after reconcile request.\n\t\t\t// Owned objects are automatically garbage collected. 
For additional cleanup logic use finalizers.\n\t\t\t// Return and don't requeue\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t\t// Error reading the object - requeue the request.\n\t\treturn reconcile.Result{}, err\n\t}\n\n\t// check if reconciliation is disabled for this resource\n\tif IsReconcileDisabled(instance) {\n\t\treturn reconcile.Result{}, nil\n\t}\n\n\tswitch instance.Status.Phase {\n\tcase apis.BlockDeviceClaimStatusPending:\n\t\tfallthrough\n\tcase apis.BlockDeviceClaimStatusEmpty:\n\t\tklog.Infof(\"BDC %s claim phase is: %s\", instance.Name, instance.Status.Phase)\n\t\t// claim the BD only if deletion time stamp is not set.\n\t\t// since BDC can now have multiple finalizers, we should not claim a\n\t\t// BD if its deletiontime stamp is set.\n\t\tif instance.DeletionTimestamp.IsZero() {\n\t\t\terr := r.claimDeviceForBlockDeviceClaim(instance)\n\t\t\tif err != nil {\n\t\t\t\tklog.Errorf(\"%s failed to claim: %v\", instance.Name, err)\n\t\t\t\treturn reconcile.Result{}, err\n\t\t\t}\n\t\t}\n\tcase apis.BlockDeviceClaimStatusInvalidCapacity:\n\t\t// migrating state to Pending if in InvalidCapacity state.\n\t\t// The InvalidCapacityState is deprecated and pending will be used.\n\t\t// InvalidCapacity will be the reason for why the BDC is in Pending state.\n\t\tinstance.Status.Phase = apis.BlockDeviceClaimStatusPending\n\t\terr := r.updateClaimStatus(apis.BlockDeviceClaimStatusPending, instance)\n\t\tif err != nil {\n\t\t\tklog.Errorf(\"error in updating phase to pending from invalid capacity for %s: %v\", instance.Name, err)\n\t\t}\n\t\tklog.Infof(\"%s claim phase is: %s\", instance.Name, instance.Status.Phase)\n\tcase apis.BlockDeviceClaimStatusDone:\n\t\terr := r.FinalizerHandling(instance)\n\t\tif err != nil {\n\t\t\tklog.Errorf(\"Finalizer handling failed for %s: %v\", instance.Name, err)\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t}\n\n\treturn ctrl.Result{}, nil\n}", "title": "" }, { "docid": "4eae216f51c6d82b53dcb6a6c73ce236", "score": "0.47737542", "text": "func (_SplitPayment *SplitPaymentTransactor) Claim(opts *bind.TransactOpts) (*types.Transaction, error) {\n\treturn _SplitPayment.contract.Transact(opts, \"claim\")\n}", "title": "" }, { "docid": "eec87624892c4e234e71b748f90942a4", "score": "0.47376633", "text": "func assertAdoptedResourceManaged(\n\texpectedManaged bool,\n\tt *testing.T,\n\tctx context.Context,\n\tkc *ctrlrtclientmock.Client,\n\tobject *ackv1alpha1.AdoptedResource,\n) {\n\tif expectedManaged {\n\t\tkc.AssertCalled(t, \"Patch\", ctx, object, mock.AnythingOfType(\"*client.mergeFromPatch\"))\n\t} else {\n\t\tkc.AssertNotCalled(t, \"Patch\", ctx, object, mock.AnythingOfType(\"*client.mergeFromPatch\"))\n\t}\n}", "title": "" }, { "docid": "1c749b4d462a3d3838e005df200b2a9a", "score": "0.4737334", "text": "func (p *Protocol) Claim(\n\tctx context.Context,\n\tsm protocol.StateManager,\n\tamount *big.Int,\n) error {\n\traCtx := protocol.MustGetRunActionsCtx(ctx)\n\tif err := p.updateTotalBalance(sm, amount); err != nil {\n\t\treturn err\n\t}\n\treturn p.claimFromAccount(sm, raCtx.Caller, amount)\n}", "title": "" }, { "docid": "bc735df310d18a3e36cfe4c17529560b", "score": "0.47366002", "text": "func (c *Client) Ensure(ctx context.Context, obj runtime.Object) error {\n\tlocal, err := c.convert(obj)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Set status\n\tremote, err := c.internal.Get(ctx, local.Spec.ResourceGroup, local.Spec.Name)\n\tfound := !remote.IsHTTPStatus(http.StatusNotFound)\n\tc.SetStatus(local, remote)\n\tif err != nil && found {\n\t\treturn 
err\n\t}\n\n\t// TODO(ace): create something like SQLServerCredential CRD, and pivot on state of that\n\t// Will allow for higher level orchestration better than the raw Kubernetes secret (?)\n\ttargetSecret, err := c.ensureSecret(ctx, local)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Pull from secret. Known to exist by construction.\n\tadminLogin := string(targetSecret.Data[\"username\"])\n\tadminPassword := string(targetSecret.Data[\"password\"])\n\n\t// Wrap, check status, and exit early if appropriate\n\tvar spec *Spec\n\tif found {\n\t\tspec = NewSpecWithRemote(&remote)\n\t\t// TODO(ace): this is not checking whether the secret needs to be updated\n\t\t// TODO(ace): this should be an extension point to gracefully handle immutable updates\n\t\t// if !spec.NeedsUpdate(local) {\n\t\t// \treturn nil\n\t\t// }\n\t} else {\n\t\tspec = NewSpec()\n\t}\n\n\t// Overlay new properties over old/default spec\n\tspec.Set(\n\t\tName(&local.Spec.Name),\n\t\tLocation(&local.Spec.Location),\n\t\tAdminLogin(&adminLogin), // n.b., immutable\n\t\tAdminPassword(&adminPassword),\n\t)\n\n\t// Apply to Azure. Use Update() if the object was found, to ensure that we set the password.\n\tif found {\n\t\tupdateProps := sql.ServerUpdate{\n\t\t\tServerProperties: spec.Build().ServerProperties,\n\t\t}\n\t\tfuture, err := c.internal.Update(ctx, local.Spec.ResourceGroup, local.Spec.Name, updateProps)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tb, err := future.MarshalJSON()\n\t\tlitter.Dump(string(b))\n\t\tif err := future.WaitForCompletionRef(ctx, c.internal.Client); err != nil {\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\t// Opt to allow for blocking calls, highly parallelizing the controller instead.\n\t\tfuture, err := c.internal.CreateOrUpdate(ctx, local.Spec.ResourceGroup, local.Spec.Name, spec.Build())\n\t\tif err != nil {\n\t\t\treturn nil\n\t\t}\n\t\tb, err := future.MarshalJSON()\n\t\tlitter.Dump(string(b))\n\t\tif err := future.WaitForCompletionRef(ctx, c.internal.Client); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Block access after creation if desired\n\tif err := c.ensureRule(ctx, local); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c2440417274518f4f3889a80d66f1098", "score": "0.47255486", "text": "func (v *CSIVolume) Claim(claim *CSIVolumeClaim, alloc *Allocation) error {\n\t// COMPAT: volumes registered prior to 1.1.0 will be missing caps for the\n\t// volume on any claim. 
Correct this when we make the first change to a\n\t// claim by setting its currently claimed capability as the only requested\n\t// capability\n\tif len(v.RequestedCapabilities) == 0 && v.AccessMode != \"\" && v.AttachmentMode != \"\" {\n\t\tv.RequestedCapabilities = []*CSIVolumeCapability{\n\t\t\t{\n\t\t\t\tAccessMode: v.AccessMode,\n\t\t\t\tAttachmentMode: v.AttachmentMode,\n\t\t\t},\n\t\t}\n\t}\n\tif v.AttachmentMode != CSIVolumeAttachmentModeUnknown &&\n\t\tclaim.AttachmentMode != CSIVolumeAttachmentModeUnknown &&\n\t\tv.AttachmentMode != claim.AttachmentMode {\n\t\treturn fmt.Errorf(\"cannot change attachment mode of claimed volume\")\n\t}\n\n\tif claim.State == CSIVolumeClaimStateTaken {\n\t\tswitch claim.Mode {\n\t\tcase CSIVolumeClaimRead:\n\t\t\treturn v.claimRead(claim, alloc)\n\t\tcase CSIVolumeClaimWrite:\n\t\t\treturn v.claimWrite(claim, alloc)\n\t\t}\n\t}\n\t// either GC or a Unpublish checkpoint\n\treturn v.claimRelease(claim)\n}", "title": "" }, { "docid": "5287df637147fcdb1eb9f57c8b9aa2df", "score": "0.47130007", "text": "func (ownIngress *OwnIngress) ApplyOwnResource(instance *Unit, client client.Client,\n\tlogger logr.Logger, scheme *runtime.Scheme) error {\n\n\t// assert if Ingress exist\n\texist, found, err := ownIngress.OwnResourceExist(instance, client, logger)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// make Ingress object\n\tsts, err := ownIngress.MakeOwnResource(instance, logger, scheme)\n\tif err != nil {\n\t\treturn err\n\t}\n\tnewIngress := sts.(*v1beta1.Ingress)\n\n\t// apply the Ingress object just make\n\tif !exist {\n\t\t// if Ingress not exist,then create it\n\t\tmsg := fmt.Sprintf(\"Ingress %s/%s not found, create it!\", newIngress.Namespace, newIngress.Name)\n\t\tlogger.Info(msg)\n\t\treturn client.Create(context.TODO(), newIngress)\n\t} else {\n\t\tfoundIngress := found.(*v1beta1.Ingress)\n\t\t// if Ingress exist with change,then try to update it\n\t\tif !reflect.DeepEqual(newIngress.Spec, foundIngress.Spec) {\n\t\t\tmsg := fmt.Sprintf(\"Updating Ingress %s/%s\", newIngress.Namespace, newIngress.Name)\n\t\t\tlogger.Info(msg)\n\t\t\treturn client.Update(context.TODO(), newIngress)\n\t\t}\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "720f166ffcfc59c5ff038084bec02a00", "score": "0.46824813", "text": "func (_Identityclaim *IdentityclaimCaller) Claim(opts *bind.CallOpts) ([32]byte, error) {\n\tvar (\n\t\tret0 = new([32]byte)\n\t)\n\tout := ret0\n\terr := _Identityclaim.contract.Call(opts, out, \"claim\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "1a3ff33fdb3af242c4a32d03c102d742", "score": "0.46601808", "text": "func (objRef ObjectRef) GrantCapability(capability Capability) ObjectRef {\n\tseg := capn.NewBuffer(nil)\n\tcap := msgs.NewCapability(seg)\n\tswitch capability {\n\tcase None:\n\t\tcap.SetNone()\n\tcase Read:\n\t\tcap.SetRead()\n\tcase Write:\n\t\tcap.SetWrite()\n\tcase ReadWrite:\n\t\tcap.SetReadWrite()\n\tdefault:\n\t\tpanic(fmt.Sprintf(\"Unexpected capability value: %v\", capability))\n\t}\n\n\treturn ObjectRef{\n\t\tobject: objRef.object,\n\t\tcapability: common.NewCapability(cap),\n\t}\n}", "title": "" }, { "docid": "d2870fb1f739ee24f28b1cf3cf04d951", "score": "0.4623546", "text": "func (cs *CredentialSpecResource) ApplyTransition(nextState resourcestatus.ResourceStatus) error {\n\treturn errors.New(\"not implemented\")\n}", "title": "" }, { "docid": "a0333fa98f09b8a9f36c9a8ca4324575", "score": "0.46230268", "text": "func (_TokenContractFunctions *TokenContractFunctionsTransactor) Claim(opts *bind.TransactOpts) 
(*types.Transaction, error) {\n\treturn _TokenContractFunctions.contract.Transact(opts, \"claim\")\n}", "title": "" }, { "docid": "9b80f04386472e0550fba58da339220b", "score": "0.4622352", "text": "func UpdateResource(resource *schema.Resource, d *schema.ResourceData, m interface{}) error {\n\n\tneedsUpdate := false\n\n\tparams := m.(map[string]interface{})\n\tclient := params[\"ibxClient\"].(*skyinfoblox.Client)\n\n\tref := d.Id()\n\tobj := make(map[string]interface{})\n\n\tattrs := GetAttrs(resource)\n\tfor _, attr := range attrs {\n\t\tkey := attr.Name\n\t\tif d.HasChange(key) {\n\t\t\tattr.Value = d.Get(key)\n\t\t\tobj[key] = GetValue(attr)\n\t\t\tlog.Printf(\"Updating field %s, value: %+v\\n\", key, obj[key])\n\t\t\tneedsUpdate = true\n\t\t}\n\t}\n\n\tlog.Printf(\"UPDATE: going to update reference %s with obj: \\n%+v\\n\", ref, obj)\n\n\tif needsUpdate {\n\t\tnewRef, err := client.Update(ref, obj)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\td.SetId(newRef)\n\t}\n\n\treturn ReadResource(resource, d, m)\n}", "title": "" }, { "docid": "fd1bd33b2285242c21a8df7bf2a9f780", "score": "0.46078986", "text": "func (rm *resourceManager) provideUpdatedResource(\n\tdesired *resource,\n\treplicationGroup *svcsdk.ReplicationGroup,\n) (*resource, error) {\n\t// Merge in the information we read from the API call above to the copy of\n\t// the original Kubernetes object we passed to the function\n\tko := desired.ko.DeepCopy()\n\n\tif ko.Status.ACKResourceMetadata == nil {\n\t\tko.Status.ACKResourceMetadata = &ackv1alpha1.ResourceMetadata{}\n\t}\n\tif replicationGroup.ARN != nil {\n\t\tarn := ackv1alpha1.AWSResourceName(*replicationGroup.ARN)\n\t\tko.Status.ACKResourceMetadata.ARN = &arn\n\t}\n\tif replicationGroup.AuthTokenEnabled != nil {\n\t\tko.Status.AuthTokenEnabled = replicationGroup.AuthTokenEnabled\n\t}\n\tif replicationGroup.AuthTokenLastModifiedDate != nil {\n\t\tko.Status.AuthTokenLastModifiedDate = &metav1.Time{*replicationGroup.AuthTokenLastModifiedDate}\n\t}\n\tif replicationGroup.AutomaticFailover != nil {\n\t\tko.Status.AutomaticFailover = replicationGroup.AutomaticFailover\n\t}\n\tif replicationGroup.ClusterEnabled != nil {\n\t\tko.Status.ClusterEnabled = replicationGroup.ClusterEnabled\n\t}\n\tif replicationGroup.ConfigurationEndpoint != nil {\n\t\tf7 := &svcapitypes.Endpoint{}\n\t\tif replicationGroup.ConfigurationEndpoint.Address != nil {\n\t\t\tf7.Address = replicationGroup.ConfigurationEndpoint.Address\n\t\t}\n\t\tif replicationGroup.ConfigurationEndpoint.Port != nil {\n\t\t\tf7.Port = replicationGroup.ConfigurationEndpoint.Port\n\t\t}\n\t\tko.Status.ConfigurationEndpoint = f7\n\t}\n\tif replicationGroup.Description != nil {\n\t\tko.Status.Description = replicationGroup.Description\n\t}\n\tif replicationGroup.GlobalReplicationGroupInfo != nil {\n\t\tf9 := &svcapitypes.GlobalReplicationGroupInfo{}\n\t\tif replicationGroup.GlobalReplicationGroupInfo.GlobalReplicationGroupId != nil {\n\t\t\tf9.GlobalReplicationGroupID = replicationGroup.GlobalReplicationGroupInfo.GlobalReplicationGroupId\n\t\t}\n\t\tif replicationGroup.GlobalReplicationGroupInfo.GlobalReplicationGroupMemberRole != nil {\n\t\t\tf9.GlobalReplicationGroupMemberRole = replicationGroup.GlobalReplicationGroupInfo.GlobalReplicationGroupMemberRole\n\t\t}\n\t\tko.Status.GlobalReplicationGroupInfo = f9\n\t}\n\tif replicationGroup.MemberClusters != nil {\n\t\tf11 := []*string{}\n\t\tfor _, f11iter := range replicationGroup.MemberClusters {\n\t\t\tvar f11elem string\n\t\t\tf11elem = *f11iter\n\t\t\tf11 = append(f11, 
&f11elem)\n\t\t}\n\t\tko.Status.MemberClusters = f11\n\t}\n\tif replicationGroup.MultiAZ != nil {\n\t\tko.Status.MultiAZ = replicationGroup.MultiAZ\n\t}\n\tif replicationGroup.NodeGroups != nil {\n\t\tf13 := []*svcapitypes.NodeGroup{}\n\t\tfor _, f13iter := range replicationGroup.NodeGroups {\n\t\t\tf13elem := &svcapitypes.NodeGroup{}\n\t\t\tif f13iter.NodeGroupId != nil {\n\t\t\t\tf13elem.NodeGroupID = f13iter.NodeGroupId\n\t\t\t}\n\t\t\tif f13iter.NodeGroupMembers != nil {\n\t\t\t\tf13elemf1 := []*svcapitypes.NodeGroupMember{}\n\t\t\t\tfor _, f13elemf1iter := range f13iter.NodeGroupMembers {\n\t\t\t\t\tf13elemf1elem := &svcapitypes.NodeGroupMember{}\n\t\t\t\t\tif f13elemf1iter.CacheClusterId != nil {\n\t\t\t\t\t\tf13elemf1elem.CacheClusterID = f13elemf1iter.CacheClusterId\n\t\t\t\t\t}\n\t\t\t\t\tif f13elemf1iter.CacheNodeId != nil {\n\t\t\t\t\t\tf13elemf1elem.CacheNodeID = f13elemf1iter.CacheNodeId\n\t\t\t\t\t}\n\t\t\t\t\tif f13elemf1iter.CurrentRole != nil {\n\t\t\t\t\t\tf13elemf1elem.CurrentRole = f13elemf1iter.CurrentRole\n\t\t\t\t\t}\n\t\t\t\t\tif f13elemf1iter.PreferredAvailabilityZone != nil {\n\t\t\t\t\t\tf13elemf1elem.PreferredAvailabilityZone = f13elemf1iter.PreferredAvailabilityZone\n\t\t\t\t\t}\n\t\t\t\t\tif f13elemf1iter.ReadEndpoint != nil {\n\t\t\t\t\t\tf13elemf1elemf4 := &svcapitypes.Endpoint{}\n\t\t\t\t\t\tif f13elemf1iter.ReadEndpoint.Address != nil {\n\t\t\t\t\t\t\tf13elemf1elemf4.Address = f13elemf1iter.ReadEndpoint.Address\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif f13elemf1iter.ReadEndpoint.Port != nil {\n\t\t\t\t\t\t\tf13elemf1elemf4.Port = f13elemf1iter.ReadEndpoint.Port\n\t\t\t\t\t\t}\n\t\t\t\t\t\tf13elemf1elem.ReadEndpoint = f13elemf1elemf4\n\t\t\t\t\t}\n\t\t\t\t\tf13elemf1 = append(f13elemf1, f13elemf1elem)\n\t\t\t\t}\n\t\t\t\tf13elem.NodeGroupMembers = f13elemf1\n\t\t\t}\n\t\t\tif f13iter.PrimaryEndpoint != nil {\n\t\t\t\tf13elemf2 := &svcapitypes.Endpoint{}\n\t\t\t\tif f13iter.PrimaryEndpoint.Address != nil {\n\t\t\t\t\tf13elemf2.Address = f13iter.PrimaryEndpoint.Address\n\t\t\t\t}\n\t\t\t\tif f13iter.PrimaryEndpoint.Port != nil {\n\t\t\t\t\tf13elemf2.Port = f13iter.PrimaryEndpoint.Port\n\t\t\t\t}\n\t\t\t\tf13elem.PrimaryEndpoint = f13elemf2\n\t\t\t}\n\t\t\tif f13iter.ReaderEndpoint != nil {\n\t\t\t\tf13elemf3 := &svcapitypes.Endpoint{}\n\t\t\t\tif f13iter.ReaderEndpoint.Address != nil {\n\t\t\t\t\tf13elemf3.Address = f13iter.ReaderEndpoint.Address\n\t\t\t\t}\n\t\t\t\tif f13iter.ReaderEndpoint.Port != nil {\n\t\t\t\t\tf13elemf3.Port = f13iter.ReaderEndpoint.Port\n\t\t\t\t}\n\t\t\t\tf13elem.ReaderEndpoint = f13elemf3\n\t\t\t}\n\t\t\tif f13iter.Slots != nil {\n\t\t\t\tf13elem.Slots = f13iter.Slots\n\t\t\t}\n\t\t\tif f13iter.Status != nil {\n\t\t\t\tf13elem.Status = f13iter.Status\n\t\t\t}\n\t\t\tf13 = append(f13, f13elem)\n\t\t}\n\t\tko.Status.NodeGroups = f13\n\t}\n\tif replicationGroup.PendingModifiedValues != nil {\n\t\tf14 := &svcapitypes.ReplicationGroupPendingModifiedValues{}\n\t\tif replicationGroup.PendingModifiedValues.AuthTokenStatus != nil {\n\t\t\tf14.AuthTokenStatus = replicationGroup.PendingModifiedValues.AuthTokenStatus\n\t\t}\n\t\tif replicationGroup.PendingModifiedValues.AutomaticFailoverStatus != nil {\n\t\t\tf14.AutomaticFailoverStatus = replicationGroup.PendingModifiedValues.AutomaticFailoverStatus\n\t\t}\n\t\tif replicationGroup.PendingModifiedValues.PrimaryClusterId != nil {\n\t\t\tf14.PrimaryClusterID = replicationGroup.PendingModifiedValues.PrimaryClusterId\n\t\t}\n\t\tif replicationGroup.PendingModifiedValues.Resharding != nil {\n\t\t\tf14f3 := 
&svcapitypes.ReshardingStatus{}\n\t\t\tif replicationGroup.PendingModifiedValues.Resharding.SlotMigration != nil {\n\t\t\t\tf14f3f0 := &svcapitypes.SlotMigration{}\n\t\t\t\tif replicationGroup.PendingModifiedValues.Resharding.SlotMigration.ProgressPercentage != nil {\n\t\t\t\t\tf14f3f0.ProgressPercentage = replicationGroup.PendingModifiedValues.Resharding.SlotMigration.ProgressPercentage\n\t\t\t\t}\n\t\t\t\tf14f3.SlotMigration = f14f3f0\n\t\t\t}\n\t\t\tf14.Resharding = f14f3\n\t\t}\n\t\tko.Status.PendingModifiedValues = f14\n\t}\n\tif replicationGroup.SnapshottingClusterId != nil {\n\t\tko.Status.SnapshottingClusterID = replicationGroup.SnapshottingClusterId\n\t}\n\tif replicationGroup.Status != nil {\n\t\tko.Status.Status = replicationGroup.Status\n\t}\n\trm.setStatusDefaults(ko)\n\t// custom set output from response\n\trm.customSetOutput(desired, replicationGroup, ko)\n\treturn &resource{ko}, nil\n}", "title": "" }, { "docid": "4d89c813540f3b6efe23a209f16d8a14", "score": "0.45958796", "text": "func parametrizeResource(resource interface{}, context interfaces.GraphContext, replaceIn []string) interface{} {\n\treturn copier.CopyWithReplacements(resource, func(p string) string {\n\t\tvalue := context.GetArg(p)\n\t\tif value == \"\" {\n\t\t\treturn \"$\" + p\n\t\t}\n\t\treturn value\n\t}, append(replaceIn, \"ObjectMeta\")...)\n}", "title": "" }, { "docid": "e0f7c38f418fe9ceb0226086ecd35b84", "score": "0.45918494", "text": "func (_Bancor *BancorTransactor) ClaimAndConvertFor(opts *bind.TransactOpts, _path []common.Address, _amount *big.Int, _minReturn *big.Int, _beneficiary common.Address) (*types.Transaction, error) {\n\treturn _Bancor.contract.Transact(opts, \"claimAndConvertFor\", _path, _amount, _minReturn, _beneficiary)\n}", "title": "" }, { "docid": "f50d54f67b1d68660c68ae57b4ce659e", "score": "0.4590953", "text": "func (db *AdaCoreDB) SetResource(ctx context.Context, ri *adacorepb.ResourceInfo) error {\n\tbuf, err := proto.Marshal(ri)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = db.ankaDB.Set(ctx, AdaCoreDBName, makeKey(ri.HashName), buf)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "e6aabdcaf767e49162fca1982ce9dc1f", "score": "0.45644778", "text": "func (resc *Resource) Acquire() error {\n\tif resc.Status == ResourceLocked {\n\t\treturn errors.New(\"resource is already locked\")\n\t}\n\tresc.Status = ResourceLocked\n\treturn nil\n}", "title": "" }, { "docid": "220aeb03a8eef279b7d89870561e763b", "score": "0.4564196", "text": "func (r *AccountClaimReconciler) Reconcile(ctx context.Context, request ctrl.Request) (ctrl.Result, error) {\n\treqLogger := log.WithValues(\"Controller\", controllerName, \"Request.Namespace\", request.Namespace, \"Request.Name\", request.Name)\n\n\t// Watch AccountClaim\n\taccountClaim := &awsv1alpha1.AccountClaim{}\n\terr := r.Client.Get(context.TODO(), request.NamespacedName, accountClaim)\n\tif err != nil {\n\t\tif k8serr.IsNotFound(err) {\n\t\t\t// Request object not found, could have been deleted after reconcile request.\n\t\t\t// Owned objects are automatically garbage collected. 
For additional cleanup logic use finalizers.\n\t\t\t// Return and don't requeue\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t\t// Error reading the object - requeue the request.\n\t\treturn reconcile.Result{}, err\n\t}\n\n\t// Fake Account Claim Process for Hive Testing ..\n\t// Fake account claims are account claims which have the label `managed.openshift.com/fake: true`\n\t// These fake claims are used for testing within hive\n\tif accountClaim.Annotations[fakeAnnotation] == \"true\" {\n\t\trequeue, err := r.processFake(reqLogger, accountClaim)\n\t\tif err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t\treturn reconcile.Result{Requeue: requeue}, nil\n\t}\n\n\t// Add finalizer to the CR in case it's not present (e.g. old accounts)\n\tif !controllerutils.Contains(accountClaim.GetFinalizers(), accountClaimFinalizer) {\n\t\terr := r.addFinalizer(reqLogger, accountClaim)\n\t\tif err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\n\t\treturn reconcile.Result{}, nil\n\t}\n\n\tif accountClaim.DeletionTimestamp != nil {\n\t\treturn reconcile.Result{}, r.handleAccountClaimDeletion(reqLogger, accountClaim)\n\t}\n\n\tisCCS := accountClaim.Spec.BYOCAWSAccountID != \"\"\n\n\tif accountClaim.Status.State == awsv1alpha1.ClaimStatusPending {\n\t\tnow := metav1.Now()\n\t\tpendingDuration := now.Sub(accountClaim.GetObjectMeta().GetCreationTimestamp().Time)\n\t\tlocalmetrics.Collector.SetAccountClaimPendingDuration(isCCS, pendingDuration.Seconds())\n\t}\n\n\tif accountClaim.Spec.BYOC {\n\t\treturn r.handleBYOCAccountClaim(reqLogger, accountClaim)\n\t}\n\n\t// Return if this claim has been satisfied\n\tif claimIsSatisfied(accountClaim) {\n\t\treqLogger.Info(fmt.Sprintf(\"Claim %s has been satisfied ignoring\", accountClaim.ObjectMeta.Name))\n\t\treturn reconcile.Result{}, nil\n\t}\n\n\tif accountClaim.Status.State == \"\" {\n\t\tmessage := \"Attempting to claim account\"\n\t\treqLogger.Info(message)\n\t\taccountClaim.Status.State = awsv1alpha1.ClaimStatusPending\n\n\t\taccountClaim.Status.Conditions = controllerutils.SetAccountClaimCondition(\n\t\t\taccountClaim.Status.Conditions,\n\t\t\tawsv1alpha1.AccountUnclaimed,\n\t\t\tcorev1.ConditionTrue,\n\t\t\tAccountClaimed,\n\t\t\tmessage,\n\t\t\tcontrollerutils.UpdateConditionNever,\n\t\t\tisCCS,\n\t\t)\n\n\t\t// Update the Spec on AccountClaim\n\t\treturn reconcile.Result{}, r.statusUpdate(reqLogger, accountClaim)\n\t}\n\n\tvar unclaimedAccount *awsv1alpha1.Account\n\n\t// Get an unclaimed account from the pool\n\tif accountClaim.Spec.AccountLink == \"\" {\n\t\tunclaimedAccount, err = r.getUnclaimedAccount(reqLogger, accountClaim)\n\t\tif err != nil {\n\t\t\treqLogger.Error(err, \"Unable to select an unclaimed account from the pool\")\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t} else {\n\t\tunclaimedAccount, err = r.getClaimedAccount(accountClaim.Spec.AccountLink, awsv1alpha1.AccountCrNamespace)\n\t\tif err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t}\n\n\t// Set Account.Spec.ClaimLink\n\t// This will trigger the reconcile loop for the account which will mark the account as claimed in its status\n\tif unclaimedAccount.Spec.ClaimLink == \"\" {\n\t\tupdateClaimedAccountFields(reqLogger, unclaimedAccount, accountClaim)\n\t\terr := r.accountSpecUpdate(reqLogger, unclaimedAccount)\n\t\tif err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t}\n\n\t// Set awsAccountClaim.Spec.AccountLink\n\tif accountClaim.Spec.AccountLink == \"\" {\n\t\tsetAccountLinkOnAccountClaim(reqLogger, unclaimedAccount, 
accountClaim)\n\t\treturn reconcile.Result{}, r.specUpdate(reqLogger, accountClaim)\n\t}\n\n\tif !accountClaim.Spec.ManualSTSMode {\n\t\terr = r.setSupportRoleARNManagedOpenshift(reqLogger, accountClaim, unclaimedAccount)\n\t\tif err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t}\n\n\t// Set awsAccountClaim.Spec.AwsAccountOU\n\tif accountClaim.Spec.AccountOU == \"\" || accountClaim.Spec.AccountOU == \"ROOT\" {\n\t\t// Determine if in fedramp env\n\t\tawsRegion := config.GetDefaultRegion()\n\n\t\t// aws client\n\t\tawsClient, err := r.awsClientBuilder.GetClient(controllerName, r.Client, awsclient.NewAwsClientInput{\n\t\t\tSecretName: controllerutils.AwsSecretName,\n\t\t\tNameSpace: awsv1alpha1.AccountCrNamespace,\n\t\t\tAwsRegion: awsRegion,\n\t\t})\n\t\tif err != nil {\n\t\t\tunexpectedErrorMsg := \"OU: Failed to build aws client\"\n\t\t\treqLogger.Info(unexpectedErrorMsg)\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\n\t\terr = MoveAccountToOU(r, reqLogger, awsClient, accountClaim, unclaimedAccount)\n\t\tif err != nil {\n\t\t\tif err == awsv1alpha1.ErrAccMoveRaceCondition {\n\t\t\t\t// Due to a race condition, we need to requeue the reconcile to ensure that the account was correctly moved into the correct OU\n\t\t\t\treturn reconcile.Result{Requeue: true}, nil\n\t\t\t}\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t}\n\n\t// Create secret for OCM to consume\n\tif !r.checkIAMSecretExists(accountClaim.Spec.AwsCredentialSecret.Name, accountClaim.Spec.AwsCredentialSecret.Namespace) {\n\t\terr = r.createIAMSecret(reqLogger, accountClaim, unclaimedAccount)\n\t\tif err != nil {\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t}\n\n\tif accountClaim.Status.State != awsv1alpha1.ClaimStatusReady && accountClaim.Spec.AccountLink != \"\" {\n\t\t// Set AccountClaim.Status.Conditions and AccountClaim.Status.State to Ready\n\t\tsetAccountClaimStatus(reqLogger, unclaimedAccount, accountClaim)\n\t\treturn reconcile.Result{}, r.statusUpdate(reqLogger, accountClaim)\n\t}\n\n\treturn reconcile.Result{}, nil\n}", "title": "" }, { "docid": "a7e05228c786fab92837c2df568c8566", "score": "0.4554298", "text": "func (m *GovernanceRoleDefinition) SetResource(value GovernanceResourceable)() {\n err := m.GetBackingStore().Set(\"resource\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "e7e58b9116a81227604150a2f56d3bc8", "score": "0.45485997", "text": "func (m *GovernanceRoleSetting) SetResource(value GovernanceResourceable)() {\n err := m.GetBackingStore().Set(\"resource\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "5a8f4a5f28f6044544fbd2e37481c5cd", "score": "0.45383066", "text": "func (_ERC20AtomicSwapper *ERC20AtomicSwapperSession) Claim(_swapID [32]byte, _randomNumber [32]byte) (*types.Transaction, error) {\n\treturn _ERC20AtomicSwapper.Contract.Claim(&_ERC20AtomicSwapper.TransactOpts, _swapID, _randomNumber)\n}", "title": "" }, { "docid": "11a41f6f36945bff0329fad94b8c22c7", "score": "0.45286125", "text": "func (p *Pair) BindResource(pr BindResource) *schema.Resource {\n\treadContext := func(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {\n\t\treturn p.ReadContext(d, func(left, right string) error {\n\t\t\treturn pr.ReadContext(ctx, left, right, m.(*common.DatabricksClient))\n\t\t})\n\t}\n\treturn &schema.Resource{\n\t\tSchema: p.Schema(),\n\t\tReadContext: readContext,\n\t\tCreateContext: func(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {\n\t\t\tleft := 
d.Get(p.Left).(string)\n\t\t\tif left == \"\" {\n\t\t\t\treturn diag.Errorf(\"%s cannot be empty\", p.Left)\n\t\t\t}\n\t\t\tright := d.Get(p.Right).(string)\n\t\t\tif right == \"\" {\n\t\t\t\treturn diag.Errorf(\"%s cannot be empty\", p.Right)\n\t\t\t}\n\t\t\terr := pr.CreateContext(ctx, left, right, m.(*common.DatabricksClient))\n\t\t\tif err != nil {\n\t\t\t\treturn diag.FromErr(err)\n\t\t\t}\n\t\t\tp.Pack(d)\n\t\t\treturn readContext(ctx, d, m)\n\t\t},\n\t\tDeleteContext: func(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {\n\t\t\tleft, right, err := p.Unpack(d)\n\t\t\tif err != nil {\n\t\t\t\treturn diag.FromErr(err)\n\t\t\t}\n\t\t\terr = pr.DeleteContext(ctx, left, right, m.(*common.DatabricksClient))\n\t\t\tif err != nil {\n\t\t\t\treturn diag.FromErr(err)\n\t\t\t}\n\t\t\treturn nil\n\t\t},\n\t\tImporter: &schema.ResourceImporter{\n\t\t\tStateContext: schema.ImportStatePassthroughContext,\n\t\t},\n\t}\n}", "title": "" }, { "docid": "784134bd382a00781d0bb1bed1f95425", "score": "0.45284382", "text": "func (_Claimable *ClaimableRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Claimable.Contract.ClaimableTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "9a0ac211900b499e4b4fe7d80a04a6d2", "score": "0.45200512", "text": "func (_Contract *ContractTransactor) Reclaim(opts *bind.TransactOpts, id *big.Int, owner common.Address) (*types.Transaction, error) {\n\treturn _Contract.contract.Transact(opts, \"reclaim\", id, owner)\n}", "title": "" }, { "docid": "de4717f61c4dc3b6d3864dfd0c7ac370", "score": "0.4517035", "text": "func (v *CSIVolume) claimRead(claim *CSIVolumeClaim, alloc *Allocation) error {\n\tif _, ok := v.ReadAllocs[claim.AllocationID]; ok {\n\t\treturn nil\n\t}\n\tif alloc == nil {\n\t\treturn fmt.Errorf(\"allocation missing: %s\", claim.AllocationID)\n\t}\n\n\tif !v.ReadSchedulable() {\n\t\treturn ErrCSIVolumeUnschedulable\n\t}\n\n\tif !v.HasFreeReadClaims() {\n\t\treturn ErrCSIVolumeMaxClaims\n\t}\n\n\t// Allocations are copy on write, so we want to keep the id but don't need the\n\t// pointer. 
We'll get it from the db in denormalize.\n\tv.ReadAllocs[claim.AllocationID] = nil\n\tdelete(v.WriteAllocs, claim.AllocationID)\n\n\tv.ReadClaims[claim.AllocationID] = claim\n\tdelete(v.WriteClaims, claim.AllocationID)\n\tdelete(v.PastClaims, claim.AllocationID)\n\n\tv.setModesFromClaim(claim)\n\treturn nil\n}", "title": "" }, { "docid": "0d1a4830cc1b6043c1cee237398c978f", "score": "0.45088935", "text": "func (m *AccessPackageResourceScope) SetResource(value AccessPackageResourceable)() {\n err := m.GetBackingStore().Set(\"resource\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "ee07445525d5b6a3b0fe7990e6303ba3", "score": "0.44909868", "text": "func (r *genericInjectReconciler) Reconcile(req ctrl.Request) (ctrl.Result, error) {\n\tctx := context.Background()\n\tlog := r.log.WithValues(r.resourceName, req.NamespacedName)\n\n\t// fetch the target object\n\ttarget := r.injector.NewTarget()\n\tif err := r.Client.Get(ctx, req.NamespacedName, target.AsObject()); err != nil {\n\t\tlog.Error(err, \"unable to fetch target object to inject into\")\n\t\treturn ctrl.Result{}, err\n\t}\n\n\t// ensure that it wants injection\n\tmetaObj, err := meta.Accessor(target.AsObject())\n\tif err != nil {\n\t\tlog.Error(err, \"unable to get metadata for object\")\n\t\treturn ctrl.Result{}, err\n\t}\n\tcertNameRaw := metaObj.GetAnnotations()[WantInjectAnnotation]\n\thasInjectAPIServerCA := metaObj.GetAnnotations()[WantInjectAPIServerCAAnnotation] == \"true\"\n\tif certNameRaw != \"\" && hasInjectAPIServerCA {\n\t\tlog.Info(\"object has both inject-ca-from and inject-apiserver-ca annotations, skipping\")\n\t\treturn ctrl.Result{}, nil\n\t}\n\tif hasInjectAPIServerCA {\n\t\tlog.V(1).Info(\"setting apiserver ca bundle on injectable\")\n\t\ttarget.SetCA(r.apiserverCABundle)\n\n\t\t// actually update with injected CA data\n\t\tif err := r.Client.Update(ctx, target.AsObject()); err != nil {\n\t\t\tlog.Error(err, \"unable to update target object with new CA data\")\n\t\t\treturn ctrl.Result{}, err\n\t\t}\n\t\tlog.V(1).Info(\"updated object\")\n\t\treturn ctrl.Result{}, nil\n\t}\n\tif certNameRaw == \"\" {\n\t\tlog.V(1).Info(\"object does not want CA injection, skipping\")\n\t\treturn ctrl.Result{}, nil\n\t}\n\n\tcertName := splitNamespacedName(certNameRaw)\n\tlog = log.WithValues(\"certificate\", certName)\n\tif certName.Namespace == \"\" {\n\t\tlog.Error(nil, \"invalid certificate name\")\n\t\t// don't return an error, requeuing won't help till this is changed\n\t\treturn ctrl.Result{}, nil\n\t}\n\n\tvar cert certmanager.Certificate\n\tif err := r.Client.Get(ctx, certName, &cert); err != nil {\n\t\tlog.Error(err, \"unable to fetch associated certificate\")\n\t\t// don't requeue if we're just not found, we'll get called when the secret gets created\n\t\treturn ctrl.Result{}, dropNotFound(err)\n\t}\n\n\t// grab the associated secret, and ensure it's owned by the cert\n\tsecretName := types.NamespacedName{Namespace: cert.Namespace, Name: cert.Spec.SecretName}\n\tlog = log.WithValues(\"secret\", secretName)\n\tvar secret corev1.Secret\n\tif err := r.Client.Get(ctx, secretName, &secret); err != nil {\n\t\tlog.Error(err, \"unable to fetch associated secret\")\n\t\t// don't requeue if we're just not found, we'll get called when the secret gets created\n\t\treturn ctrl.Result{}, dropNotFound(err)\n\t}\n\towner := OwningCertForSecret(&secret)\n\tif owner == nil || *owner != certName {\n\t\tlog.Info(\"refusing to target secret not owned by certificate\", \"owner\", 
metav1.GetControllerOf(&secret))\n\t\treturn ctrl.Result{}, nil\n\t}\n\n\t// inject the CA data\n\tcaData, hasCAData := secret.Data[certctrl.TLSCAKey]\n\tif !hasCAData {\n\t\tlog.Error(nil, \"certificate has no CA data\")\n\t\t// don't requeue, we'll get called when the secret gets updated\n\t\treturn ctrl.Result{}, nil\n\t}\n\n\t// actually do the injection\n\ttarget.SetCA(caData)\n\n\t// actually update with injected CA data\n\tif err := r.Client.Update(ctx, target.AsObject()); err != nil {\n\t\tlog.Error(err, \"unable to update target object with new CA data\")\n\t\treturn ctrl.Result{}, err\n\t}\n\tlog.V(1).Info(\"updated object\")\n\n\treturn ctrl.Result{}, nil\n}", "title": "" }, { "docid": "d3784c51bf5895b6ffbf2fb9d717d11b", "score": "0.4485529", "text": "func (r *ManagedReconciler) Reconcile(req reconcile.Request) (reconcile.Result, error) { // nolint:gocyclo\n\t// NOTE(negz): This method is a little over our cyclomatic complexity goal.\n\t// Be wary of adding additional complexity.\n\n\tlog.V(logging.Debug).Info(\"Reconciling\", \"controller\", managedControllerName, \"request\", req)\n\n\tctx, cancel := context.WithTimeout(context.Background(), managedReconcileTimeout)\n\tdefer cancel()\n\n\tmanaged := r.newManaged()\n\tif err := r.client.Get(ctx, req.NamespacedName, managed); err != nil {\n\t\t// There's no need to requeue if we no longer exist. Otherwise we'll be\n\t\t// requeued implicitly because we return an error.\n\t\treturn reconcile.Result{}, errors.Wrap(IgnoreNotFound(err), errGetManaged)\n\t}\n\n\texternal, err := r.external.Connect(ctx, managed)\n\tif err != nil {\n\t\t// We'll usually hit this case if our Provider or its secret are missing\n\t\t// or invalid. If this is first time we encounter this issue we'll be\n\t\t// requeued implicitly when we update our status with the new error\n\t\t// condition. If not, we want to try again after a short wait.\n\t\tmanaged.SetConditions(v1alpha1.ReconcileError(err))\n\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t}\n\n\tif err := r.managed.Initialize(ctx, managed); err != nil {\n\t\t// If this is the first time we encounter this issue we'll be requeued\n\t\t// implicitly when we update our status with the new error condition.\n\t\t// If not, we want to try again after a short wait.\n\t\tmanaged.SetConditions(v1alpha1.ReconcileError(err))\n\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t}\n\n\tif !IsConditionTrue(managed.GetCondition(v1alpha1.TypeReferencesResolved)) {\n\t\tif err := r.managed.ResolveReferences(ctx, managed); err != nil {\n\t\t\tcondition := v1alpha1.ReconcileError(err)\n\t\t\tif IsReferencesAccessError(err) {\n\t\t\t\tcondition = v1alpha1.ReferenceResolutionBlocked(err)\n\t\t\t}\n\n\t\t\tmanaged.SetConditions(condition)\n\t\t\treturn reconcile.Result{RequeueAfter: r.longWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t\t}\n\n\t\t// Add ReferenceResolutionSuccess to the conditions\n\t\tmanaged.SetConditions(v1alpha1.ReferenceResolutionSuccess())\n\t}\n\n\tobservation, err := external.Observe(ctx, managed)\n\tif err != nil {\n\t\t// We'll usually hit this case if our Provider credentials are invalid\n\t\t// or insufficient for observing the external resource type we're\n\t\t// concerned with. 
If this is the first time we encounter this issue\n\t\t// we'll be requeued implicitly when we update our status with the new\n\t\t// error condition. If not, we want to try again after a short wait.\n\t\tmanaged.SetConditions(v1alpha1.ReconcileError(err))\n\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t}\n\n\tif meta.WasDeleted(managed) {\n\t\t// TODO(muvaf): Reclaim Policy should be used between Claim and Managed. For Managed and External Resource,\n\t\t// we need another field.\n\t\tif observation.ResourceExists && managed.GetReclaimPolicy() == v1alpha1.ReclaimDelete {\n\t\t\tif err := external.Delete(ctx, managed); err != nil {\n\t\t\t\t// We'll hit this condition if we can't delete our external\n\t\t\t\t// resource, for example if our provider credentials don't have\n\t\t\t\t// access to delete it. If this is the first time we encounter this\n\t\t\t\t// issue we'll be requeued implicitly when we update our status with\n\t\t\t\t// the new error condition. If not, we want to try again after a\n\t\t\t\t// short wait.\n\t\t\t\tmanaged.SetConditions(v1alpha1.ReconcileError(err))\n\t\t\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t\t\t}\n\t\t\tmanaged.SetConditions(v1alpha1.ReconcileSuccess())\n\t\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t\t}\n\t\tif err := r.managed.UnpublishConnection(ctx, managed, observation.ConnectionDetails); err != nil {\n\t\t\t// If this is the first time we encounter this issue we'll be\n\t\t\t// requeued implicitly when we update our status with the new error\n\t\t\t// condition. If not, we want to try again after a short wait.\n\t\t\tmanaged.SetConditions(v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(IgnoreNotFound(r.client.Status().Update(ctx, managed)), errUpdateManagedStatus)\n\t\t}\n\t\tif err := r.managed.Finalize(ctx, managed); err != nil {\n\t\t\t// If this is the first time we encounter this issue we'll be\n\t\t\t// requeued implicitly when we update our status with the new error\n\t\t\t// condition. If not, we want to try again after a short wait.\n\t\t\tmanaged.SetConditions(v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(IgnoreNotFound(r.client.Status().Update(ctx, managed)), errUpdateManagedStatus)\n\t\t}\n\t\t// We've successfully finalized the deletion of our external and managed\n\t\t// resources.\n\t\tmanaged.SetConditions(v1alpha1.ReconcileSuccess())\n\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(IgnoreNotFound(r.client.Status().Update(ctx, managed)), errUpdateManagedStatus)\n\t}\n\n\tif err := r.managed.PublishConnection(ctx, managed, observation.ConnectionDetails); err != nil {\n\t\t// If this is the first time we encounter this issue we'll be requeued\n\t\t// implicitly when we update our status with the new error condition. 
If\n\t\t// not, we want to try again after a short wait.\n\t\tmanaged.SetConditions(v1alpha1.ReconcileError(err))\n\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t}\n\n\tif !observation.ResourceExists {\n\t\tcreation, err := external.Create(ctx, managed)\n\t\tif err != nil {\n\t\t\t// We'll hit this condition if we can't create our external\n\t\t\t// resource, for example if our provider credentials don't have\n\t\t\t// access to create it. If this is the first time we encounter this\n\t\t\t// issue we'll be requeued implicitly when we update our status with\n\t\t\t// the new error condition. If not, we want to try again after a\n\t\t\t// short wait.\n\t\t\tmanaged.SetConditions(v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t\t}\n\n\t\tif err := r.managed.PublishConnection(ctx, managed, creation.ConnectionDetails); err != nil {\n\t\t\t// If this is the first time we encounter this issue we'll be\n\t\t\t// requeued implicitly when we update our status with the new error\n\t\t\t// condition. If not, we want to try again after a short wait.\n\t\t\tmanaged.SetConditions(v1alpha1.ReconcileError(err))\n\t\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t\t}\n\n\t\t// We've successfully created our external resource. In many cases the\n\t\t// creation process takes a little time to finish. We requeue a short\n\t\t// wait in order to observe the external resource to determine whether\n\t\t// it's ready for use.\n\t\tmanaged.SetConditions(v1alpha1.ReconcileSuccess())\n\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t}\n\n\tif observation.ResourceUpToDate {\n\t\t// We did not need to create, update, or delete our external resource.\n\t\t// Per the below issue nothing will notify us if and when the external\n\t\t// resource we manage changes, so we requeue a speculative reconcile\n\t\t// after a long wait in order to observe it and react accordingly.\n\t\t// https://github.com/crossplaneio/crossplane/issues/289\n\t\tmanaged.SetConditions(v1alpha1.ReconcileSuccess())\n\t\treturn reconcile.Result{RequeueAfter: r.longWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t}\n\n\tupdate, err := external.Update(ctx, managed)\n\tif err != nil {\n\t\t// We'll hit this condition if we can't update our external resource,\n\t\t// for example if our provider credentials don't have access to update\n\t\t// it. If this is the first time we encounter this issue we'll be\n\t\t// requeued implicitly when we update our status with the new error\n\t\t// condition. If not, we want to try again after a short wait.\n\t\tmanaged.SetConditions(v1alpha1.ReconcileError(err))\n\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t}\n\n\tif err := r.managed.PublishConnection(ctx, managed, update.ConnectionDetails); err != nil {\n\t\t// If this is the first time we encounter this issue we'll be requeued\n\t\t// implicitly when we update our status with the new error condition. 
If\n\t\t// not, we want to try again after a short wait.\n\t\tmanaged.SetConditions(v1alpha1.ReconcileError(err))\n\t\treturn reconcile.Result{RequeueAfter: r.shortWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n\t}\n\n\t// We've successfully updated our external resource. Per the below issue\n\t// nothing will notify us if and when the external resource we manage\n\t// changes, so we requeue a speculative reconcile after a long wait in order\n\t// to observe it and react accordingly.\n\t// https://github.com/crossplaneio/crossplane/issues/289\n\tmanaged.SetConditions(v1alpha1.ReconcileSuccess())\n\treturn reconcile.Result{RequeueAfter: r.longWait}, errors.Wrap(r.client.Status().Update(ctx, managed), errUpdateManagedStatus)\n}", "title": "" }, { "docid": "338488fb786b96a3cbec820721aefe7b", "score": "0.44841844", "text": "func WithManagedIdentityResourceID(serviceBusNamespaceName, managedIdentityResourceID string) ManagementOption {\n\treturn func(l *Listener) error {\n\t\tif serviceBusNamespaceName == \"\" {\n\t\t\treturn errors.New(\"no Service Bus namespace provided\")\n\t\t}\n\t\tns, err := servicebus.NewNamespace(servicebusinternal.NamespaceWithManagedIdentityResourceID(serviceBusNamespaceName, managedIdentityResourceID))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tl.namespace = ns\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "99aa046318358b16f8b6254b425ded5f", "score": "0.44807357", "text": "func (_Incmode *IncmodeFilterer) WatchClaim(opts *bind.WatchOpts, sink chan<- *IncmodeClaim) (event.Subscription, error) {\n\n\tlogs, sub, err := _Incmode.contract.WatchLogs(opts, \"Claim\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event.NewSubscription(func(quit <-chan struct{}) error {\n\t\tdefer sub.Unsubscribe()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase log := <-logs:\n\t\t\t\t// New log arrived, parse the event and forward to the user\n\t\t\t\tevent := new(IncmodeClaim)\n\t\t\t\tif err := _Incmode.contract.UnpackLog(event, \"Claim\", log); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tevent.Raw = log\n\n\t\t\t\tselect {\n\t\t\t\tcase sink <- event:\n\t\t\t\tcase err := <-sub.Err():\n\t\t\t\t\treturn err\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\tcase err := <-sub.Err():\n\t\t\t\treturn err\n\t\t\tcase <-quit:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}), nil\n}", "title": "" }, { "docid": "222059a452e434b19f8324b1013549ee", "score": "0.44709137", "text": "func (_Identityclaim *IdentityclaimRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Identityclaim.Contract.IdentityclaimTransactor.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "17799e34afe9a52daae26c9e410e7f37", "score": "0.446864", "text": "func (_ERC20AtomicSwapper *ERC20AtomicSwapperTransactorSession) Claim(_swapID [32]byte, _randomNumber [32]byte) (*types.Transaction, error) {\n\treturn _ERC20AtomicSwapper.Contract.Claim(&_ERC20AtomicSwapper.TransactOpts, _swapID, _randomNumber)\n}", "title": "" }, { "docid": "cd9ce7122dc83b9e61d68c384af0b841", "score": "0.44682807", "text": "func (r ApiCreateApplianceDeviceClaimRequest) ApplianceDeviceClaim(applianceDeviceClaim ApplianceDeviceClaim) ApiCreateApplianceDeviceClaimRequest {\n\tr.applianceDeviceClaim = &applianceDeviceClaim\n\treturn r\n}", "title": "" }, { "docid": "1047853c54943d7f6aa21d9e177ea5bd", "score": "0.44652212", "text": "func (client VirtualMachineClient) PatchResourceSender(req *http.Request) 
(*http.Response, error) {\n\treturn autorest.SendWithSender(client, req,\n\t\tazure.DoRetryWithRegistration(client.Client))\n}", "title": "" }, { "docid": "464497af154045f597bc83bc620c4581", "score": "0.44592947", "text": "func (_Claimable *ClaimableTransactor) ClaimOwnership(opts *bind.TransactOpts) (*types.Transaction, error) {\n\treturn _Claimable.contract.Transact(opts, \"claimOwnership\")\n}", "title": "" }, { "docid": "4203edf7e82b69891455440361118c3e", "score": "0.44565168", "text": "func (c *Controller) syncHandlerClaim(key string) error {\n\t// Convert the namespace/name string into a distinct namespace and name\n\tnamespace, name, err := cache.SplitMetaNamespaceKey(key)\n\tif err != nil {\n\t\tutilruntime.HandleError(fmt.Errorf(\"invalid resource key: %s\", key))\n\t\treturn nil\n\t}\n\n\t// Get the ResourceQuotaClaims resource with this namespace/name\n\tclaim, err := c.resourceQuotaClaimLister.ResourceQuotaClaims(namespace).Get(name)\n\tif err != nil {\n\t\t// The ResourceQuotaClaims resource may no longer exist, in which case we stop\n\t\t// processing.\n\t\tif errors.IsNotFound(err) {\n\t\t\tutilruntime.HandleError(fmt.Errorf(\"ResourceQuotaClaims '%s' in work queue no longer exists\", key))\n\t\t\treturn nil\n\t\t}\n\n\t\treturn err\n\t}\n\n\t// TODO : Add feature gate\n\t// Get the managed quota\n\t// It there was an error different than not found the error is return\n\t// If it was found it's possible to check scaledown\n\tmanagedQuota, err := c.resourceQuotaLister.ResourceQuotas(claim.Namespace).Get(utils.ResourceQuotaName)\n\tif !errors.IsNotFound(err) && err != nil {\n\t\treturn err\n\t} else if !errors.IsNotFound(err) {\n\t\t// List pod in the claim ns\n\t\tpods, err := c.podsLister.Pods(claim.Namespace).List(utils.DefaultLabelSelector())\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// Check is the quota is scaling down\n\t\t// If scaling down checks if the claim is higher than the total amount of request on the NS\n\t\tif isDownscale := isDownscaleQuota(claim, managedQuota); isDownscale {\n\t\t\tif msg := canDownscaleQuota(claim, utils.TotalRequestNS(pods)); msg != utils.EmptyMsg {\n\t\t\t\terr = c.claimPending(claim, msg)\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\t// Gather Nodes and ResourceQuota ResourceList to evaluate if there is enough capacity to accept\n\t// the ResourceQuotaClaim\n\tavailableResources, err := c.nodesTotalCapacity()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Gather ResourceQuotas on the cluster minus the one of the namespace that is being evaluated\n\treservedResources, err := c.totalResourceQuota(claim)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Check that the claim respect the allocation limit\n\t// If it does not the claim is rejected\n\tif msg := c.checkAllocationLimit(claim, availableResources); msg != utils.EmptyMsg {\n\t\terr := c.claimRejected(claim, msg)\n\t\treturn err\n\t}\n\n\t// Check that there are enough resources to fit the claim\n\t// If it does not the claim is rejected\n\tif msg := c.checkResourceFit(claim, availableResources, reservedResources); msg != utils.EmptyMsg {\n\t\terr := c.claimRejected(claim, msg)\n\t\treturn err\n\t}\n\n\t// The claim has passed the verification\n\n\t// The managed quota is updated\n\terr = c.updateResourceQuota(claim)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// The claim is removed\n\terr = c.deleteResourceQuotaClaim(claim)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tutils.ClaimCounter.WithLabelValues(\"success\").Inc()\n\n\t// Everything went well\n\treturn nil\n}", 
"title": "" }, { "docid": "cafd57f94d047c2aa136d4fd31f648db", "score": "0.44489214", "text": "func acquire(state interface{}) (newResource interface{}, newExpiration time.Time, err error) {\n\ts := state.(acquiringResourceState)\n\ttk, err := s.p.cred.GetToken(s.req.Raw().Context(), shared.TokenRequestOptions{Scopes: s.p.scopes})\n\tif err != nil {\n\t\treturn nil, time.Time{}, err\n\t}\n\treturn tk, tk.ExpiresOn, nil\n}", "title": "" }, { "docid": "3085e07729544b1ec3c0a899b7d6d26d", "score": "0.4447546", "text": "func bindClaimable(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := abi.JSON(strings.NewReader(ClaimableABI))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil\n}", "title": "" }, { "docid": "20da7d445defae07958782ca2bd6f1ad", "score": "0.44445217", "text": "func (s *Server) UpdateResource(ctx context.Context, req *UpdateResourceRequest, resp *UpdateResourceResponse) {\n\tif req == nil {\n\t\treturn\n\t}\n\n\tif resourceWithConfigure, ok := req.Resource.(resource.ResourceWithConfigure); ok {\n\t\tlogging.FrameworkTrace(ctx, \"Resource implements ResourceWithConfigure\")\n\n\t\tconfigureReq := resource.ConfigureRequest{\n\t\t\tProviderData: s.ResourceConfigureData,\n\t\t}\n\t\tconfigureResp := resource.ConfigureResponse{}\n\n\t\tlogging.FrameworkDebug(ctx, \"Calling provider defined Resource Configure\")\n\t\tresourceWithConfigure.Configure(ctx, configureReq, &configureResp)\n\t\tlogging.FrameworkDebug(ctx, \"Called provider defined Resource Configure\")\n\n\t\tresp.Diagnostics.Append(configureResp.Diagnostics...)\n\n\t\tif resp.Diagnostics.HasError() {\n\t\t\treturn\n\t\t}\n\t}\n\n\tnullSchemaData := tftypes.NewValue(req.ResourceSchema.Type().TerraformType(ctx), nil)\n\n\tupdateReq := resource.UpdateRequest{\n\t\tConfig: tfsdk.Config{\n\t\t\tSchema: req.ResourceSchema,\n\t\t\tRaw: nullSchemaData,\n\t\t},\n\t\tPlan: tfsdk.Plan{\n\t\t\tSchema: req.ResourceSchema,\n\t\t\tRaw: nullSchemaData,\n\t\t},\n\t\tState: tfsdk.State{\n\t\t\tSchema: req.ResourceSchema,\n\t\t\tRaw: nullSchemaData,\n\t\t},\n\t}\n\tupdateResp := resource.UpdateResponse{\n\t\tState: tfsdk.State{\n\t\t\tSchema: req.ResourceSchema,\n\t\t\tRaw: nullSchemaData,\n\t\t},\n\t}\n\n\tif req.Config != nil {\n\t\tupdateReq.Config = *req.Config\n\t}\n\n\tif req.PlannedState != nil {\n\t\tupdateReq.Plan = *req.PlannedState\n\t}\n\n\tif req.PriorState != nil {\n\t\tupdateReq.State = *req.PriorState\n\t\t// Require explicit provider updates for tracking successful updates.\n\t\tupdateResp.State = *req.PriorState\n\t}\n\n\tif req.ProviderMeta != nil {\n\t\tupdateReq.ProviderMeta = *req.ProviderMeta\n\t}\n\n\tprivateProviderData := privatestate.EmptyProviderData(ctx)\n\n\tupdateReq.Private = privateProviderData\n\tupdateResp.Private = privateProviderData\n\n\tif req.PlannedPrivate != nil {\n\t\tif req.PlannedPrivate.Provider != nil {\n\t\t\tupdateReq.Private = req.PlannedPrivate.Provider\n\t\t\tupdateResp.Private = req.PlannedPrivate.Provider\n\t\t}\n\n\t\tresp.Private = req.PlannedPrivate\n\t}\n\n\tlogging.FrameworkDebug(ctx, \"Calling provider defined Resource Update\")\n\treq.Resource.Update(ctx, updateReq, &updateResp)\n\tlogging.FrameworkDebug(ctx, \"Called provider defined Resource Update\")\n\n\tresp.Diagnostics = updateResp.Diagnostics\n\tresp.NewState = &updateResp.State\n\n\tif !resp.Diagnostics.HasError() && 
updateResp.State.Raw.Equal(nullSchemaData) {\n\t\tresp.Diagnostics.AddError(\n\t\t\t\"Missing Resource State After Update\",\n\t\t\t\"The Terraform Provider unexpectedly returned no resource state after having no errors in the resource update. \"+\n\t\t\t\t\"This is always an issue in the Terraform Provider and should be reported to the provider developers.\",\n\t\t)\n\t}\n\n\tif updateResp.Private != nil {\n\t\tif resp.Private == nil {\n\t\t\tresp.Private = &privatestate.Data{}\n\t\t}\n\n\t\tresp.Private.Provider = updateResp.Private\n\t}\n\n\tif resp.Diagnostics.HasError() {\n\t\treturn\n\t}\n\n\tsemanticEqualityReq := SchemaSemanticEqualityRequest{\n\t\tPriorData: fwschemadata.Data{\n\t\t\tDescription: fwschemadata.DataDescriptionPlan,\n\t\t\tSchema: req.PlannedState.Schema,\n\t\t\tTerraformValue: req.PlannedState.Raw.Copy(),\n\t\t},\n\t\tProposedNewData: fwschemadata.Data{\n\t\t\tDescription: fwschemadata.DataDescriptionState,\n\t\t\tSchema: resp.NewState.Schema,\n\t\t\tTerraformValue: resp.NewState.Raw.Copy(),\n\t\t},\n\t}\n\tsemanticEqualityResp := &SchemaSemanticEqualityResponse{\n\t\tNewData: semanticEqualityReq.ProposedNewData,\n\t}\n\n\tSchemaSemanticEquality(ctx, semanticEqualityReq, semanticEqualityResp)\n\n\tresp.Diagnostics.Append(semanticEqualityResp.Diagnostics...)\n\n\tif resp.Diagnostics.HasError() {\n\t\treturn\n\t}\n\n\tif semanticEqualityResp.NewData.TerraformValue.Equal(resp.NewState.Raw) {\n\t\treturn\n\t}\n\n\tlogging.FrameworkDebug(ctx, \"State updated due to semantic equality\")\n\n\tresp.NewState.Raw = semanticEqualityResp.NewData.TerraformValue\n}", "title": "" }, { "docid": "5a6bad3729febf7364c1171e36f15624", "score": "0.44347194", "text": "func (m *Provider) FixResource(arg0 string, arg1 cty.Value) (cty.Value, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"FixResource\", arg0, arg1)\n\tret0, _ := ret[0].(cty.Value)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "e4c9a734afa89cfe8a8718f5378c2c80", "score": "0.44222498", "text": "func (r *VpSavepointReconciler) updateResource(resource *v1beta1.VpSavepoint, savepoint *appmanagerapi.Savepoint) error {\n\tctx := context.Background()\n\n\tresource.Annotations = annotations.Set(resource.Annotations,\n\t\tannotations.Pair(annotations.ID, savepoint.Metadata.Id),\n\t\tannotations.Pair(annotations.DeploymentID, savepoint.Metadata.DeploymentId),\n\t\tannotations.Pair(annotations.JobID, savepoint.Metadata.JobId))\n\n\tif err := r.Update(ctx, resource); err != nil {\n\t\treturn err\n\t}\n\n\tstate, err := nativeconverters.SavepointStateToNative(savepoint.Status.State)\n\tif err != nil {\n\t\treturn err\n\t}\n\tresource.Status.State = state\n\n\tif err := r.Status().Update(ctx, resource); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "af4a85cb8be2159329e2888ac3bb2daf", "score": "0.4393809", "text": "func bindIdentityclaim(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {\n\tparsed, err := abi.JSON(strings.NewReader(IdentityclaimABI))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil\n}", "title": "" }, { "docid": "99c220916690f18dead0843f656002b0", "score": "0.43927035", "text": "func (c *Controller) claimPending(claim *cagipv1.ResourceQuotaClaim, msg string) (err error) {\n\t// Notify via an event\n\tc.recorder.Event(claim, v1.EventTypeWarning, cagipv1.PhasePending, 
msg)\n\t// Update ResourceQuotaClaim Status to Rejected Phase\n\t_, err = c.updateResourceQuotaClaimStatus(claim, cagipv1.PhasePending, msg)\n\tutils.ClaimCounter.WithLabelValues(\"pending\").Inc()\n\treturn\n}", "title": "" }, { "docid": "340c0b7fef4cd23dcaba2dd15e684298", "score": "0.43903852", "text": "func (r *ResourceRequestReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {\n\tvar resourceRequest discoveryv1alpha1.ResourceRequest\n\terr := r.Get(ctx, req.NamespacedName, &resourceRequest)\n\tif err != nil {\n\t\tklog.Errorf(\"%s -> unable to get resourceRequest %s: %s\", r.ClusterID, req.NamespacedName, err)\n\t\treturn ctrl.Result{}, nil\n\t}\n\n\tofferErr := r.generateResourceOffer(&resourceRequest)\n\tif offerErr != nil {\n\t\tklog.Errorf(\"%s -> Error generating resourceOffer: %s\", r.ClusterID, offerErr)\n\t\treturn ctrl.Result{}, offerErr\n\t}\n\n\treturn ctrl.Result{}, nil\n}", "title": "" }, { "docid": "781c2457ae40d9ef04a19a54be27073d", "score": "0.43812194", "text": "func (r *Resource) deploy(resource types.Resource, saveStateF func(types.Resource)) error {\n\tif !r.Confirmed {\n\t\tconfirmed, err := askForConfirmation()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"asking for confirmation: %w\", err)\n\t\t}\n\n\t\tif !confirmed {\n\t\t\tfmt.Println(\"Aborted\")\n\n\t\t\treturn nil\n\t\t}\n\t}\n\n\tdeployErr := resource.Deploy()\n\n\tif r.State == nil {\n\t\tr.State = &ResourceState{}\n\t}\n\n\tsaveStateF(resource)\n\n\treturn r.StateToFile(deployErr)\n}", "title": "" }, { "docid": "ae86c3336e805eea906c01cce65fa3b6", "score": "0.43792623", "text": "func (t *Ticket) Claim(host string) (*Ticket, error) {\n\tstatus, rev, err := t.conn.Get(t.Path.Prefix(\"status\"), nil)\n\tif err != nil {\n\t\treturn t, err\n\t}\n\tif TicketStatus(status) != TicketStatusUnClaimed {\n\t\treturn t, fmt.Errorf(\"ticket status is '%s'\", string(status))\n\t}\n\n\t_, err = t.conn.Set(t.Path.Prefix(\"status\"), rev, []byte(TicketStatusClaimed))\n\tif err != nil {\n\t\treturn t, err\n\t}\n\tt.Status = TicketStatusClaimed\n\n\trev, err = t.conn.Set(t.claimPath(host), rev, []byte(time.Now().UTC().String()))\n\tif err != nil {\n\t\treturn t, err\n\t}\n\n\treturn t.FastForward(rev), err\n}", "title": "" }, { "docid": "378a44e0933920052f74a52cc5f3abe6", "score": "0.43759334", "text": "func (r ApiUpdateApplianceDeviceClaimRequest) ApplianceDeviceClaim(applianceDeviceClaim ApplianceDeviceClaim) ApiUpdateApplianceDeviceClaimRequest {\n\tr.applianceDeviceClaim = &applianceDeviceClaim\n\treturn r\n}", "title": "" }, { "docid": "00a2f9641092c5ff472b0fa9353cd9bc", "score": "0.4370091", "text": "func (d *ResourceDetector) Reconcile(key util.QueueKey) error {\n\tclusterWideKey, ok := key.(keys.ClusterWideKey)\n\tif !ok {\n\t\tklog.Error(\"Invalid key\")\n\t\treturn fmt.Errorf(\"invalid key\")\n\t}\n\tklog.Infof(\"Reconciling object: %s\", clusterWideKey)\n\n\tobject, err := d.GetUnstructuredObject(clusterWideKey)\n\tif err != nil {\n\t\tif apierrors.IsNotFound(err) {\n\t\t\t// The resource may no longer exist, in which case we try (may not exist in waiting list) remove it from waiting list and stop processing.\n\t\t\td.RemoveWaiting(clusterWideKey)\n\n\t\t\t// Once resource be deleted, the derived ResourceBinding or ClusterResourceBinding also need to be cleaned up,\n\t\t\t// currently we do that by setting owner reference to derived objects.\n\t\t\treturn nil\n\t\t}\n\t\tklog.Errorf(\"Failed to get unstructured object(%s), error: %v\", clusterWideKey, err)\n\t\treturn 
err\n\t}\n\n\treturn d.propagateResource(object, clusterWideKey)\n}", "title": "" }, { "docid": "73836a4fa50b32ffc0f1069e524f5f8f", "score": "0.4361135", "text": "func SetContextFromResource(ctx context.Context, r MultitenantResource) context.Context {\n\tctx = context.WithValue(ctx, EnvironmentKey, r.GetEnvironment())\n\tctx = context.WithValue(ctx, OrganizationKey, r.GetOrganization())\n\treturn ctx\n}", "title": "" }, { "docid": "79bdf89a5309187c83c39a7a8eef156f", "score": "0.43397036", "text": "func resourceRead(d *schema.ResourceData, m interface{}) error{\n\treturn nil\n}", "title": "" }, { "docid": "b06146c0e294f153d7ad62d2f78894a3", "score": "0.43348077", "text": "func (_Bancor *BancorSession) ClaimAndConvertFor(_path []common.Address, _amount *big.Int, _minReturn *big.Int, _beneficiary common.Address) (*types.Transaction, error) {\n\treturn _Bancor.Contract.ClaimAndConvertFor(&_Bancor.TransactOpts, _path, _amount, _minReturn, _beneficiary)\n}", "title": "" }, { "docid": "e7118e5e821e33e659a4c4fa1c9f0207", "score": "0.43321925", "text": "func updateResourceToMatch(newResource pcommon.Resource, originResource pcommon.Resource, requiredAttributes pcommon.Map) {\n\n\toriginResource.CopyTo(newResource)\n\n\t// This prioritizes required attributes over the original resource attributes, if they overlap\n\tattrs := newResource.Attributes()\n\trequiredAttributes.Range(func(k string, v pcommon.Value) bool {\n\t\tv.CopyTo(attrs.PutEmpty(k))\n\t\treturn true\n\t})\n\n}", "title": "" }, { "docid": "5613e772faaf93cf06eb25258a147f54", "score": "0.43237716", "text": "func (c *TriggerController) syncResource(key string) error {\n\tif glog.V(4) {\n\t\tstartTime := time.Now()\n\t\tglog.Infof(\"Started syncing resource %q\", key)\n\t\tdefer func() {\n\t\t\tglog.Infof(\"Finished syncing resource %q (%v)\", key, time.Since(startTime))\n\t\t}()\n\t}\n\n\tparts := strings.SplitN(key, \"/\", 2)\n\tsource := c.triggerSources[parts[0]]\n\tobj, exists, err := source.Store.GetByKey(parts[1])\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to retrieve %s %s from store: %v\", parts[0], parts[1], err)\n\t}\n\tif !exists {\n\t\treturn nil\n\t}\n\n\treturn source.Reactor.ImageChanged(obj.(runtime.Object), c.tagRetriever)\n}", "title": "" }, { "docid": "0ffbfffada791d98d4eccea3989541b3", "score": "0.43194652", "text": "func (r *DBaaSProviderReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {\n\tlogger := ctrl.LoggerFrom(ctx, \"DBaaS Provider\", req.NamespacedName)\n\n\tvar provider v1alpha1.DBaaSProvider\n\tif err := r.Get(ctx, req.NamespacedName, &provider); err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\t// CR deleted since request queued, child objects getting GC'd, no requeue\n\t\t\tlogger.Info(\"DBaaS Provider resource not found, has been deleted\")\n\t\t\treturn ctrl.Result{}, nil\n\t\t}\n\t\tlogger.Error(err, \"Error fetching DBaaS Provider for reconcile\")\n\t\treturn ctrl.Result{}, err\n\t}\n\n\tif err := r.watchDBaaSProviderObject(r.InventoryCtrl, &v1alpha1.DBaaSInventory{}, provider.Spec.InventoryKind); err != nil {\n\t\tlogger.Error(err, \"Error watching Provider Inventory CR\")\n\t\treturn ctrl.Result{}, err\n\t}\n\tlogger.Info(\"Watching Provider Inventory CR\")\n\n\tif err := r.watchDBaaSProviderObject(r.ConnectionCtrl, &v1alpha1.DBaaSConnection{}, provider.Spec.ConnectionKind); err != nil {\n\t\tlogger.Error(err, \"Error watching Provider Connection CR\")\n\t\treturn ctrl.Result{}, err\n\t}\n\tlogger.Info(\"Watching Provider Connection CR\")\n\n\treturn 
ctrl.Result{}, nil\n}", "title": "" }, { "docid": "ca9573fc5f986eed9217940f78e86c98", "score": "0.4315711", "text": "func (r *Resource) EnsureCreated(ctx context.Context, obj interface{}) error {\n\tcr, err := key.ToCustomResource(obj)\n\tif err != nil {\n\t\treturn microerror.Mask(err)\n\t}\n\n\tvar newState state.State\n\tvar currentState state.State\n\t{\n\t\ts, err := r.GetResourceStatus(ctx, cr, Stage)\n\t\tif err != nil {\n\t\t\treturn microerror.Mask(err)\n\t\t}\n\t\tcurrentState = state.State(s)\n\n\t\tr.Logger.Debugf(ctx, \"current state: %s\", currentState)\n\t\tnewState, err = r.StateMachine.Execute(ctx, obj, currentState)\n\t\tif err != nil {\n\t\t\treturn microerror.Mask(err)\n\t\t}\n\t}\n\n\tif newState != currentState {\n\t\tr.Logger.Debugf(ctx, \"new state: %s\", newState)\n\t\tr.Logger.Debugf(ctx, \"setting resource status to '%s/%s'\", Stage, newState)\n\t\terr = r.SetResourceStatus(ctx, cr, Stage, string(newState))\n\t\tif err != nil {\n\t\t\treturn microerror.Mask(err)\n\t\t}\n\t\tr.Logger.Debugf(ctx, \"set resource status to '%s/%s'\", Stage, newState)\n\t\tr.Logger.Debugf(ctx, \"canceling reconciliation\")\n\t} else {\n\t\tr.Logger.Debugf(ctx, \"no state change\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "b7f7121fd16eb56bc6f3a0ad53f55e3c", "score": "0.42973015", "text": "func (s *QSvc) Claim(ctx context.Context, req *pb.ClaimRequest) (*pb.ClaimResponse, error) {\n\tif err := s.Authorize(ctx, s.claimAuthz(ctx, req)); err != nil {\n\t\treturn nil, err // don't wrap, has status codes\n\t}\n\n\tclaimant, err := uuid.Parse(req.ClaimantId)\n\tif err != nil {\n\t\treturn nil, codeErrorf(codes.InvalidArgument, \"failed to parse claimant ID: %w\", err)\n\t}\n\tduration := time.Duration(req.DurationMs) * time.Millisecond\n\tpollTime := time.Duration(0)\n\tif req.PollMs > 0 {\n\t\tpollTime = time.Duration(req.PollMs) * time.Millisecond\n\t}\n\n\ttask, err := s.impl.Claim(ctx,\n\t\tentroq.From(req.Queues...),\n\t\tentroq.ClaimFor(duration),\n\t\tentroq.ClaimAs(claimant),\n\t\tentroq.ClaimPollTime(pollTime))\n\tif err != nil {\n\t\treturn nil, autoCodeErrorf(\"qsvc claim: %w\", err)\n\t}\n\tif task == nil {\n\t\treturn new(pb.ClaimResponse), nil\n\t}\n\treturn &pb.ClaimResponse{Task: protoFromTask(task)}, nil\n}", "title": "" }, { "docid": "9045f5e2d35e40c7212694d2ef8b8ef0", "score": "0.42960015", "text": "func (_Identityclaim *IdentityclaimCallerSession) Claim() ([32]byte, error) {\n\treturn _Identityclaim.Contract.Claim(&_Identityclaim.CallOpts)\n}", "title": "" }, { "docid": "c28aa5067f300232708a6312d532cec4", "score": "0.4292254", "text": "func CreateAndReadResource(name string, resource *schema.Resource, d *schema.ResourceData, m interface{}) error {\n\n\tobj := make(map[string]interface{})\n\tattrs := GetAttrs(resource)\n\tfor _, attr := range attrs {\n\t\tkey := attr.Name\n\t\tlog.Println(\"Found attribute: \", key)\n\t\tif v, ok := d.GetOk(key); ok {\n\t\t\tattr.Value = v\n\t\t\tobj[key] = GetValue(attr)\n\t\t}\n\t}\n\n\tparams := m.(map[string]interface{})\n\tclient := params[\"ibxClient\"].(*skyinfoblox.Client)\n\n\tlog.Printf(\"Going to create an %s object: %+v\", name, obj)\n\tcreatedObj, err := client.CreateAndRead(name, obj)\n\tif err != nil {\n\t\td.SetId(\"\")\n\t\treturn err\n\t}\n\n\td.SetId(createdObj[\"_ref\"].(string))\n\tdelete(createdObj, \"_ref\")\n\tfor key := range createdObj {\n\t\tif isScalar(createdObj[key]) == true {\n\t\t\tlog.Printf(\"Setting key %s to %+v\\n\", key, createdObj[key])\n\t\t\td.Set(key, 
createdObj[key])\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "90c477bfbd5003693503b808c9497027", "score": "0.42917028", "text": "func (m *Trending) SetResource(value Entityable)() {\n m.resource = value\n}", "title": "" }, { "docid": "6c99933eae6c7516e55c73ba5c78b1d5", "score": "0.42910823", "text": "func (m *UnstructClaimManager) adopt(attachment *unstructured.Unstructured) error {\n\tif err := m.CanAdopt(); err != nil {\n\t\treturn errors.Wrapf(\n\t\t\terr,\n\t\t\t\"%s: Failed to adopt child %s/%s (%v)\",\n\t\t\tm,\n\t\t\tattachment.GetNamespace(),\n\t\t\tattachment.GetName(),\n\t\t\tattachment.GetUID(),\n\t\t)\n\t}\n\tglog.Infof(\n\t\t\"%s %s/%s: adopting %s/%s (%v)\",\n\t\tm,\n\t\tm.Watched.GetNamespace(),\n\t\tm.Watched.GetName(),\n\t\tattachment.GetNamespace(),\n\t\tattachment.GetName(),\n\t\tattachment.GetUID(),\n\t)\n\tcontrollerRef := metav1.OwnerReference{\n\t\tAPIVersion: m.WatchedKind.GroupVersion().String(),\n\t\tKind: m.WatchedKind.Kind,\n\t\tName: m.Watched.GetName(),\n\t\tUID: m.Watched.GetUID(),\n\t\tController: k8s.BoolPtr(true),\n\t\tBlockOwnerDeletion: k8s.BoolPtr(true),\n\t}\n\treturn atomicUpdate(m.attachmentClient, attachment, func(obj *unstructured.Unstructured) bool {\n\t\townerRefs := addOwnerReference(obj.GetOwnerReferences(), controllerRef)\n\t\tobj.SetOwnerReferences(ownerRefs)\n\t\treturn true\n\t})\n}", "title": "" }, { "docid": "1732d25e49c98ff4f22362afdd95d004", "score": "0.4290013", "text": "func (r *CrunchyBridgeConnectionReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {\n\tlogger := log.FromContext(ctx, \"CrunchyBridgeConnection\", req.NamespacedName)\n\n\tvar connection dbaasredhatcomv1alpha1.CrunchyBridgeConnection\n\terr := r.Get(ctx, req.NamespacedName, &connection)\n\tif err != nil {\n\t\tif apierrors.IsNotFound(err) {\n\t\t\t// CR deleted since request queued, child objects getting GC'd, no requeue\n\t\t\tlogger.Info(\"CrunchyBridgeConnection resource not found, has been deleted\")\n\t\t\treturn ctrl.Result{}, nil\n\t\t}\n\t\tlogger.Error(err, \"Error fetching CrunchyBridgeConnection for reconcile\")\n\t\treturn ctrl.Result{}, err\n\t}\n\tinventory := dbaasredhatcomv1alpha1.CrunchyBridgeInventory{}\n\tif err := r.Get(ctx, types.NamespacedName{Namespace: connection.Spec.InventoryRef.Namespace, Name: connection.Spec.InventoryRef.Name}, &inventory); err != nil {\n\t\tif apierrors.IsNotFound(err) {\n\t\t\t// CR deleted since request queued, child objects getting GC'd, no requeue\n\t\t\tlogger.Info(\"inventory resource not found, has been deleted\")\n\t\t\treturn ctrl.Result{}, err\n\t\t}\n\t\tlogger.Error(err, \"Error fetching CrunchyBridgeConnection for reconcile\")\n\t\treturn ctrl.Result{}, err\n\t}\n\n\tinstance, err := getInstance(&inventory, connection.Spec.InstanceID)\n\tif instance == nil {\n\t\tstatusErr := r.updateStatus(ctx, connection, metav1.ConditionFalse, NotFound, err.Error())\n\t\tif statusErr != nil {\n\t\t\tlogger.Error(statusErr, \"Error in updating CrunchyBridgeConnection status\")\n\t\t\treturn ctrl.Result{Requeue: true}, statusErr\n\t\t}\n\t\treturn ctrl.Result{}, err\n\t}\n\tbridgeapiClient, err := setupClient(r.Client, inventory, r.APIBaseURL, logger)\n\tif err != nil {\n\t\tstatusErr := r.updateStatus(ctx, connection, metav1.ConditionFalse, BackendError, err.Error())\n\t\tif statusErr != nil {\n\t\t\tlogger.Error(statusErr, \"Error in updating CrunchyBridgeConnection status\")\n\t\t\treturn ctrl.Result{Requeue: true}, statusErr\n\t\t}\n\t\tlogger.Error(err, \"Error while setting 
up CrunchyBridge Client\")\n\t\treturn ctrl.Result{}, err\n\t}\n\n\tlogger.Info(\"Crunchy Bridge Client Configured \")\n\terr = r.connectionDetails(instance.InstanceID, &connection, bridgeapiClient, req, logger)\n\tif err != nil {\n\t\tstatusErr := r.updateStatus(ctx, connection, metav1.ConditionFalse, BackendError, err.Error())\n\t\tif statusErr != nil {\n\t\t\tlogger.Error(statusErr, \"Error in updating CrunchyBridgeConnection status\")\n\t\t\treturn ctrl.Result{Requeue: true}, statusErr\n\t\t}\n\t\tlogger.Error(err, \"Error while getting connection details\")\n\t\treturn ctrl.Result{}, err\n\t}\n\tstatusErr := r.updateStatus(ctx, connection, metav1.ConditionTrue, Ready, SuccessConnection)\n\tif statusErr != nil {\n\t\tlogger.Error(statusErr, \"Error in updating CrunchyBridgeInventory status\")\n\t\treturn ctrl.Result{Requeue: true}, statusErr\n\t}\n\treturn ctrl.Result{}, nil\n}", "title": "" }, { "docid": "d64ec0ab3d8d7edc6f001254671bd9e2", "score": "0.4285644", "text": "func (_Claimable *ClaimableRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) {\n\treturn _Claimable.Contract.ClaimableTransactor.contract.Transfer(opts)\n}", "title": "" }, { "docid": "40cefdfc96b9210eb75c7b820cc18036", "score": "0.42854097", "text": "func (_Tellor *TellorTransactor) ClaimOwnership(opts *bind.TransactOpts) (*types.Transaction, error) {\n\treturn _Tellor.contract.Transact(opts, \"claimOwnership\")\n}", "title": "" }, { "docid": "85cdef76f5f65de86641f51931548d6c", "score": "0.42831427", "text": "func UpdateAndReadResource(resource *schema.Resource, d *schema.ResourceData, m interface{}) error {\n\n\tneedsUpdate := false\n\n\tparams := m.(map[string]interface{})\n\tclient := params[\"ibxClient\"].(*skyinfoblox.Client)\n\n\tref := d.Id()\n\tobj := make(map[string]interface{})\n\n\tattrs := GetAttrs(resource)\n\tfor _, attr := range attrs {\n\t\tkey := attr.Name\n\t\tif d.HasChange(key) {\n\t\t\tattr.Value = d.Get(key)\n\t\t\tobj[key] = GetValue(attr)\n\t\t\tlog.Printf(\"Updating field %s, value: %+v\\n\", key, obj[key])\n\t\t\tneedsUpdate = true\n\t\t}\n\t}\n\n\tlog.Printf(\"UPDATE: going to update reference %s with obj: \\n%+v\\n\", ref, obj)\n\n\tif needsUpdate {\n\t\tnewObject, err := client.UpdateAndRead(ref, obj)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\td.SetId(newObject[\"_ref\"].(string))\n\t\tdelete(newObject, \"_ref\")\n\t\tfor key := range newObject {\n\t\t\tif isScalar(newObject[key]) == true {\n\t\t\t\tlog.Printf(\"Updating key %s to %+v\\n\", key, newObject[key])\n\t\t\t\td.Set(key, newObject[key])\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "78ae2b2c669ab8c5447021c6a1b1efba", "score": "0.42826805", "text": "func (c *NitroClient) BindResource(bindToResourceType string, bindToResourceName string, bindingResourceType string, bindingResourceName string, bindingStruct interface{}) error {\n\tif !c.ResourceExists(bindToResourceType, bindToResourceName) {\n\t\treturn fmt.Errorf(\"[ERROR] nitro-go: BindTo Resource %s of type %s does not exist\", bindToResourceType, bindToResourceName)\n\t}\n\n\tif !c.ResourceExists(bindingResourceType, bindingResourceName) {\n\t\treturn fmt.Errorf(\"[ERROR] nitro-go: Binding Resource %s of type %s does not exist\", bindingResourceType, bindingResourceName)\n\t}\n\tbindingName := bindToResourceType + \"_\" + bindingResourceType + \"_binding\"\n\tnsBinding := make(map[string]interface{})\n\tnsBinding[bindingName] = bindingStruct\n\n\tresourceJSON, _ := JSONMarshal(nsBinding)\n\n\tbody, err := 
c.createResource(bindingName, resourceJSON)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[ERROR] nitro-go: Failed to bind resource %s to resource %s, err=%s\", bindToResourceName, bindingResourceName, err)\n\t}\n\t_ = body\n\treturn nil\n}", "title": "" }, { "docid": "d2cd8a6ee0db2fb85df76b21eb52c9e7", "score": "0.42791957", "text": "func (r ApiPatchApplianceDeviceClaimRequest) ApplianceDeviceClaim(applianceDeviceClaim ApplianceDeviceClaim) ApiPatchApplianceDeviceClaimRequest {\n\tr.applianceDeviceClaim = &applianceDeviceClaim\n\treturn r\n}", "title": "" }, { "docid": "63642b52c0dd60e131ac624f142c752a", "score": "0.4274963", "text": "func (_Incmode *IncmodeTransactorSession) Claim() (*types.Transaction, error) {\n\treturn _Incmode.Contract.Claim(&_Incmode.TransactOpts)\n}", "title": "" }, { "docid": "e6a70f501fb88da4d60be2e9ddae77a9", "score": "0.4274033", "text": "func (_Identityclaim *IdentityclaimTransactorRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {\n\treturn _Identityclaim.Contract.contract.Transact(opts, method, params...)\n}", "title": "" }, { "docid": "a041070bcb61c8ce2068c18f303a8035", "score": "0.42705628", "text": "func (rm *RsrcMgr) RequestResource(req *rproto.ResourceRequest) (*rproto.ResourceConsumer, error) {\n\t// find the resource list from type\n\trlist, ok := rm.RsrcDB[req.ResourceType]\n\tif !ok {\n\t\tlog.Errorf(\"No providers for resource type %s\", req.ResourceType)\n\t\treturn nil, errors.New(\"No providers for resource type\")\n\t}\n\n\t// read lock the rlist\n\trlist.Lock()\n\tdefer rlist.Unlock()\n\n\t// apply constraints and make a short list of providers\n\tmatchedProviders, err := rm.applyConstraints(rlist, req)\n\tif err != nil {\n\t\tlog.Errorf(\"Error applying constraints from req{%+v}. Err: %v\", req, err)\n\t\treturn nil, err\n\t}\n\n\tlog.Debugf(\"Providers %+v matched constraints for req: %+v\", matchedProviders, req)\n\n\t// find the scheduling algorithm to run\n\tschedFunc := rm.schedulers[req.Scheduler]\n\n\t// run scheduling algorithm on matched providers\n\tprovider, err := schedFunc(req, matchedProviders)\n\tif err != nil {\n\t\tlog.Errorf(\"Scheduler %s returned error: %v\", req.Scheduler, err)\n\t\treturn nil, err\n\t}\n\n\t// consume resource from the provider(i.e, atomically allocate)\n\tconsumer, err := provider.consumeRsrc(req)\n\tif err != nil {\n\t\tlog.Errorf(\"Failed to consume resource from provider: %s. Err: %v\", provider.ProviderID, err)\n\t\treturn nil, err\n\t}\n\n\treturn consumer, nil\n\n}", "title": "" }, { "docid": "1d1ce6a5191bfc4e6c55081db01f4e9a", "score": "0.4267801", "text": "func (r *DatabaseReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {\n\t_ = log.FromContext(ctx)\n\tlogger := log.Log\n\n\tdb := &actionsv1alpha1.Database{}\n\terr := r.Get(ctx, req.NamespacedName, db)\n\tif err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\t// Request object not found, could have been deleted after reconcile request.\n\t\t\t// Owned objects are automatically garbage collected. For additional cleanup logic use finalizers.\n\t\t\t// Return and don't requeue\n\t\t\tlogger.Info(\"Database resource not found. 
Ignoring since object must be deleted\")\n\t\t\treturn ctrl.Result{}, client.IgnoreNotFound(err)\n\t\t}\n\t\t// Error reading the object - requeue the request.\n\t\tlogger.Error(err, \"failed to get Database\")\n\t\treturn ctrl.Result{}, err\n\t}\n\n\t/*******************************************************************************************************************\n\t* Quering the defined secret for the database connection\n\t*******************************************************************************************************************/\n\tmi, err := ms.QuerySQLManagedInstance(ctx, db.Namespace, db.Spec.SQLManagedInstance)\n\tif err != nil {\n\t\treturn ctrl.Result{}, err\n\t}\n\tlogger.V(1).Info(\"successfully found the managed instance\", \"managed-instance-name\", mi.Metadata.Name)\n\tif mi.Status.State != \"Ready\" {\n\t\tmeta.SetStatusCondition(&db.Status.Conditions, *db.ErroredCondition())\n\t\tr.updateDatabaseStatus(db, \"Error\")\n\t\treturn ctrl.Result{}, fmt.Errorf(\"the sql managed instance is not in a `Ready` state, current status is: %v\", mi.Status)\n\t}\n\tsec := &corev1.Secret{}\n\n\terr = r.Client.Get(context.TODO(), types.NamespacedName{Name: mi.Spec.LoginRef.Name, Namespace: mi.Spec.LoginRef.Namespace}, sec)\n\tif err != nil {\n\t\tlogger.Error(err, \"secrets credentials resource not found\", \"secret-name\", mi.Spec.LoginRef.Name)\n\t\treturn ctrl.Result{}, err\n\t}\n\n\tusername := sec.Data[\"username\"]\n\tpassword := sec.Data[\"password\"]\n\t/******************************************************************************************************************/\n\n\t// This is the creating a MSSql Server `Provider`\n\tmsSQL := ms.NewMSSql(db.Spec.Server, string(username), string(password), db.Spec.Port)\n\n\t// Let's look at the status here first\n\n\t/*******************************************************************************************************************\n\t* Finalizer to check what to do if we're deleting the resource\n\t*******************************************************************************************************************/\n\tif db.ObjectMeta.DeletionTimestamp.IsZero() {\n\t\t// Add finalizer for this CR\n\t\tif !controllerutil.ContainsFinalizer(db, databaseFinalizer) {\n\t\t\tcontrollerutil.AddFinalizer(db, databaseFinalizer)\n\t\t\terr = r.Update(ctx, db)\n\t\t\tif err != nil {\n\t\t\t\treturn ctrl.Result{}, err\n\t\t\t}\n\t\t}\n\t} else {\n\t\tif controllerutil.ContainsFinalizer(db, databaseFinalizer) {\n\t\t\tif err = r.finalizeDatabase(ctx, logger, db, msSQL); err != nil {\n\t\t\t\treturn ctrl.Result{}, err\n\t\t\t}\n\t\t}\n\t\tcontrollerutil.RemoveFinalizer(db, databaseFinalizer)\n\t\tif err := r.Update(ctx, db); err != nil {\n\t\t\treturn ctrl.Result{}, err\n\t\t}\n\n\t\t// Stop reconciliation as the item is being deleted\n\t\treturn ctrl.Result{}, nil\n\t}\n\t/******************************************************************************************************************/\n\n\tvar dbName *string\n\n\tsetID := db.Annotations[\"mssql/db_id\"]\n\tif setID != \"\" {\n\t\tvalId, _ := strconv.Atoi(setID)\n\t\tdbName, err = msSQL.FindDatabaseName(ctx, valId)\n\n\t\t// This means that the database has probably been deleted outside of the CRD\n\t\tif err != nil {\n\t\t\treturn ctrl.Result{}, err\n\t\t}\n\t\tif dbName == nil {\n\t\t\treturn ctrl.Result{}, fmt.Errorf(\"database not found, has it been deleted?\")\n\t\t}\n\n\t\tmeta.SetStatusCondition(&db.Status.Conditions, *db.UpdatingCondition())\n\n\t\terr := msSQL.AlterDatabase(ctx, 
db)\n\t\tif err != nil {\n\t\t\tmeta.SetStatusCondition(&db.Status.Conditions, *db.ErroredCondition())\n\t\t\tr.updateDatabaseStatus(db, \"Error\")\n\t\t\tlogger.Info(\"failed to alter the database\", \"name\", err.Error())\n\t\t\treturn ctrl.Result{}, err\n\t\t}\n\n\t\tmeta.SetStatusCondition(&db.Status.Conditions, *db.UpdatedCondition())\n\t} else {\n\t\tvar dbID string\n\t\tmeta.SetStatusCondition(&db.Status.Conditions, *db.CreatingCondition())\n\t\tid, err := msSQL.CreateDatabase(ctx, db)\n\t\tif err != nil {\n\t\t\tmeta.SetStatusCondition(&db.Status.Conditions, *db.ErroredCondition())\n\t\t\tr.updateDatabaseStatus(db, \"Error\")\n\t\t\tlogger.Info(\"failed to create the database\", \"name\", err.Error())\n\t\t\treturn ctrl.Result{}, err\n\t\t}\n\t\tif id != nil {\n\t\t\tdbID = *id\n\t\t} else {\n\t\t\treturn ctrl.Result{}, fmt.Errorf(\"failed to return the database id for name: %s\", db.Spec.Name)\n\t\t}\n\t\tpw := AnnotationPatch{\n\t\t\tLogger: logger,\n\t\t\tDatabaseID: dbID,\n\t\t}\n\t\terr = r.Patch(ctx, db, pw)\n\t\tif err != nil {\n\t\t\tlogger.Info(\"failed to patch the database with the database id\", \"error\", err.Error())\n\t\t\treturn ctrl.Result{}, err\n\t\t}\n\t\tmeta.SetStatusCondition(&db.Status.Conditions, *db.CreatedCondition())\n\t}\n\n\tr.updateDatabaseStatus(db, \"Created\")\n\treturn ctrl.Result{}, nil\n}", "title": "" }, { "docid": "507939c8e44db7df4921c285e8ca0e26", "score": "0.4266604", "text": "func (_Bancor *BancorTransactorSession) ClaimAndConvertFor(_path []common.Address, _amount *big.Int, _minReturn *big.Int, _beneficiary common.Address) (*types.Transaction, error) {\n\treturn _Bancor.Contract.ClaimAndConvertFor(&_Bancor.TransactOpts, _path, _amount, _minReturn, _beneficiary)\n}", "title": "" }, { "docid": "52fd08053c804c03523de0b430c26d1b", "score": "0.42662388", "text": "func (m *ExpressRouteOtherSubscriptionConnection) Redact(ctx context.Context) error {\n\t// clear fields with confidential option set (at message or field level)\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif err := m.GetAuthorizedKey().Redact(ctx); err != nil {\n\t\treturn errors.Wrapf(err, \"Redacting ExpressRouteOtherSubscriptionConnection.authorized_key\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "ed753d2978b3981acbbc091ee26845a8", "score": "0.42594177", "text": "func (_Identityclaim *IdentityclaimRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) {\n\treturn _Identityclaim.Contract.IdentityclaimTransactor.contract.Transfer(opts)\n}", "title": "" }, { "docid": "2a27ebeef13e55f56b2ecf67abe1f0ea", "score": "0.4258865", "text": "func ProphecyClaimToSignedOracleClaim(event types.ProphecyClaimEvent, key *ecdsa.PrivateKey) (OracleClaim, error) {\n\toracleClaim := OracleClaim{}\n\n\t// Generate a hashed claim message which contains ProphecyClaim's data\n\tfmt.Println(\"Generating unique message for ProphecyClaim\", event.ProphecyID)\n\tmessage := GenerateClaimMessage(event)\n\n\t// Sign the message using the validator's private key\n\tfmt.Println(\"Signing message...\")\n\tsignature, err := SignClaim(PrefixMsg(message), key)\n\tif err != nil {\n\t\treturn oracleClaim, err\n\t}\n\tfmt.Println(\"Signature generated:\", hexutil.Encode(signature))\n\n\toracleClaim.ProphecyID = event.ProphecyID\n\tvar message32 [32]byte\n\tcopy(message32[:], message)\n\toracleClaim.Message = message32\n\toracleClaim.Signature = signature\n\treturn oracleClaim, nil\n}", "title": "" }, { "docid": "695350b5ea910b9fd1a74278087ce661", "score": "0.42547804", "text": "func (s 
*resourceServiceImpl) Register(ctx context.Context, resourceTypeName string, resourceID, parentResourceID *string, identityID *uuid.UUID) (*resource.Resource, error) {\n\n\tvar res *resource.Resource\n\n\terr := s.ExecuteInTransaction(func() error {\n\n\t\t// Lookup the resource type\n\t\tresourceType, err := s.Repositories().ResourceTypeRepository().Lookup(ctx, resourceTypeName)\n\t\tif err != nil {\n\t\t\treturn errors.NewBadParameterErrorFromString(\"type\", resourceTypeName, err.Error())\n\t\t}\n\n\t\t// Lookup the parent resource if it's specified\n\t\tvar parentResource *resource.Resource\n\n\t\tif parentResourceID != nil {\n\t\t\tparentResource, err = s.Repositories().ResourceRepository().Load(ctx, *parentResourceID)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.NewBadParameterErrorFromString(\"parent resource ID\", *parentResourceID, err.Error())\n\t\t\t}\n\t\t}\n\n\t\tvar rID string\n\t\tif resourceID != nil {\n\t\t\trID = *resourceID\n\t\t} else {\n\t\t\trID = uuid.NewV4().String()\n\t\t}\n\n\t\t// Create a new resource instance\n\t\tres = &resource.Resource{\n\t\t\tResourceID: rID,\n\t\t\tParentResourceID: parentResourceID,\n\t\t\tResourceType: *resourceType,\n\t\t\tResourceTypeID: resourceType.ResourceTypeID,\n\t\t\tParentResource: parentResource,\n\t\t}\n\n\t\t// Persist the resource\n\t\terr = s.Repositories().ResourceRepository().Create(ctx, res)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Search for any default role mappings for the resource type\n\t\tdefaultRoleMappings, err := s.Repositories().DefaultRoleMappingRepository().FindForResourceType(ctx, resourceType.ResourceTypeID)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// For each default role mapping for the same resource type, create a role mapping for the resource\n\t\tfor _, m := range defaultRoleMappings {\n\t\t\troleMapping := &repository.RoleMapping{\n\t\t\t\tResourceID: rID,\n\t\t\t\tFromRoleID: m.FromRoleID,\n\t\t\t\tToRoleID: m.ToRoleID,\n\t\t\t}\n\n\t\t\terr = s.Repositories().RoleMappingRepository().Create(ctx, roleMapping)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\tif identityID != nil {\n\t\t\terr = s.Repositories().Identities().CheckExists(ctx, (*identityID).String())\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif resourceType.DefaultRoleID != nil {\n\t\t\t\tdefaultRole, err := s.Repositories().RoleRepository().Load(ctx, *resourceType.DefaultRoleID)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tif defaultRole != nil {\n\t\t\t\t\terr = s.Services().RoleManagementService().ForceAssign(ctx, *identityID, defaultRole.Name, *res)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t})\n\n\treturn res, err\n}", "title": "" }, { "docid": "727b9a2a213dea4702bfa478bcfde4d3", "score": "0.42496666", "text": "func (p *Provider) ReadResource(req providers.ReadResourceRequest) providers.ReadResourceResponse {\n\tlog.Print(\"[TRACE] moduletest.Provider: ReadResource\")\n\n\tvar res providers.ReadResourceResponse\n\tif req.TypeName != \"test_assertions\" { // we only have one resource type\n\t\tres.Diagnostics = res.Diagnostics.Append(fmt.Errorf(\"unsupported resource type %s\", req.TypeName))\n\t\treturn res\n\t}\n\t// Test assertions are not a real remote object, so there isn't actually\n\t// anything to refresh here.\n\tres.NewState = req.PriorState\n\treturn res\n}", "title": "" }, { "docid": "eae6a0c13e8fa5110a50ebd2a9e378eb", "score": "0.42468357", "text": "func (cs 
*CredentialSpecResource) SetDesiredStatus(status resourcestatus.ResourceStatus) {\n}", "title": "" }, { "docid": "5a4b68473293e194f3f5a9c24df2560c", "score": "0.4243708", "text": "func MarshalResourceReview(object *ResourceReview, writer io.Writer) error {\n\tstream := helpers.NewStream(writer)\n\twriteResourceReview(object, stream)\n\tstream.Flush()\n\treturn stream.Error\n}", "title": "" }, { "docid": "e54e791df31554664e5a931c4fe00f77", "score": "0.42428964", "text": "func ResourceAndAuthenticity(c router.Context, r ResourceModel) error {\n\n\t// Check the authenticity token first\n\terr := AuthenticityToken(c)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Now authorise the resource as normal\n\treturn Resource(c, r)\n}", "title": "" }, { "docid": "e2630ccdda25d346978ba537c36399a9", "score": "0.4241868", "text": "func (_Bancor *BancorTransactor) ClaimAndConvert(opts *bind.TransactOpts, _path []common.Address, _amount *big.Int, _minReturn *big.Int) (*types.Transaction, error) {\n\treturn _Bancor.contract.Transact(opts, \"claimAndConvert\", _path, _amount, _minReturn)\n}", "title": "" }, { "docid": "9396a65247f76568ddd50f1cd1dcf177", "score": "0.42414877", "text": "func (in *ResourceClaim) DeepCopy() *ResourceClaim {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ResourceClaim)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "title": "" }, { "docid": "d1afde2e79198b667bfac5fb8e5382e3", "score": "0.42396998", "text": "func (ao *assembledOrg) registerResource(src resourceHandler, dst *referenceYAML) error {\n\tif _, err := ao.resourceMap.getInit(src.resId(), src); err != nil {\n\t\treturn err\n\t}\n\n\tif dst == nil { // no outgoing reference from src\n\t\treturn nil\n\t}\n\n\t// initialize a resource on the resource map, pending future definition on YAML\n\tvar dstYAML resourceHandler\n\tif dst.TargetType() == Organization {\n\t\tif ao.org.Spec.Id == \"\" {\n\t\t\tao.org.Spec.Id = dst.TargetId\n\t\t} else if ao.org.Spec.Id != dst.TargetId {\n\t\t\tlog.Printf(\"fatal: org is identified as %s, cannot remap to %s\\n\", ao.org.Spec.Id, dst.TargetId)\n\t\t\treturn errConflictDefinition\n\t\t}\n\t\tdstYAML = &ao.org // allow future resolution to pick up finalized org directly\n\t}\n\n\tdstRes, err := ao.resourceMap.getInit(dst.resId(), dstYAML)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// update referenceTracker for references from src\n\tdstRes.inRefs = append(dstRes.inRefs, src)\n\treturn nil\n}", "title": "" }, { "docid": "493770c3ecc34fe7a7979471c388f5ac", "score": "0.42269853", "text": "func (resource *MongoDBDatabaseResource) PopulateFromARM(owner genruntime.ArbitraryOwnerReference, armInput interface{}) error {\n\ttypedInput, ok := armInput.(MongoDBDatabaseResource_ARM)\n\tif !ok {\n\t\treturn fmt.Errorf(\"unexpected type supplied for PopulateFromARM() function. 
Expected MongoDBDatabaseResource_ARM, got %T\", armInput)\n\t}\n\n\t// Set property \"Id\":\n\tif typedInput.Id != nil {\n\t\tid := *typedInput.Id\n\t\tresource.Id = &id\n\t}\n\n\t// No error\n\treturn nil\n}", "title": "" }, { "docid": "0cb5a90521082cc8ecd97695349c3daa", "score": "0.4226899", "text": "func (r *ReconcileCustomResourceDefinition) Reconcile(request reconcile.Request) (reconcile.Result, error) {\n\treqLogger := log.WithValues(\"Request.Namespace\", request.Namespace, \"Request.Name\", request.Name)\n\treqLogger.Info(\"Reconciling CustomResourceDefinition\")\n\n\t// Fetch the CustomResourceDefinition instance\n\tcrd := &apiextensions.CustomResourceDefinition{}\n\terr := r.client.Get(context.TODO(), request.NamespacedName, crd)\n\tif err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\t// Request object not found, could have been deleted after reconcile request.\n\t\t\t// Owned objects are automatically garbage collected. For additional cleanup logic use finalizers.\n\t\t\t// Return and don't requeue\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t\t// Error reading the object - requeue the request.\n\t\t//return reconcile.Result{}, err\n\t}\n\n\tcrdName := crd.Spec.Names.Plural\n\tgroupName := crd.Spec.Group\n\tclusterRole := &rbac.ClusterRole{}\n\tclusterRoleName := \"\"\n\n\t//Determine if the crd is namespace scoped or cluster scoped\n\tif crd.Spec.Scope == \"namespaced\" {\n\t\tclusterRoleName = \"dedicated-admins-project-crds\"\n\t} else {\n\t\tclusterRoleName = \"dedicated-admins-cluster-crds\"\n\t}\n\n\terr = r.client.Get(context.TODO(), types.NamespacedName{Name: clusterRoleName}, clusterRole)\n\tif err != nil {\n\t\tfailedToGetCRDMsg := fmt.Sprintf(\"Failed to get Cluster Role %s\", clusterRoleName)\n\t\treqLogger.Error(err, failedToGetCRDMsg)\n\t\treturn reconcile.Result{}, err\n\t}\n\n\t// if found = true, break.\n\t// if found = false, add permission. 
via appending a new object, use r.client.update to update the role.\n\tfound := isPermissionInClusterrole(crdName, groupName, clusterRole)\n\tif found == true {\n\t\t//Permission is already present\n\t\treturn reconcile.Result{}, nil\n\t} else {\n\t\t// Mapping to store what will be added to the role/clusterrole\n\t\tnewRule := ruleTemplate\n\n\t\tnewRule.APIGroups = []string{\n\t\t\tgroupName,\n\t\t}\n\n\t\tnewRule.Resources = []string{\n\t\t\tcrdName,\n\t\t}\n\n\t\tclusterRole.Rules = append(clusterRole.Rules, newRule)\n\t\t//Logic to add the newRule to the clusterrole\n\n\t\treturn reconcile.Result{}, err\n\t}\n}", "title": "" }, { "docid": "e0f3a14f9e8ef4519b32de610eb4d42f", "score": "0.42267013", "text": "func updateResource(kclient client.Client, object, owner metav1.Object, scheme *runtime.Scheme, merge bool) (controllerutil.OperationResult, error) {\n\texistingVal := reflect.New(reflect.TypeOf(object).Elem())\n\texistingObj := existingVal.Interface().(metav1.Object)\n\texistingObj.SetNamespace(object.GetNamespace())\n\texistingObj.SetName(object.GetName())\n\texistingRuntimeObj, ok := existingObj.(runtime.Object)\n\tif !ok {\n\t\treturn controllerutil.OperationResultNone, fmt.Errorf(\"not a runtime Object\")\n\t}\n\n\tkey, err := client.ObjectKeyFromObject(existingRuntimeObj)\n\tif err != nil {\n\t\treturn controllerutil.OperationResultNone, err\n\t}\n\n\t// create new if existing is not found\n\tif err := kclient.Get(context.TODO(), key, existingRuntimeObj); err != nil {\n\t\tif !errors.IsNotFound(err) {\n\t\t\treturn controllerutil.OperationResultNone, err\n\t\t}\n\t\tif owner != nil && scheme != nil {\n\t\t\tif err = controllerutil.SetControllerReference(owner, object, scheme); err != nil {\n\t\t\t\treturn controllerutil.OperationResultNone, err\n\t\t\t}\n\t\t}\n\t\tif err := kclient.Create(context.TODO(), object.(runtime.Object)); err != nil {\n\t\t\treturn controllerutil.OperationResultNone, err\n\t\t}\n\t\treturn controllerutil.OperationResultCreated, nil\n\t}\n\n\tchanged := false\n\tif !apiequality.Semantic.DeepEqual(existingObj.GetAnnotations(), object.GetAnnotations()) {\n\t\texistingObj.SetAnnotations(object.GetAnnotations())\n\t\tchanged = true\n\t}\n\tif !apiequality.Semantic.DeepEqual(existingObj.GetLabels(), object.GetLabels()) {\n\t\texistingObj.SetLabels(object.GetLabels())\n\t\tchanged = true\n\t}\n\n\t// deep copy spec so we can apply and detect changes\n\t// particularly with using merge, it's difficult to know what's changed, so we'd have to apply\n\t// updates and confirm\n\texistingCopy := existingRuntimeObj.DeepCopyObject()\n\n\texistingSpec := existingVal.Elem().FieldByName(\"Spec\")\n\ttargetSpec := reflect.ValueOf(object).Elem().FieldByName(\"Spec\")\n\tif merge {\n\t\tobjects.MergeObject(existingSpec.Addr().Interface(), targetSpec.Addr().Interface())\n\t} else {\n\t\texistingSpec.Set(targetSpec)\n\t}\n\tcopiedSpec := reflect.ValueOf(existingCopy).Elem().FieldByName(\"Spec\")\n\tif !apiequality.Semantic.DeepEqual(existingSpec.Addr().Interface(), copiedSpec.Addr().Interface()) {\n\t\t//log.Info(\"changes detected\", \"old\", copiedSpec.Addr().Interface(), \"new\", existingSpec.Addr().Interface())\n\t\tchanged = true\n\t}\n\n\t// copy over status if available\n\texistingStatus := existingVal.Elem().FieldByName(\"Status\")\n\ttargetStatus := reflect.ValueOf(object).Elem().FieldByName(\"Status\")\n\tif targetStatus.IsValid() {\n\t\texistingStatus.Set(targetStatus)\n\t}\n\n\tres := controllerutil.OperationResultNone\n\tif changed {\n\t\tif err := 
kclient.Update(context.TODO(), existingRuntimeObj); err != nil {\n\t\t\treturn res, err\n\t\t}\n\t\tres = controllerutil.OperationResultUpdated\n\t}\n\n\t// use existing value\n\treflect.ValueOf(object).Elem().Set(existingVal.Elem())\n\treturn res, nil\n}", "title": "" }, { "docid": "0a7d619cfb4e643b848b18f0a86ec116", "score": "0.42260107", "text": "func (l *Lock) Acquire(resource string, ttl time.Duration) (bool, error) {\n\tif ttl == 0 {\n\t\tttl = defaultTTL\n\t}\n\tttlSec := int(ttl.Seconds())\n\tvar name, owner string\n\tapplied, err := l.session.Query(cqlInsertLock, resource, l.tenantID, ttlSec).ScanCAS(&name, &owner)\n\tif err != nil {\n\t\treturn false, fmt.Errorf(\"failed to acquire resource lock due to cassandra error: %w\", err)\n\t}\n\tif applied {\n\t\t// The lock was successfully created\n\t\treturn true, nil\n\t}\n\tif owner == l.tenantID {\n\t\t// This host already owns the lock, extend the lease\n\t\tif err = l.extendLease(resource, ttl); err != nil {\n\t\t\treturn false, fmt.Errorf(\"failed to extend lease on resource lock: %w\", err)\n\t\t}\n\t\treturn true, nil\n\t}\n\treturn false, nil\n}", "title": "" }, { "docid": "6bb5fe7c5948aedbb4afecf76e670bcf", "score": "0.4225852", "text": "func (s *peerStore) Claim(id NodeID) *peerInfo {\n\ts.mtx.Lock()\n\tdefer s.mtx.Unlock()\n\tif s.claimed[id] {\n\t\treturn nil\n\t}\n\tpeer, ok := s.peers[id]\n\tif !ok {\n\t\tpeer = newStorePeer(id)\n\t\ts.peers[id] = peer\n\t}\n\ts.claimed[id] = true\n\treturn peer\n}", "title": "" }, { "docid": "a65030783f5bd3ec0a18f3d56004e562", "score": "0.42232716", "text": "func (r *Request) Resource(resource string) *Request {\n\tr.resource = resource\n\treturn r\n}", "title": "" }, { "docid": "654bac888264b46535d60d02255fd3f1", "score": "0.42224216", "text": "func (r *VpDeploymentTargetReconciler) updateResource(resource *v1beta2.VpDeploymentTarget, depTarget *appmanagerapi.DeploymentTarget) error {\n\tctx := context.Background()\n\n\tresource.Annotations = annotations.Set(resource.Annotations,\n\t\tannotations.Pair(annotations.ID, depTarget.Metadata.Id),\n\t\tannotations.Pair(annotations.ResourceVersion, strconv.Itoa(int(depTarget.Metadata.ResourceVersion))))\n\n\tif err := r.Update(ctx, resource); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" } ]
675edb866435a55ca57a805a2038281c
Working helpers ContentType : get a content type of requested file/value
[ { "docid": "df5f697cfd858fda42dfc38ca44760c1", "score": "0.70880836", "text": "func ContentType(filename string, filesize int64, contbuffer []byte, csizebuffer int) (conttype string, err error) {\n\n\tconttype = mime.TypeByExtension(filepath.Ext(filename))\n\n\tif conttype == \"\" && filesize >= 512 {\n\n\t\tconttype = http.DetectContentType(contbuffer[:csizebuffer])\n\t\treturn conttype, err\n\n\t}\n\n\treturn conttype, err\n\n}", "title": "" } ]
[ { "docid": "aea6381fe6a0c3ed2f2a2cc5a6f0cf5b", "score": "0.72675747", "text": "func (a *Artifact) ContentType() string {\n\tctype := mime.TypeByExtension(path.Ext(a.Path.From))\n\tif ctype != \"\" {\n\t\treturn ctype\n\t}\n\n\tf, err := os.Open(a.Path.Fullpath())\n\tif err != nil {\n\t\treturn defaultCtype\n\t}\n\n\tvar buf bytes.Buffer\n\n\t_, err = io.CopyN(&buf, f, int64(512))\n\tif err != nil && err != io.EOF {\n\t\treturn defaultCtype\n\t}\n\n\treturn http.DetectContentType(buf.Bytes())\n}", "title": "" }, { "docid": "6ec45409facbce5af48e078c0a3232da", "score": "0.71256524", "text": "func rawContentType(value ContentReader) string {\n\tcontentTyped, ok := value.(ContentTypeReader)\n\tif !ok {\n\t\treturn \"application/octet-stream\"\n\t}\n\n\tcontentType := contentTyped.ContentType()\n\tif contentType == \"\" {\n\t\treturn \"application/octet-stream\"\n\t}\n\n\treturn contentType\n}", "title": "" }, { "docid": "669874ad9b657a45dcc0a52a60ec7019", "score": "0.7093679", "text": "func getContentType(file string) string {\n\text := path.Ext(file)\n\tmType := mime.TypeByExtension(ext)\n\tif mType == \"\" {\n\t\treturn \"application/octet-stream\"\n\t}\n\treturn mType\n}", "title": "" }, { "docid": "6e70eca914f556a2a3e17c256fb70bea", "score": "0.7030888", "text": "func getMIMEType(p string, cts MIMETypes) string {\n\text := filepath.Ext(p)\n\tswitch ext {\n\tcase \".aac\":\n\t\treturn \"audio/aac\"\n\tcase \".avi\":\n\t\treturn \"video/x-msvideo\"\n\tcase \".bmp\":\n\t\treturn \"image/bmp\"\n\tcase \".css\":\n\t\treturn \"text/css\"\n\tcase \".csv\":\n\t\treturn \"text/csv\"\n\tcase \".gif\":\n\t\treturn \"image/gif\"\n\tcase \".html\", \".htm\":\n\t\treturn \"text/html\"\n\tcase \".jpeg\", \".jpg\":\n\t\treturn \"image/jpeg\"\n\tcase \".js\":\n\t\treturn \"text/javascript\"\n\tcase \".json\":\n\t\treturn \"text/json\"\n\tcase \".mpeg\":\n\t\treturn \"video/mpeg\"\n\tcase \".png\":\n\t\treturn \"image/png\"\n\tcase \".pdf\":\n\t\treturn \"application/pdf\"\n\tcase \".svg\":\n\t\treturn \"image/svg+xml\"\n\tcase \".txt\":\n\t\treturn \"text/plain\"\n\tcase \".xhtml\":\n\t\treturn \"application/xhtml-xml\"\n\tcase \".xml\":\n\t\treturn \"application/xml\"\n\tcase \".zip\":\n\t\treturn \"application/zip\"\n\t}\n\n\t// Load custom content type if it exists\n\tif val, ok := cts.ResponseTypes[p]; ok {\n\t\treturn val\n\t}\n\n\t// Default Content-Type is octet-stream\n\treturn \"application/octet-stream\"\n}", "title": "" }, { "docid": "35929c85296f5031199cf76f54e4c6bf", "score": "0.6910159", "text": "func (f *cachedFile) Type() string {\n\treturn mime.TypeByExtension(filepath.Ext(f.name))\n\n\t// var buf [1024]byte\n\t// n, _ := io.ReadFull(content, buf[:])\n\t// b := buf[:n]\n\t// ctype = DetectContentType(b)\n\t// _, err := content.Seek(0, os.SEEK_SET)\n}", "title": "" }, { "docid": "d4e3b31529c00cbb4fb27c5d7f7e98f8", "score": "0.6891044", "text": "func (m Mode) ContentType() string {\n\tswitch m {\n\tcase ModeBinary:\n\t\treturn \"\"\n\tcase ModeStructure:\n\t\treturn \"application/cloudevents\"\n\tcase ModeBatch:\n\t\treturn \"application/cloudevents-batch\"\n\tdefault:\n\t\treturn \"\"\n\t}\n}", "title": "" }, { "docid": "21a3489036a152296b388e89f8f99436", "score": "0.6875204", "text": "func (sm SkyfileMetadata) ContentType() string {\n\tif len(sm.Subfiles) == 1 {\n\t\tfor _, sf := range sm.Subfiles {\n\t\t\treturn sf.ContentType\n\t\t}\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "c3a03f456bf07d3909b2674ff62188a3", "score": "0.684398", "text": "func get_content_type(str string) int 
{\n\tjson_template := make(map[string]interface{})\n\terr := json.Unmarshal([]byte(str), &json_template)\n\n\tif err == nil {\n\t\treturn CONTENT_TYPE_JSON\n\t}\n\treturn CONTENT_TYPE_OTHER\n}", "title": "" }, { "docid": "6baef610e833079a901b0d9ca2d68b70", "score": "0.6831914", "text": "func (r *Reader) ContentType() string {\n\treturn r.contentType\n}", "title": "" }, { "docid": "066fcda1bbb01b31a573a4d1aa258f12", "score": "0.6795004", "text": "func (m MediaType) ContentType() string {\n\tif len(m.Type) > 0 && m.Charset != \"\" {\n\t\treturn fmt.Sprintf(\"%s; charset=%s\", m.Type, m.Charset)\n\t}\n\treturn m.Type\n}", "title": "" }, { "docid": "e1eb550e81d2b0b781fc8fd5b151e2b1", "score": "0.6792251", "text": "func (o *Attachment) ContentType() string {\n\tif o != nil && o.bitmap_&8 != 0 {\n\t\treturn o.contentType\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "b7dae0fec7b9f2c612ac7e337ff185b2", "score": "0.67723465", "text": "func getFileContentType(file *os.File) (string, error) {\n\t// Using the first 512 bytes to detect the content type.\n\tbuffer := make([]byte, 512)\n\t_, err := file.Read(buffer)\n\t// Reset the file pointer\n\tfile.Seek(0, io.SeekStart)\n\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn http.DetectContentType(buffer), nil\n}", "title": "" }, { "docid": "1fdfd25d9b419d0cc5c1c675986c1430", "score": "0.67601645", "text": "func ContentType(r io.Reader, name string) (ctype string, bufferedContent io.Reader, err error) {\n\tctype = mime.TypeByExtension(filepath.Ext(name))\n\tif ctype == \"\" && r != nil {\n\t\t// read a chunk to decide between utf-8 text and binary\n\t\tvar buf [sniffLen]byte\n\t\tvar n int\n\t\tif readSeeker, ok := r.(io.Seeker); ok {\n\t\t\tn, _ = io.ReadFull(r, buf[:])\n\t\t\t_, err = readSeeker.Seek(0, io.SeekStart) // rewind to output whole file\n\t\t\tif err != nil {\n\t\t\t\terr = errors.New(\"seeker can't seek\")\n\t\t\t\treturn \"\", r, err\n\t\t\t}\n\t\t} else {\n\t\t\tcontentBuffer := bufio.NewReader(r)\n\t\t\tsniffed, err := contentBuffer.Peek(sniffLen)\n\t\t\tif err != nil {\n\t\t\t\terr = errors.New(\"reader can't read\")\n\t\t\t\treturn \"\", contentBuffer, err\n\t\t\t}\n\t\t\tn = copy(buf[:], sniffed)\n\t\t\tr = contentBuffer\n\t\t}\n\t\tctype = http.DetectContentType(buf[:n])\n\t}\n\treturn ctype, r, nil\n}", "title": "" }, { "docid": "b8c599899f4e2b83b4bc4f1d1c6721fd", "score": "0.67519635", "text": "func contentType(url string) (string, error) {\n\tresp, err := http.Get(url)\n\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\t// make sure body is clsoed\n\tdefer resp.Body.Close()\n\n\tctype := resp.Header.Get(\"Content-Type\")\n\tif ctype == \"\" {\n\t\treturn \"\", fmt.Errorf(\"can't find content-type\")\n\t}\n\n\treturn ctype, nil\n}", "title": "" }, { "docid": "f845a39df1d72988ad10572fc729c28f", "score": "0.67256504", "text": "func getContentType(pvc *corev1.PersistentVolumeClaim) string {\n\tcontentType, found := pvc.Annotations[AnnContentType]\n\tif !found {\n\t\tcontentType = \"\"\n\t}\n\tswitch contentType {\n\tcase\n\t\tstring(cdiv1.DataVolumeKubeVirt),\n\t\tstring(cdiv1.DataVolumeArchive):\n\tdefault:\n\t\tcontentType = string(cdiv1.DataVolumeKubeVirt)\n\t}\n\treturn contentType\n}", "title": "" }, { "docid": "e520ba7ca643dc2354defcc060dbf344", "score": "0.6711457", "text": "func (t *Ting) ContentType(name string) (int, response.JSend) {\n ty, err := t.Backend.ContentType(name)\n if err != nil {\n return response.Error(err).Wrap()\n }\n\n return response.Success(ty).Wrap()\n}", "title": "" }, { "docid": 
"fc31f7bfbb5a5243af4b0787aef66f18", "score": "0.66971296", "text": "func (o LookupBlobResultOutput) ContentType() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupBlobResult) string { return v.ContentType }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "45e86a80bac5440942f111e6cee4e1c7", "score": "0.6688695", "text": "func (c *ClipboardService) ContentType() (string, error) {\n\tvar format uint32\n\terr := c.withOpenClipboard(func() error {\n\t\tfor _, f := range Formats {\n\t\t\tisAvaliable := win.IsClipboardFormatAvailable(f)\n\t\t\tif isAvaliable {\n\t\t\t\tformat = f\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\treturn lastError(\"get content type of clipboard\")\n\t})\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tswitch format {\n\tcase win.CF_HDROP:\n\t\treturn \"file\", nil\n\tcase win.CF_DIBV5:\n\t\treturn \"CF_DIBV5\", nil\n\tcase win.CF_UNICODETEXT:\n\t\treturn \"text\", nil\n\tdefault:\n\t\treturn \"unknown\", nil\n\t}\n}", "title": "" }, { "docid": "93a2bbd88f7298cdcb25aef1606c0aeb", "score": "0.6672452", "text": "func ResolveContentType(path string) string {\n\tswitch filepath.Ext(path) {\n\tcase \".aac\":\n\t\treturn \"audio/aac\"\n\tcase \".abw\":\n\t\treturn \"application/x-abiword\"\n\tcase \".arc\":\n\t\treturn \"application/x-freearc\"\n\tcase \".avi\":\n\t\treturn \"video/x-msvideo\"\n\tcase \".azw\":\n\t\treturn \"application/vnd.amazon.ebook\"\n\tcase \".bin\":\n\t\treturn \"application/octet-stream\"\n\tcase \".bmp\":\n\t\treturn \"image/bmp\"\n\tcase \".bz\":\n\t\treturn \"application/x-bzip\"\n\tcase \".bz2\":\n\t\treturn \"application/x-bzip2\"\n\tcase \".csh\":\n\t\treturn \"application/x-csh\"\n\tcase \".csv\":\n\t\treturn \"text/csv\"\n\tcase \".doc\":\n\t\treturn \"application/msword\"\n\tcase \".docx\":\n\t\treturn \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\"\n\tcase \".eot\":\n\t\treturn \"application/vnd.ms-fontobject\"\n\tcase \".epub\":\n\t\treturn \"application/epub+zip\"\n\tcase \".gz\":\n\t\treturn \"application/gzip\"\n\tcase \".gif\":\n\t\treturn \"image/gif\"\n\tcase \".htm\":\n\t\treturn \"text/html\"\n\tcase \".html\":\n\t\treturn \"text/html\"\n\tcase \".ico\":\n\t\treturn \"image/vnd.microsoft.icon\"\n\tcase \".ics\":\n\t\treturn \"text/calendar\"\n\tcase \".jar\":\n\t\treturn \"application/java-archive\"\n\tcase \".jpg\", \".jpeg\":\n\t\treturn \"image/jpeg\"\n\tcase \".jsonld\":\n\t\treturn \"application/ld+json\"\n\tcase \".midi\", \".mid\":\n\t\treturn \"audio/midi\"\n\tcase \".mp3\":\n\t\treturn \"audio/mpeg\"\n\tcase \".mpeg\":\n\t\treturn \"video/mpeg\"\n\tcase \".mpkg\":\n\t\treturn \"application/vnd.apple.installer+xml\"\n\tcase \".odp\":\n\t\treturn \"application/vnd.oasis.opendocument.presentation\"\n\tcase \".ods\":\n\t\treturn \"application/vnd.oasis.opendocument.spreadsheet\"\n\tcase \".odt\":\n\t\treturn \"application/vnd.oasis.opendocument.text\"\n\tcase \".oga\":\n\t\treturn \"audio/ogg\"\n\tcase \".ogv\":\n\t\treturn \"video/ogg\"\n\tcase \".ogx\":\n\t\treturn \"application/ogg\"\n\tcase \".opus\":\n\t\treturn \"audio/opus\"\n\tcase \".otf\":\n\t\treturn \"font/otf\"\n\tcase \".png\":\n\t\treturn \"image/png\"\n\tcase \".pdf\":\n\t\treturn \"application/pdf\"\n\tcase \".php\":\n\t\treturn \"application/x-httpd-php\"\n\tcase \".ppt\":\n\t\treturn \"application/vnd.ms-powerpoint\"\n\tcase \".pptx\":\n\t\treturn \"application/vnd.openxmlformats-officedocument.presentationml.presentation\"\n\tcase \".rar\":\n\t\treturn \"application/vnd.rar\"\n\tcase \".rtf\":\n\t\treturn 
\"application/rtf\"\n\tcase \".sh\":\n\t\treturn \"application/x-sh\"\n\tcase \".svg\":\n\t\treturn \"image/svg+xml\"\n\tcase \".swf\":\n\t\treturn \"application/x-shockwave-flash\"\n\tcase \".tar\":\n\t\treturn \"application/x-tar\"\n\tcase \".tiff\", \".tif\":\n\t\treturn \"image/tiff\"\n\tcase \".ts\":\n\t\treturn \"video/mp2t\"\n\tcase \".ttf\":\n\t\treturn \"font/ttf\"\n\tcase \".vsd\":\n\t\treturn \"application/vnd.visio\"\n\tcase \".wav\":\n\t\treturn \"audio/wav\"\n\tcase \".weba\":\n\t\treturn \"audio/webm\"\n\tcase \".webm\":\n\t\treturn \"video/webm\"\n\tcase \".webp\":\n\t\treturn \"image/webp\"\n\tcase \".woff\":\n\t\treturn \"font/woff\"\n\tcase \".woff2\":\n\t\treturn \"font/woff2\"\n\tcase \".xhtml\":\n\t\treturn \"application/xhtml+xml\"\n\tcase \".xls\":\n\t\treturn \"application/vnd.ms-excel\"\n\tcase \".xlsx\":\n\t\treturn \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\"\n\tcase \".xml\":\n\t\treturn \"application/xml\"\n\tcase \".xul\":\n\t\treturn \"application/vnd.mozilla.xul+xml\"\n\tcase \".zip\":\n\t\treturn \"application/zip\"\n\tcase \".3gp\":\n\t\treturn \"video/3gpp\"\n\tcase \".3g2\":\n\t\treturn \"video/3gpp2\"\n\tcase \".7z\":\n\t\treturn \"application/x-7z-compressed\"\n\tcase \".js\", \".mjs\":\n\t\treturn \"text/javascript; charset=UTF-8\"\n\tcase \".css\":\n\t\treturn \"text/css; charset=UTF-8\"\n\tcase \".json\":\n\t\treturn \"application/json; charset=UTF-8\"\n\tcase \".txt\":\n\t\treturn \"text/plain; charset=UTF-8\"\n\tdefault:\n\t\treturn \"text/html; charset=UTF-8\"\n\t}\n}", "title": "" }, { "docid": "11398fdfe02371446237618a3a85e746", "score": "0.6587833", "text": "func FixContentType(body []byte, contentType, fileName string) string {\n\tswitch contentType {\n\tcase \"application/x-zip-compressed\":\n\t\treturn \"application/zip\"\n\tcase \"application/x-rar-compressed\":\n\t\treturn \"application/rar\"\n\tcase \"\", \"application/octet-stream\", \"application/pdf\", \"application/x-as400attachment\", \"application/save-as\":\n\t\t//log.Printf(\"body=%s\", body)\n\t\tif nct := mimemagic.Match(contentType, body); nct != \"\" {\n\t\t\treturn nct\n\t\t}\n\t}\n\tif GetConverter(contentType, nil) == nil { // no converter for this\n\t\tif nct := mimemagic.Match(contentType, body); nct != \"\" {\n\t\t\treturn nct\n\t\t}\n\t}\n\tif fileName != \"\" &&\n\t\t(contentType == \"\" || contentType == \"application/octet-stream\" ||\n\t\t\tGetConverter(contentType, nil) == nil) {\n\t\tif i := strings.LastIndex(fileName, \".\"); i >= 0 {\n\t\t\tif nct, ok := ExtContentType[fileName[i+1:]]; ok {\n\t\t\t\treturn nct\n\t\t\t}\n\t\t\tif nct := mime.TypeByExtension(fileName[i:]); nct != \"\" {\n\t\t\t\treturn nct\n\t\t\t}\n\t\t}\n\t}\n\t//log.Printf(\"ct=%s ==> %s\", ct, contentType)\n\treturn contentType\n}", "title": "" }, { "docid": "6ad47c6dcc01d957e953095def18e9fd", "score": "0.6575407", "text": "func (r *echoRequest) ContentType() string {\n\treturn r.echoCtx.Request().Header.Get(\"Content-Type\")\n}", "title": "" }, { "docid": "e986db500db541b65e8235b8d0cc9f21", "score": "0.6487067", "text": "func (_class SRClass) GetContentType(sessionID SessionRef, self SRRef) (_retval string, _err error) {\n\t_method := \"SR.get_content_type\"\n\t_sessionIDArg, _err := convertSessionRefToXen(fmt.Sprintf(\"%s(%s)\", _method, \"session_id\"), sessionID)\n\tif _err != nil {\n\t\treturn\n\t}\n\t_selfArg, _err := convertSRRefToXen(fmt.Sprintf(\"%s(%s)\", _method, \"self\"), self)\n\tif _err != nil {\n\t\treturn\n\t}\n\t_result, _err := 
_class.client.APICall(_method, _sessionIDArg, _selfArg)\n\tif _err != nil {\n\t\treturn\n\t}\n\t_retval, _err = convertStringToGo(_method + \" -> \", _result.Value)\n\treturn\n}", "title": "" }, { "docid": "f72e7bb4ec254eaa45fd28836901b040", "score": "0.6456153", "text": "func getContentType(slc []string) string {\r\n\tif len(slc) > 0 {\r\n\t\traw := slc[0]\r\n\t\tsemicolon := strings.IndexRune(raw, ';')\r\n\t\tif semicolon > 0 {\r\n\t\t\traw = raw[:semicolon]\r\n\t\t}\r\n\t\treturn raw\r\n\t}\r\n\treturn \"none\"\r\n}", "title": "" }, { "docid": "86f4715a43179f9f27430fa42f7277e8", "score": "0.64135975", "text": "func (o LookupFeedResultOutput) ContentType() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupFeedResult) string { return v.ContentType }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "b8d8fbc130556aae5185ebe534a5f76e", "score": "0.63867486", "text": "func (v Volume) ContentType() ContentType {\n\treturn v.contentType\n}", "title": "" }, { "docid": "34437827583b0ec6428bdf638ad16219", "score": "0.6379348", "text": "func ContentType(headers http.Header) (string, string, error) {\n\tct := headers.Get(HeaderContentType)\n\torig := ct\n\tif ct == \"\" {\n\t\tct = DefaultMime\n\t}\n\tif ct == \"\" {\n\t\treturn \"\", \"\", nil\n\t}\n\n\tmt, opts, err := mime.ParseMediaType(ct)\n\tif err != nil {\n\t\treturn \"\", \"\", errors.NewParseError(HeaderContentType, \"header\", orig, err)\n\t}\n\n\tif cs, ok := opts[charsetKey]; ok {\n\t\treturn mt, cs, nil\n\t}\n\n\treturn mt, \"\", nil\n}", "title": "" }, { "docid": "405f6ea2e7a1c486f3bb3deff82d3f9b", "score": "0.63398993", "text": "func (o FolderFeedOutput) ContentType() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *FolderFeed) pulumi.StringPtrOutput { return v.ContentType }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "685f7dcebc1615dfa52980a8188f88d6", "score": "0.63357556", "text": "func (t contentType) String() string {\n\treturn contentTypes[t]\n}", "title": "" }, { "docid": "48bbad005fc75f0246d601981b89503c", "score": "0.6332783", "text": "func (o *Object) MimeType(ctx context.Context) string {\n\terr := o.readEntryAndSetMetadata(ctx)\n\tif err != nil {\n\t\treturn \"\"\n\t}\n\treturn o.file.ContentType\n}", "title": "" }, { "docid": "b8aa0ffd378a8e692c42d22106811c57", "score": "0.63154167", "text": "func contentType(resp *http.Response) string {\n\tv := resp.Header.Get(\"Content-Type\")\n\tvSplit := strings.SplitN(v, \";\", 2)\n\tvTrimmed := strings.TrimSpace(vSplit[0])\n\treturn vTrimmed\n}", "title": "" }, { "docid": "3719ff9624ede01a2b8c648d206b4e83", "score": "0.6308085", "text": "func (_this *MimeType) Type() string {\n\tvar ret string\n\tvalue := _this.Value_JS.Get(\"type\")\n\tret = (value).String()\n\treturn ret\n}", "title": "" }, { "docid": "cf852762d699fa1e8fba06e69492c078", "score": "0.62972975", "text": "func (o *Attachment) GetContentType() (value string, ok bool) {\n\tok = o != nil && o.bitmap_&8 != 0\n\tif ok {\n\t\tvalue = o.contentType\n\t}\n\treturn\n}", "title": "" }, { "docid": "3f625e9c17b9e81b58ee743d118bc4c8", "score": "0.62814194", "text": "func (sec Security) HTTPFileType() *HTTPFileTypeResource {\n\treturn &sec.hTTPFileType\n}", "title": "" }, { "docid": "e39727a4172d7d111deaee8378576c00", "score": "0.62658465", "text": "func (imr *InvokeMethodRequest) ContentType() string {\n\tm := imr.r.Message\n\tif m == nil {\n\t\treturn \"\"\n\t}\n\n\treturn m.GetContentType()\n}", "title": "" }, { "docid": "052337b51b2c30cb73aaf9cce56d32ec", "score": "0.626399", "text": "func (r 
*Response) ContentType() string {\n\treturn r.Headers()[\"Content-Type\"]\n}", "title": "" }, { "docid": "a6815088a396af2c85381d4f26213e8b", "score": "0.62567925", "text": "func (c *Copier) ContentType(val string) {\n\tc.copier.ContentType = val\n}", "title": "" }, { "docid": "a72a8e55a085ebbb60f28fd0668eeb57", "score": "0.6236962", "text": "func (Content) Type() string {\n\treturn \"Content\"\n}", "title": "" }, { "docid": "f7fd02e06f24ee2acbfb11c26fbd63be", "score": "0.6235006", "text": "func (h responseHeaderMap) ContentType() volatile.String {\n\tvar value C.GoStr\n\tC.ResponseHeaderMap_ContentType(h.ptr, &value)\n\treturn CStrN(value.data, value.len)\n}", "title": "" }, { "docid": "8c0e7da1db11abe7fc01645e4d49bd0b", "score": "0.6234315", "text": "func (mclean *MarshalClean) ContentType() string {\n\treturn mclean.m.ContentType()\n}", "title": "" }, { "docid": "5b7e1951cd1abb4fad412a8d5517fba3", "score": "0.6197482", "text": "func ContentTypeGetMimeType(type_ string) (return__ string) {\n\t__cgo__type_ := (*C.gchar)(unsafe.Pointer(C.CString(type_)))\n\tvar __cgo__return__ *C.gchar\n\t__cgo__return__ = C.g_content_type_get_mime_type(__cgo__type_)\n\tC.free(unsafe.Pointer(__cgo__type_))\n\treturn__ = C.GoString((*C.char)(unsafe.Pointer(__cgo__return__)))\n\treturn\n}", "title": "" }, { "docid": "41ede67937fb8cac5f2669fc1a979620", "score": "0.6134131", "text": "func (c *Content) GetType() string {\n\tif c == nil || c.Type == nil {\n\t\treturn \"\"\n\t}\n\treturn *c.Type\n}", "title": "" }, { "docid": "916b0a32abcc9d02a5198311f12634ba", "score": "0.6130846", "text": "func (o HostedConfigurationVersionOutput) ContentType() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *HostedConfigurationVersion) pulumi.StringOutput { return v.ContentType }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "491bbf1822f552dcdee7c0fb53c98b75", "score": "0.61168295", "text": "func (ct SniffedType) GetMimeType() string {\n\treturn strings.SplitN(ct.contentType, \";\", 2)[0]\n}", "title": "" }, { "docid": "5ebd0b43888d20cf9b3fdda6e9da3602", "score": "0.6087099", "text": "func overrideContentType(metadata map[string]string) string {\n\tct := \"application/vnd.redhat.catalog.filename+tgz\"\n\tif val, ok := metadata[\"task_url\"]; ok {\n\t\tparts := strings.Split(val, \"/\")\n\t\ttaskID := parts[len(parts)-1]\n\t\tct = fmt.Sprintf(\"application/vnd.redhat.catalog.%s+tgz\", taskID)\n\t}\n\treturn ct\n}", "title": "" }, { "docid": "5ad446f11de199b5d4458578dee8d588", "score": "0.608406", "text": "func getFileType(b []byte) (string) {\n m := http.DetectContentType(b)\n sl := strings.Split(m, \"/\")\n\n return sl[len(sl)-1]\n}", "title": "" }, { "docid": "2e1494505bbc94dad918908ba6dff4bd", "score": "0.60754305", "text": "func (t *Ting) ValidateContentType(name string, structure []byte) (*backend.ContentType, error) {\n var err error\n\n //Check if the type is reserved.\n if ReservedType(name) {\n\n //We can't edit the structure of reserved types.\n return nil, errors.New(fmt.Sprintf(\"'%s' is a reserved content type id.\", name))\n }\n\n s := &backend.ContentType{}\n s.Id = name\n\n //Attempt to unmarshal the structure into the *ContentType.\n err = json.Unmarshal(structure, &s.Structure)\n if err != nil {\n return nil, err\n }\n\n //Attempt to get existing content types.\n types, err := t.Backend.ContentTypes()\n if err != nil {\n return nil, err\n }\n\n //Reserved types, existing types and our own name form a slice.\n types = append(types, ReservedTypes()...)\n types = append(types, name)\n\n //Make sure that 
every field is valid by ensuring every field is in the slice.\n for _, field := range s.Structure {\n found := false\n for _, ty := range types {\n if field.Type == ty {\n found = true\n }\n }\n\n if !found {\n return nil, errors.New(fmt.Sprintf(\"Content type '%s' does not exist.\", field.Type))\n }\n }\n\n //Return our content type.\n return s, nil\n}", "title": "" }, { "docid": "3e4029fed1d88e9a315af3c34b9e948b", "score": "0.60648793", "text": "func ResolveContentType(req *http.Request) string {\n\tcontentType := req.Header.Get(\"Content-Type\")\n\tif contentType == \"\" {\n\t\treturn \"text/html\"\n\t}\n\treturn strings.ToLower(strings.TrimSpace(strings.Split(contentType, \";\")[0]))\n}", "title": "" }, { "docid": "e648f751e694aad6c1caa3a8aabacbe2", "score": "0.6063227", "text": "func isContentType(h http.Header, contentType string) bool {\n\tct := h.Get(\"Content-Type\")\n\tif i := strings.IndexRune(ct, ';'); i != -1 {\n\t\tct = ct[0:i]\n\t}\n\treturn ct == contentType\n}", "title": "" }, { "docid": "718b8539a0d4f0d41b75b117df25253d", "score": "0.6055043", "text": "func (self Header) GetMediaType() (mediatype string, params map[string]string, err error) {\n\treturn mime.ParseMediaType(self.Get(\"Content-Type\", \"text/plain\"))\n}", "title": "" }, { "docid": "b3e27948c4488ae0bdf8e0e6bf466ec9", "score": "0.60513467", "text": "func (d *Resource) MimeType() string {\n\treturn d.JSON.GetS(\"hasMimeType\")\n}", "title": "" }, { "docid": "44208e338f8fc504b893840c92dcca89", "score": "0.60477096", "text": "func ContentTypeFromMimeType(mime_type string) (return__ string) {\n\t__cgo__mime_type := (*C.gchar)(unsafe.Pointer(C.CString(mime_type)))\n\tvar __cgo__return__ *C.gchar\n\t__cgo__return__ = C.g_content_type_from_mime_type(__cgo__mime_type)\n\tC.free(unsafe.Pointer(__cgo__mime_type))\n\treturn__ = C.GoString((*C.char)(unsafe.Pointer(__cgo__return__)))\n\treturn\n}", "title": "" }, { "docid": "dc04d8a079f7ddf9588a7a149fe9b3e6", "score": "0.6028167", "text": "func FormFileDetectContentType(L *lua.LState) int {\n\t// Get form file\n\tformFile := getFormFileObject(L)\n\n\t// Push file content type\n\tL.Push(lua.LString(http.DetectContentType(formFile.File)))\n\n\treturn 1\n}", "title": "" }, { "docid": "c5ae518dec9f885c02d4f3b04131a3a8", "score": "0.60250837", "text": "func GetDefinition(contentType string, language ...string) (ContentType, error) {\n\tlanguageStr := \"default\"\n\tif len(language) > 0 {\n\t\tlanguageStr = language[0]\n\t}\n\tdefinition, ok := contentTypeDefinition[languageStr]\n\tif !ok {\n\t\tlog.Println(\"Language \" + languageStr + \" doesn't exist. 
use default.\")\n\t\tdefinition = contentTypeDefinition[\"default\"]\n\t}\n\tresult, ok := definition[contentType]\n\tif ok {\n\t\treturn result, nil\n\t} else {\n\t\treturn ContentType{}, errors.New(\"Content type doesn't exist: \" + contentType)\n\t}\n}", "title": "" }, { "docid": "3c33ed6686ec87b6f0d26a0fadd68235", "score": "0.6024869", "text": "func (r Data) ContentType() []string {\n\treturn r.CType\n}", "title": "" }, { "docid": "39fb35225d95173aa37ab978858c53df", "score": "0.60225147", "text": "func (me TContentTypeType) String() string { return xsdt.String(me).String() }", "title": "" }, { "docid": "c5aec304833494efabfe94bc589feaf1", "score": "0.60140383", "text": "func (i *FileInfo) detectType(modify, saveContent, readHeader bool) error {\n\tif IsNamedPipe(i.Mode) {\n\t\ti.Type = \"blob\"\n\t\treturn nil\n\t}\n\t// failing to detect the type should not return error.\n\t// imagine the situation where a file in a dir with thousands\n\t// of files couldn't be opened: we'd have immediately\n\t// a 500 even though it doesn't matter. So we just log it.\n\n\tmimetype := mime.TypeByExtension(i.Extension)\n\n\tvar buffer []byte\n\tif readHeader {\n\t\tbuffer = i.readFirstBytes()\n\n\t\tif mimetype == \"\" {\n\t\t\tmimetype = http.DetectContentType(buffer)\n\t\t}\n\t}\n\n\tswitch {\n\tcase strings.HasPrefix(mimetype, \"video\"):\n\t\ti.Type = \"video\"\n\t\ti.detectSubtitles()\n\t\treturn nil\n\tcase strings.HasPrefix(mimetype, \"audio\"):\n\t\ti.Type = \"audio\"\n\t\treturn nil\n\tcase strings.HasPrefix(mimetype, \"image\"):\n\t\ti.Type = \"image\"\n\t\treturn nil\n\tcase strings.HasSuffix(mimetype, \"pdf\"):\n\t\ti.Type = \"pdf\"\n\t\treturn nil\n\tcase (strings.HasPrefix(mimetype, \"text\") || !isBinary(buffer)) && i.Size <= 10*1024*1024: // 10 MB\n\t\ti.Type = \"text\"\n\n\t\tif !modify {\n\t\t\ti.Type = \"textImmutable\"\n\t\t}\n\n\t\tif saveContent {\n\t\t\tafs := &afero.Afero{Fs: i.Fs}\n\t\t\tcontent, err := afs.ReadFile(i.Path)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\ti.Content = string(content)\n\t\t}\n\t\treturn nil\n\tdefault:\n\t\ti.Type = \"blob\"\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "486f91337c19a08080ffa51ff828d404", "score": "0.60006684", "text": "func (s *Selection) GetMimeType() (val string) {\n\tif len(s.Nodes) == 0 {\n\t\treturn\n\t}\n\tnode_type := strings.ToLower(s.Nodes[0].Data)\n\tif node_type == \"object\" {\n\t\tval = strings.Trim(s.AttrOr(\"codetype\", \"\"), \" \\n\\t\\r\")\n\t\tif len(val) == 0 {\n\t\t\tval = strings.Trim(s.Find(\"param[type #= (.*)]\").AttrOr(\"type\", \"\"), \" \\n\\t\\r\")\n\t\t}\n\t\tif len(val) == 0 {\n\t\t\tval = strings.Trim(s.Find(\"embed[type #= (.*)]\").AttrOr(\"type\", \"\"), \" \\n\\t\\r\")\n\t\t}\n\t} else if node_type == \"embed\" {\n\t\tval = strings.Trim(s.AttrOr(\"type\", \"\"), \" \\n\\t\\r\")\n\t} else if node_type == \"param\" {\n\t\tval = strings.Trim(s.AttrOr(\"type\", \"\"), \" \\n\\t\\r\")\n\t}\n\tval = strings.ToLower(val)\n\treturn\n}", "title": "" }, { "docid": "ecef011835d17248cb4c906049064fd2", "score": "0.59710115", "text": "func (c *HttpClient) GetContentType(url core.URL) (string, error) {\n\tclient := http.DefaultClient\n\treq, _ := http.NewRequest(\"HEAD\", url, nil)\n\n\tfor k, v := range c.headers {\n\t\treq.Header.Set(k, v)\n\t}\n\n\tres, err := client.Do(req)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tif header, ok := res.Header[\"Content-Type\"]; ok {\n\t\treturn header[0], nil\n\t}\n\treturn \"\", fmt.Errorf(\"content-type not found\")\n}", "title": "" }, { 
"docid": "7bd92ffb0c91ec982ae59b9ccc31d8cf", "score": "0.5956501", "text": "func (*JSONPb) ContentType() string {\n\treturn \"application/json\"\n}", "title": "" }, { "docid": "c51627eeface19711001dbc9feae0f76", "score": "0.5950828", "text": "func (c *Action) SetContentType(val string) string {\r\n\tvar ctype string\r\n\tif strings.ContainsRune(val, '/') {\r\n\t\tctype = val\r\n\t} else {\r\n\t\tif !strings.HasPrefix(val, \".\") {\r\n\t\t\tval = \".\" + val\r\n\t\t}\r\n\t\tctype = mime.TypeByExtension(val)\r\n\t}\r\n\tif ctype != \"\" {\r\n\t\tc.SetHeader(\"Content-Type\", ctype)\r\n\t}\r\n\treturn ctype\r\n}", "title": "" }, { "docid": "ba1f9f3012d7ca139b3f2e9b177d264c", "score": "0.59426177", "text": "func getMediaType(ext string) (mt string) {\n\n\tswitch ext {\n\tcase \".mp3\":\n\t\tmt = \"audio/mpeg\"\n\tcase \".aac\":\n\t\tmt = \"audio/aac\"\n\tcase \".opus\":\n\t\tmt = \"audio/ogg\"\n\tcase \".wav\":\n\t\tmt = \"audio/wav\"\n\tcase \".jpeg\":\n\t\tmt = \"image/jpeg\"\n\tcase \".jpg\":\n\t\tmt = \"image/jpeg\"\n\tcase \".png\":\n\t\tmt = \"image/png\"\n\tcase \".gif\":\n\t\tmt = \"image/gif\"\n\tcase \".webp\":\n\t\tmt = \"image/webp\"\n\tcase \".json\":\n\t\tmt = \"application/json\"\n\tcase \".html\":\n\t\tmt = \"text/html\"\n\tcase \".css\":\n\t\tmt = \"text/css\"\n\tcase \".js\":\n\t\tmt = \"application/javascript\"\n\tcase \".epub\":\n\t\tmt = \"application/epub+zip\"\n\tcase \".pdf\":\n\t\tmt = \"application/pdf\"\n\t}\n\treturn\n}", "title": "" }, { "docid": "a052902e05ce303a074148e03e4be2cd", "score": "0.5939058", "text": "func (o BucketObjectOutput) ContentType() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *BucketObject) pulumi.StringOutput { return v.ContentType }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "3899c5294a16ffacc7698ba6882753d3", "score": "0.59376246", "text": "func haveType(w http.ResponseWriter) bool {\n\t_, ok := w.Header()[\"Content-Type\"]\n\treturn ok\n}", "title": "" }, { "docid": "5ba9d548a4b47acb69dd81c0d097e8c1", "score": "0.59334266", "text": "func requestContent(client *http.Client, tgtURL string) (mime string, body []byte, err error) {\n\tvar resp *http.Response\n\tresp, err = client.Get(tgtURL)\n\tif err != nil {\n\t\treturn \"\", nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\treturn validateContent(resp)\n}", "title": "" }, { "docid": "f8576c75d6b805a08d63ec6c7fcfa145", "score": "0.59264296", "text": "func DetectContentType(filePath string) (ct string, err error) {\n\tf, err := os.Open(filePath)\n\tif err != nil {\n\t\treturn\n\t}\n\tdefer f.Close()\n\t// Only the first 512 bytes are used to sniff the content type.\n\tbuffer := make([]byte, 512)\n\t_, err = f.Read(buffer)\n\tif err != nil {\n\t\treturn\n\t}\n\tct = http.DetectContentType(buffer)\n\treturn\n}", "title": "" }, { "docid": "afc1af049a86e1afa8c169dec86951f4", "score": "0.59256506", "text": "func (o *Object) MimeType(ctx context.Context) string {\n\terr := o.readMetaData(ctx)\n\tif err != nil {\n\t\tfs.Logf(o, \"Failed to read metadata: %v\", err)\n\t\treturn \"\"\n\t}\n\treturn o.mimeType\n}", "title": "" }, { "docid": "319ab4c5f374e872edf3ff5b1032bc5f", "score": "0.5895174", "text": "func (c ContentType) Execute(t *Test) error {\n\tif t.Response.Response == nil || t.Response.Response.Header == nil {\n\t\treturn fmt.Errorf(\"no proper response available\")\n\t}\n\tct := t.Response.Response.Header[\"Content-Type\"]\n\tif len(ct) == 0 {\n\t\treturn fmt.Errorf(\"no Content-Type header received\")\n\t}\n\tif len(ct) > 1 {\n\t\t// This is technically not a failure, but if 
someone sends\n\t\t// mutliple Content-Type headers something is a bit odd.\n\t\treturn fmt.Errorf(\"received %d Content-Type headers\", len(ct))\n\t}\n\tparts := strings.Split(ct[0], \";\")\n\tgot := strings.TrimSpace(parts[0])\n\twant := c.Is\n\tif strings.Index(want, \"/\") == -1 {\n\t\twant = \"/\" + want\n\t}\n\tif !strings.HasSuffix(got, want) {\n\t\treturn fmt.Errorf(\"Content-Type is %s\", ct[0])\n\t}\n\n\tif c.Charset != \"\" {\n\t\tif len(parts) < 2 {\n\t\t\treturn fmt.Errorf(\"no charset in %s\", ct[0])\n\t\t}\n\t\tgot := strings.TrimSpace(parts[1])\n\t\twant := \"charset=\" + c.Charset\n\t\tif got != want {\n\t\t\treturn fmt.Errorf(\"bad charset in %s\", ct[0])\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "bee8fdcbdcaa3116ae69abfc7ff299fb", "score": "0.58871555", "text": "func (o SpacesBucketObjectOutput) ContentType() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *SpacesBucketObject) pulumi.StringOutput { return v.ContentType }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "ae41cf37651cfadb4a593e403eaf4f89", "score": "0.5886839", "text": "func ContentTypeGuess(filename string, data []byte, data_size int64) (result_uncertain bool, return__ string) {\n\t__cgo__filename := (*C.gchar)(unsafe.Pointer(C.CString(filename)))\n\t__header__data := (*reflect.SliceHeader)(unsafe.Pointer(&data))\n\tvar __cgo__result_uncertain C.gboolean\n\tvar __cgo__return__ *C.gchar\n\t__cgo__return__ = C.g_content_type_guess(__cgo__filename, (*C.guchar)(unsafe.Pointer(__header__data.Data)), C.gsize(data_size), &__cgo__result_uncertain)\n\tC.free(unsafe.Pointer(__cgo__filename))\n\tresult_uncertain = __cgo__result_uncertain == C.gboolean(1)\n\treturn__ = C.GoString((*C.char)(unsafe.Pointer(__cgo__return__)))\n\treturn\n}", "title": "" }, { "docid": "2bc8988f057c8cd103762daca6d700e4", "score": "0.5886255", "text": "func GetFileContentsOfType(file string, extension string) (string, error) {\r\n\tfilename, err := GetValidatedFileName(file, extension)\r\n\tif err != nil {\r\n\t\treturn \"\", err\r\n\t}\r\n\r\n\tbody, err := GetFileContents(filename)\r\n\tif err != nil {\r\n\t\treturn \"\", err\r\n\t}\r\n\treturn body, nil\r\n}", "title": "" }, { "docid": "b9f2d2aef7d927b6d2e3c7ad284ca369", "score": "0.5886107", "text": "func (o *HttpEventAllOf) GetContentTypeOk() (*string, bool) {\n\tif o == nil || o.ContentType == nil {\n\t\treturn nil, false\n\t}\n\treturn o.ContentType, true\n}", "title": "" }, { "docid": "d8d15c36f2ee75cd663e667ca1eff345", "score": "0.5881774", "text": "func (t *Ting) ContentTypes() (int, response.JSend) {\n types, err := t.Backend.ContentTypes()\n if err != nil {\n return response.Error(err).Wrap()\n }\n\n return response.Success(types).Wrap()\n}", "title": "" }, { "docid": "92b372480ad11fb754dbcb636c9d69ab", "score": "0.5867431", "text": "func (m *MediaFile) FileType() fs.FileType {\n\treturn fs.GetFileType(m.fileName)\n}", "title": "" }, { "docid": "e4c95af8a411b228a675528f58cbee36", "score": "0.58666104", "text": "func Type(name string) error {\n\tf, err := os.Open(name)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"type cannot open the named file: %w: %s\", err, name)\n\t}\n\tdefer f.Close()\n\tdst := &bytes.Buffer{}\n\tinf, err := f.Stat()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"type cannot stat the named file: %w: %s\", err, name)\n\t}\n\t// DetectContentType only needs the first 512B of a file\n\tconst maxSize = int64(512)\n\tn := maxSize\n\tif inf.Size() < maxSize {\n\t\tn = inf.Size()\n\t}\n\tif _, err = io.CopyN(dst, f, n); err != nil {\n\t\treturn 
fmt.Errorf(\"type cannot copy the first %dB of the named file: %w: %s\", n, err, name)\n\t}\n\tmime := http.DetectContentType(dst.Bytes())\n\ts := strings.Split(mime, \"/\")\n\tif len(s) == 0 {\n\t\treturn fmt.Errorf(\"type cannot copy the named file: %w: %s\", err, name)\n\t}\n\tconst fallback = \"application/octet-stream\"\n\tif mime == fallback {\n\t\t// fallback is often returned by ANSI encoded files etc\n\t\treturn nil\n\t}\n\tif t := s[0]; t == \"text\" {\n\t\treturn nil\n\t}\n\treturn fmt.Errorf(\"%w: %s: %s\", ErrType, mime, name)\n}", "title": "" }, { "docid": "36e678091bc0f8de1865a3d6038ea0f1", "score": "0.58511496", "text": "func MimeType(extention string) (string, error) {\n\tconst op errors.Op = \"types.MimeType\"\n\n\tif mime, ok := Extentions[extention]; ok {\n\t\treturn mime, nil\n\t}\n\treturn \"\", errors.E(op, \"content type not supported\")\n}", "title": "" }, { "docid": "34def9d2ec194d9911891106a6964db0", "score": "0.58472073", "text": "func (fileType *fileType) Type() string {\n\treturn \"fileType\"\n}", "title": "" }, { "docid": "338dc407f62b15409c53af3de2d4e049", "score": "0.5820251", "text": "func matchesContentType(contentType, expectedType string) bool {\n\tmimetype, _, err := mime.ParseMediaType(contentType)\n\tif err != nil {\n\t\tlogrus.Errorf(\"Error parsing media type: %s error: %v\", contentType, err)\n\t}\n\treturn err == nil && mimetype == expectedType\n}", "title": "" }, { "docid": "2d836b52b30d26fcab6e1244997fa9c5", "score": "0.5812843", "text": "func (JSONSerializer) ContentType() string {\n\treturn \"application/json\"\n}", "title": "" }, { "docid": "7d01fadf9e878e5af43e46641833a77f", "score": "0.5811293", "text": "func MidContentType(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {\n\t\tct := req.Header.Get(\"Content-Type\")\n\t\tif ct == cjsonapi.MimeType {\n\t\t\tnext.ServeHTTP(rw, req)\n\t\t\treturn\n\t\t}\n\t\trw.WriteHeader(http.StatusUnsupportedMediaType)\n\t})\n}", "title": "" }, { "docid": "a0cfa99d1ba89c7e7037b9c8bba18549", "score": "0.57935023", "text": "func ContentType(t *testing.T, w *httptest.ResponseRecorder, contentType string) {\n\tHeader(t, w, \"Content-Type\", contentType)\n}", "title": "" }, { "docid": "17b0e8a6ef6d9a8be7b3b9b2a3661803", "score": "0.5782946", "text": "func (outputGenerator *HTMLOutputGenerator) ContentType() string {\n\treturn \"text/html\"\n}", "title": "" }, { "docid": "1e2d7c1e2aef9c086ed9f1251c8f6846", "score": "0.5781224", "text": "func (o *HttpEventAllOf) GetContentType() string {\n\tif o == nil || o.ContentType == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.ContentType\n}", "title": "" }, { "docid": "9b2327c3575e63b4cf49ed8dd856a004", "score": "0.5768983", "text": "func getCT(\n\theader map[string][]string,\n) (\n\tcontentType string,\n\tparams map[string]string,\n\tdecoder func(io.Reader) io.Reader,\n\terr error,\n) {\n\tdecoder = func(r io.Reader) io.Reader {\n\t\treturn r\n\t}\n\tcontentType = mail.Header(header).Get(\"Content-Type\")\n\tif contentType == \"\" {\n\t\treturn\n\t}\n\tvar nct string\n\tnct, params, err = mime.ParseMediaType(contentType)\n\tif err != nil {\n\t\terr = errors.Errorf(\"cannot parse Content-Type %s: %w\", contentType, err)\n\t\treturn\n\t}\n\tcontentType = nct\n\tte := strings.ToLower(mail.Header(header).Get(\"Content-Transfer-Encoding\"))\n\tswitch te {\n\tcase \"\":\n\tcase \"base64\":\n\t\tdecoder = func(r io.Reader) io.Reader {\n\t\t\t//return &b64ForceDecoder{Encoding: base64.StdEncoding, r: 
r}\n\t\t\t//return B64FilterReader(r, base64.StdEncoding)\n\t\t\treturn NewB64Decoder(base64.StdEncoding, r)\n\t\t}\n\tcase \"quoted-printable\":\n\t\tdecoder = func(r io.Reader) io.Reader {\n\t\t\tbr := bufio.NewReaderSize(r, 1024)\n\t\t\tfirst, _ := br.Peek(1024)\n\t\t\tenc := qprintable.BinaryEncoding\n\t\t\tif len(first) > 0 {\n\t\t\t\tenc = qprintable.DetectEncoding(string(first))\n\t\t\t}\n\t\t\treturn qprintable.NewDecoder(enc, br)\n\t\t}\n\tdefault:\n\t\tinfof(\"unknown transfer-encoding %q\", te)\n\t}\n\treturn\n}", "title": "" }, { "docid": "96a46e705d65973c3625b4abd989bc35", "score": "0.57664514", "text": "func GetFileContentType(file multipart.File) (string, error) {\n\t// Only the first 512 bytes are used to sniff the content type.\n\tbuffer := make([]byte, 512)\n\t_, err := file.Read(buffer)\n\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\t// Use the net/http package's handy DectectContentType function. Always returns a valid\n\t// content-type by returning \"application/octet-stream\" if no others seemed to match.\n\tcontentType := http.DetectContentType(buffer)\n\n\treturn contentType, nil\n}", "title": "" }, { "docid": "12a73575270d2c559650c091a46bd5b0", "score": "0.575908", "text": "func (s *FsObjectStore) DetectContentType(hash string) string {\n\tpath := filepath.Join(s.path, hash)\n\tf, err := os.Open(path)\n\tif err != nil {\n\t\treturn \"application/octet-stream\"\n\t}\n\tdefer f.Close()\n\t\n\t// Only the first 512 bytes are used to sniff the content type.\n\tb := make([]byte, 512)\n\t_, err = f.Read(b)\n\tif err != nil {\n\t\treturn \"application/octet-stream\"\n\t}\n\tcontentType := http.DetectContentType(b)\n\treturn contentType\n}", "title": "" }, { "docid": "6bde2e7e4c70bb3090ce83fbacd90c23", "score": "0.57528204", "text": "func (f *Media) findMimeType() error {\n\tif f.MimeType != \"\"{\n\t\treturn nil\n\t}\n\tlog.Debug(\"finding file extension for '\", f.Path, \"'\")\n\text := filepath.Ext(f.Path)\n\tlog.Debug(fmt.Sprintf(\"file has extension '%s'\", ext))\n\tf.MimeType = MediaTypeMapping[ext]\n\tif f.MimeType == \"\" {\n\t\treturn fmt.Errorf(\"upsupported media type, please refer '%s/api/media'\", FacebookSupportUrl)\n\t}\n\tlog.Debug(fmt.Sprintf(\"file has mime-type '%s'\", f.MimeType))\n\treturn nil\n}", "title": "" }, { "docid": "d48a7b91051333c1fe451a086b8247c0", "score": "0.57515854", "text": "func GetMIME(file string) (string, error) {\n\tbuffMIME, err := os.Open(file)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tbuff := make([]byte, 512)\n\t// why 512 bytes ? 
see http://golang.org/pkg/net/http/#DetectContentType\n\t_, err = buffMIME.Read(buff)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tfileType := http.DetectContentType(buff)\n\tbuffMIME.Close()\n\treturn fileType, nil\n}", "title": "" }, { "docid": "dd999ebdb449b1cbd65eeee1f50c5f2c", "score": "0.5749533", "text": "func OpenAPIMimeType(compression, version string) string {\n\treturn fmt.Sprintf(\"application/x.openapi%s;version=%s\", compression, version)\n}", "title": "" }, { "docid": "32389dc986d6bbe303d924733af30723", "score": "0.5742122", "text": "func (m *memfile) MIMEType() string {\n\treturn m.mime\n}", "title": "" }, { "docid": "153f0725ba7d9db3ad205793f6bd2497", "score": "0.57396996", "text": "func MetaForContentType(ctype string) map[string]string {\n\treturn map[string]string{\"Content-Type\": ctype}\n}", "title": "" }, { "docid": "b05e8dd4f549a5d8904874d2c2fca1f7", "score": "0.57299125", "text": "func (r *Response) GetContentType() string {\n\tif r.Response == nil {\n\t\treturn \"\"\n\t}\n\treturn r.Header.Get(header.ContentType)\n}", "title": "" }, { "docid": "f481844c7f2c4c25c2551e38f66c936a", "score": "0.5718697", "text": "func (JSONSerializer) ContentType() string {\n\treturn \"application/json;charset=UTF-8\"\n}", "title": "" }, { "docid": "0a014f62ada81b0794f6ed436a397144", "score": "0.57022", "text": "func validateContent(resp *http.Response) (mime string, body []byte, err error) {\n\tmime = resp.Header.Get(\"Content-Type\")\n\tif mime == \"\" {\n\t\tmime = \"application/octet-stream\"\n\t}\n\tif i := strings.Index(mime, \";\"); i >= 0 {\n\t\tmime = mime[:i]\n\t}\n\n\tif !strings.HasPrefix(mime, \"text\") {\n\t\t// If this is not a text document there is no point reading the body\n\t\treturn mime, nil, nil\n\t}\n\n\tbuf := bytes.Buffer{}\n\t// TODO this should be limited to a sane max length\n\tif _, err := buf.ReadFrom(resp.Body); err != nil {\n\t\treturn \"\", nil, err\n\t}\n\n\treturn mime, buf.Bytes(), nil\n}", "title": "" }, { "docid": "b349ea224aef2984798d5f753210efce", "score": "0.57001644", "text": "func fileNameToContentType(fileName string) string {\n\textPeriod := strings.LastIndex(fileName, \".\")\n\tif extPeriod < 0 {\n\t\treturn \"application/octet-stream\"\n\t}\n\n\tmimeType := mime.TypeByExtension(fileName[extPeriod:])\n\tif mimeType == \"\" {\n\t\treturn \"application/octet-stream\"\n\t}\n\treturn mimeType\n}", "title": "" }, { "docid": "2eee90f6a8ca9cc4cf2b4e93405c3af4", "score": "0.5694324", "text": "func (o ProjectFilePropertiesResponseOutput) MediaType() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ProjectFilePropertiesResponse) *string { return v.MediaType }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "4a70c028858c322b51180f4ff8f0da1a", "score": "0.5676638", "text": "func (hu *HostUtil) GetFileType(pathname string) (FileType, error) {\n\treturn FileType(\"fake\"), errUnsupported\n}", "title": "" }, { "docid": "cdca45402ebaf068688a5374ad2d1ad1", "score": "0.5674037", "text": "func DetectFileType(fn string) (typ string, err kv.Error) {\n\tfile, errOs := os.Open(filepath.Clean(fn))\n\tif errOs != nil {\n\t\treturn \"\", kv.Wrap(errOs).With(\"filename\", fn).With(\"stack\", stack.Trace().TrimRuntime())\n\t}\n\tdefer file.Close()\n\n\t// Only the first 512 bytes are used to sniff the content type.\n\tbuffer := make([]byte, 512)\n\tif _, errOs = file.Read(buffer); errOs != nil && errOs != io.EOF {\n\t\treturn \"\", kv.Wrap(errOs).With(\"filename\", fn).With(\"stack\", stack.Trace().TrimRuntime())\n\t}\n\n\t// Always returns a valid 
content-type and \"application/octet-stream\" if no others seemed to match.\n\treturn http.DetectContentType(buffer), nil\n}", "title": "" }, { "docid": "2e51616f6d4ab4acb9c63c7e2b238943", "score": "0.5665677", "text": "func (r *Request) ContentType(contentType string) Interface {\n\tr.header.Add(header.ContentType, contentType)\n\treturn r\n}", "title": "" }, { "docid": "bebfa942f218203e339a0f9f8ded941d", "score": "0.5663408", "text": "func NormalizedMIMEType(input string) string {\n\tswitch input {\n\t// \"application/json\" is a valid v2s1 value per https://github.com/docker/distribution/blob/master/docs/spec/manifest-v2-1.md .\n\t// This works for now, when nothing else seems to return \"application/json\"; if that were not true, the mapping/detection might\n\t// need to happen within the ImageSource.\n\tcase \"application/json\":\n\t\treturn DockerV2Schema1SignedMediaType\n\tcase DockerV2Schema1MediaType, DockerV2Schema1SignedMediaType,\n\t\timgspecv1.MediaTypeImageManifest,\n\t\tDockerV2Schema2MediaType,\n\t\tDockerV2ListMediaType:\n\t\treturn input\n\tdefault:\n\t\t// If it's not a recognized manifest media type, or we have failed determining the type, we'll try one last time\n\t\t// to deserialize using v2s1 as per https://github.com/docker/distribution/blob/master/manifests.go#L108\n\t\t// and https://github.com/docker/distribution/blob/master/manifest/schema1/manifest.go#L50\n\t\t//\n\t\t// Crane registries can also return \"text/plain\", or pretty much anything else depending on a file extension “recognized” in the tag.\n\t\t// This makes no real sense, but it happens\n\t\t// because requests for manifests are\n\t\t// redirected to a content distribution\n\t\t// network which is configured that way. See https://bugzilla.redhat.com/show_bug.cgi?id=1389442\n\t\treturn DockerV2Schema1SignedMediaType\n\t}\n}", "title": "" }, { "docid": "8c6a83d41597fd72d5496f6ab41bc9ea", "score": "0.56634", "text": "func IsFileType(b bundle.Bundle, def *definition.Schema) bool {\n\treturn SupportsFileParameters(b) &&\n\t\tdef.Type == \"string\" && def.ContentEncoding == \"base64\"\n}", "title": "" } ]
9647b873210526979d097a3177472b54
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
[ { "docid": "18831d64bf7e0ccb98f65545f3e17d0b", "score": "0.0", "text": "func (in *ConditionalBranch) DeepCopyInto(out *ConditionalBranch) {\n\t*out = *in\n}", "title": "" } ]
[ { "docid": "6d3c7f706ada34b172e55056fd08da9b", "score": "0.82795733", "text": "func (in *Ipxe) DeepCopyInto(out *Ipxe) {\n\t*out = *in\n}", "title": "" }, { "docid": "1c2d70d782e9a1af879ef1c56794f835", "score": "0.8241801", "text": "func (in *Data) DeepCopyInto(out *Data) {\n\t*out = *in\n}", "title": "" }, { "docid": "c6850f95c4b786dfb720021fee99aac3", "score": "0.81081915", "text": "func (in *Version) DeepCopyInto(out *Version) {\n\t*out = *in\n}", "title": "" }, { "docid": "ba7c8360408ac096e3adb773d7271afe", "score": "0.810772", "text": "func (in *Fluentd) DeepCopyInto(out *Fluentd) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "b819c01a4847e2b7250bf24ee066a2a6", "score": "0.8098036", "text": "func (in *PachydermRestore) DeepCopyInto(out *PachydermRestore) {\n\t*out = *in\n}", "title": "" }, { "docid": "d25ee8020a516841733ada81b932cfb8", "score": "0.80924106", "text": "func (in *Osie) DeepCopyInto(out *Osie) {\n\t*out = *in\n}", "title": "" }, { "docid": "ca54ebd80220b5981408fecbbe316176", "score": "0.8091042", "text": "func (in *DBInfo) DeepCopyInto(out *DBInfo) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "fd68cc5c404d65fa5c03c64949665da7", "score": "0.80908763", "text": "func (in *Size) DeepCopyInto(out *Size) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "be58e6c182cda9967f881999acd59c2f", "score": "0.80799496", "text": "func (in *Debug) DeepCopyInto(out *Debug) {\n\t*out = *in\n}", "title": "" }, { "docid": "afe2a3f4b3b9253dedc6bb9e45e3d492", "score": "0.8079581", "text": "func (in *AchievedStep) DeepCopyInto(out *AchievedStep) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "566b517300fca63a0f999b4e20011d0f", "score": "0.8077623", "text": "func (in *Keto) DeepCopyInto(out *Keto) {\n\t*out = *in\n}", "title": "" }, { "docid": "0402379222701ce5cbc47a58147b675c", "score": "0.80757505", "text": "func (in *EclipseChe) DeepCopyInto(out *EclipseChe) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "8c157f4759e96806dd0b74060472d480", "score": "0.80640954", "text": "func (in *Node) DeepCopyInto(out *Node) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "8c157f4759e96806dd0b74060472d480", "score": "0.80640954", "text": "func (in *Node) DeepCopyInto(out *Node) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "a082503dcbc413c6f686bfa34ed34463", "score": "0.80567133", "text": "func (in *NodeInterface) DeepCopyInto(out *NodeInterface) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "d4907b7e87f96cc9f592e887afb9927d", "score": "0.8052064", "text": "func (in *Node) DeepCopyInto(out *Node) {\n\t*out = *in\n}", "title": "" }, { "docid": "bf7b4b522057ae01ab0d93d4522fe30a", "score": "0.80489683", "text": "func (in *ObjectRef) DeepCopyInto(out *ObjectRef) {\n\t*out = *in\n}", "title": "" }, { "docid": "4d63c7d1929a6b53ca46be5738e434a5", "score": "0.8031264", "text": "func (in *OutputEntry) DeepCopyInto(out *OutputEntry) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "6eec4c5e3d639e136aafd047e93f99cd", "score": "0.8030623", "text": "func (in *JSONCheck) DeepCopyInto(out *JSONCheck) {\n\t*out = *in\n}", "title": "" }, { "docid": "1c3455c7f0a06e92261a5b10b622ca06", "score": "0.8025656", "text": "func (in *Fateflow) DeepCopyInto(out *Fateflow) {\n\t*out = *in\n}", "title": "" }, { "docid": "5b24c3ee0cabe62c71b4995b5197f3e8", "score": "0.8021609", "text": "func (in *ObjectSelector) DeepCopyInto(out *ObjectSelector) {\n\t*out = *in\n}", "title": "" }, { "docid": "554e9c3fc35ac1866e0c1f4c3107b730", "score": "0.80118173", 
"text": "func (in *PathTest) DeepCopyInto(out *PathTest) {\n\t*out = *in\n}", "title": "" }, { "docid": "2f2cf86d90535cd828decdd6d982e087", "score": "0.8003635", "text": "func (in *Output) DeepCopyInto(out *Output) {\n\t*out = *in\n\tif in.output != nil {\n\t\tin, out := &in.output, &out.output\n\t\t*out = make([]OutputEntry, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "title": "" }, { "docid": "e1c2499dce71eea1ba530425d987e65d", "score": "0.8000427", "text": "func (in *OutputType) DeepCopyInto(out *OutputType) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "873314226084acb05455ef131ac0e4e6", "score": "0.7989827", "text": "func (in *Nodeport) DeepCopyInto(out *Nodeport) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "3dfe5656fa02ed58112cacdc9eac4f47", "score": "0.7980265", "text": "func (in *VMSpec) DeepCopyInto(out *VMSpec) {\n\t*out = *in\n}", "title": "" }, { "docid": "8edce588d322f861d4f46b2f3aae92a8", "score": "0.7979291", "text": "func (in *TaskRef) DeepCopyInto(out *TaskRef) {\n\t*out = *in\n}", "title": "" }, { "docid": "8edce588d322f861d4f46b2f3aae92a8", "score": "0.7979291", "text": "func (in *TaskRef) DeepCopyInto(out *TaskRef) {\n\t*out = *in\n}", "title": "" }, { "docid": "09a622ae41a2172d4fd07d376190f880", "score": "0.79733783", "text": "func (in *Runtime) DeepCopyInto(out *Runtime) {\n\t*out = *in\n}", "title": "" }, { "docid": "be82863d9978b104237d2472cfdf3614", "score": "0.7960795", "text": "func (in *CrossVersionObjectReference) DeepCopyInto(out *CrossVersionObjectReference) {\n\t*out = *in\n}", "title": "" }, { "docid": "fcd61e82f34db96b76efba6276644908", "score": "0.79570514", "text": "func (in *ObjectSelector) DeepCopyInto(out *ObjectSelector) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "b37c436bd18ef0f2ea068cd1e94529fb", "score": "0.79538494", "text": "func (in *Partitions) DeepCopyInto(out *Partitions) {\n\t*out = *in\n}", "title": "" }, { "docid": "ee7f1891e9e6a3a794787c1609ae9fd7", "score": "0.79529464", "text": "func (in *Exchange) DeepCopyInto(out *Exchange) {\n\t*out = *in\n}", "title": "" }, { "docid": "a62d2491925276d66a89576351b7bdb4", "score": "0.7941823", "text": "func (in *Replicas) DeepCopyInto(out *Replicas) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "77fc7d629fd026ad04ef51dfccebef4b", "score": "0.7941193", "text": "func (in *LiveRef) DeepCopyInto(out *LiveRef) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "ac71660e3edf3bf0db01580f6440913a", "score": "0.7940609", "text": "func (in *L2FibEntry) DeepCopyInto(out *L2FibEntry) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "9a15d89300bd7975c23b970cd7e97f3d", "score": "0.79287124", "text": "func (in *File) DeepCopyInto(out *File) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "bf6832a027499f414e4fefd23952a785", "score": "0.79269856", "text": "func (in *ZookeeperNode) DeepCopyInto(out *ZookeeperNode) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "b7c82ab7f0b3eea65c1bf495c6e516a5", "score": "0.79161805", "text": "func (in *BackupProgress) DeepCopyInto(out *BackupProgress) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "57d7989b793af6d5839b019cc8c81aaa", "score": "0.7914557", "text": "func (in *ZookeeperNode) DeepCopyInto(out *ZookeeperNode) {\n\t*out = *in\n}", "title": "" }, { "docid": "6a74df2be2dbe701a0ee65d8e03053f6", "score": "0.79130125", "text": "func (in *IPXE) DeepCopyInto(out *IPXE) {\n\t*out = *in\n}", "title": "" }, { "docid": "ebde97abbe9aa8149c7ca6159e15bd33", "score": "0.79022104", "text": 
"func (in *CPUAndMem) DeepCopyInto(out *CPUAndMem) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "0153348545a3d712d327a2aac1b6c396", "score": "0.789811", "text": "func (in *HostPort) DeepCopyInto(out *HostPort) {\n\t*out = *in\n}", "title": "" }, { "docid": "6502eb4511d14b46184710050bf0959d", "score": "0.7896113", "text": "func (in *Docker) DeepCopyInto(out *Docker) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "a00993b464b505c32d052ed0ac29e929", "score": "0.789424", "text": "func (in *RolloutStrategyStep) DeepCopyInto(out *RolloutStrategyStep) {\n\t*out = *in\n\tout.Capacity = in.Capacity\n\tout.Traffic = in.Traffic\n\treturn\n}", "title": "" }, { "docid": "8cb26178b91e9e46e2760b02c39719bd", "score": "0.7893756", "text": "func (in *FileInfo) DeepCopyInto(out *FileInfo) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "6ac1005d81e338e324c2b485bacbcd96", "score": "0.789106", "text": "func (in *FileSourceCommandOutput) DeepCopyInto(out *FileSourceCommandOutput) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "dddf435977ebcd6bfaf0b4c062ea8042", "score": "0.78905755", "text": "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n}", "title": "" }, { "docid": "dddf435977ebcd6bfaf0b4c062ea8042", "score": "0.78905755", "text": "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n}", "title": "" }, { "docid": "7e9a1e6442b0e3e96a5fbeee635960c2", "score": "0.7887171", "text": "func (in *TiltBuild) DeepCopyInto(out *TiltBuild) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "23db226639df1ec5bfce6ed220d55ed3", "score": "0.78859967", "text": "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n}", "title": "" }, { "docid": "a65c4b2c1d2be68d2c000a97d8eedfc2", "score": "0.7884897", "text": "func (in *Custom) DeepCopyInto(out *Custom) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "a80bfc6073fb7569830178e573ccf4ff", "score": "0.7884449", "text": "func (in *CanaryStepInfo) DeepCopyInto(out *CanaryStepInfo) {\n\t*out = *in\n\tif in.Pods != nil {\n\t\tin, out := &in.Pods, &out.Pods\n\t\t*out = make([]Pod, len(*in))\n\t\tcopy(*out, *in)\n\t}\n}", "title": "" }, { "docid": "834926315fc14d71c5b8c19960384fb8", "score": "0.7882781", "text": "func (in *NodeToDelete) DeepCopyInto(out *NodeToDelete) {\n\t*out = *in\n}", "title": "" }, { "docid": "df764ce7904760faaa8b2c0ff322edf9", "score": "0.7880429", "text": "func (in *NamespacedObjectReference) DeepCopyInto(out *NamespacedObjectReference) {\n\t*out = *in\n}", "title": "" }, { "docid": "b0f2d00d7b4d64e85270beb9b90cd6ab", "score": "0.7879534", "text": "func (in *KoupletObjectStorage) DeepCopyInto(out *KoupletObjectStorage) {\n\t*out = *in\n}", "title": "" }, { "docid": "786362133c8c7c161acd95331377635b", "score": "0.78765666", "text": "func (in *NoteAffectedVersionEnd) DeepCopyInto(out *NoteAffectedVersionEnd) {\n\t*out = *in\n\tif in.Epoch != nil {\n\t\tin, out := &in.Epoch, &out.Epoch\n\t\t*out = new(int)\n\t\t**out = **in\n\t}\n\tif in.FullName != nil {\n\t\tin, out := &in.FullName, &out.FullName\n\t\t*out = new(string)\n\t\t**out = **in\n\t}\n\tif in.Name != nil {\n\t\tin, out := &in.Name, &out.Name\n\t\t*out = new(string)\n\t\t**out = **in\n\t}\n\tif in.Revision != nil {\n\t\tin, out := &in.Revision, &out.Revision\n\t\t*out = new(string)\n\t\t**out = **in\n\t}\n\treturn\n}", "title": "" }, { "docid": "dfd9b82d38c6fee08dd3cf0209bd1133", "score": "0.7875957", "text": "func (in *Disk) DeepCopyInto(out *Disk) {\n\t*out = *in\n}", "title": "" }, { "docid": 
"fa756ae28922fed925a87b236587b156", "score": "0.7875656", "text": "func (in *Git) DeepCopyInto(out *Git) {\n\t*out = *in\n}", "title": "" }, { "docid": "d04effc735adc59932f839adb9a5fcca", "score": "0.78715914", "text": "func (in *Target) DeepCopyInto(out *Target) {\n\t*out = *in\n}", "title": "" }, { "docid": "87da18a2ecb0daf97f6ceebabd38dbce", "score": "0.7871561", "text": "func (in *ArgoCDDexSpec) DeepCopyInto(out *ArgoCDDexSpec) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "c20aacaaa0cb697cdf50015f05812d0b", "score": "0.7869766", "text": "func (in *Persistent) DeepCopyInto(out *Persistent) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "1e1088954cae99b323e1b34730ad0490", "score": "0.7865111", "text": "func (in *Proxy) DeepCopyInto(out *Proxy) {\n\t*out = *in\n}", "title": "" }, { "docid": "a676d91e11264164f55179f00108d1a4", "score": "0.7864939", "text": "func (in *IPamEntry) DeepCopyInto(out *IPamEntry) {\n\t*out = *in\n\tout.Config = in.Config\n\treturn\n}", "title": "" }, { "docid": "130014cc93330670791582c6062e4db7", "score": "0.78648865", "text": "func (in *RestoreOptions) DeepCopyInto(out *RestoreOptions) {\n\t*out = *in\n\tout.Pachyderm = in.Pachyderm\n}", "title": "" }, { "docid": "5186b8d1bfd3abf2fb983694f27e4aed", "score": "0.7863912", "text": "func (in *Image) DeepCopyInto(out *Image) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "249004da5f8f92f3538948abf74c7c43", "score": "0.7863844", "text": "func (in *ChainMatch) DeepCopyInto(out *ChainMatch) {\n\t*out = *in\n}", "title": "" }, { "docid": "e39896940442e2145aef5b05f65f5935", "score": "0.78631425", "text": "func (in *PodInfo) DeepCopyInto(out *PodInfo) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "e6970237e6216981baddfbdbd7abccc6", "score": "0.7862662", "text": "func (in *Tap) DeepCopyInto(out *Tap) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "7cc05f62bb39d146b68b80d858d6a207", "score": "0.7862287", "text": "func (in *Vector) DeepCopyInto(out *Vector) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "a510fbe7bc41427b104d0eaa08202270", "score": "0.78600705", "text": "func (in *Host) DeepCopyInto(out *Host) {\n\t*out = *in\n}", "title": "" }, { "docid": "e315197e0d9ead19327c6af4ed12a3cb", "score": "0.7856319", "text": "func (in *Run) DeepCopyInto(out *Run) {\n\t*out = *in\n\tin.CollectorMeta.DeepCopyInto(&out.CollectorMeta)\n\tif in.Command != nil {\n\t\tin, out := &in.Command, &out.Command\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Args != nil {\n\t\tin, out := &in.Args, &out.Args\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.ImagePullSecret != nil {\n\t\tin, out := &in.ImagePullSecret, &out.ImagePullSecret\n\t\t*out = new(ImagePullSecrets)\n\t\t(*in).DeepCopyInto(*out)\n\t}\n}", "title": "" }, { "docid": "5e7b3746576fd5b9b7b41728c6111924", "score": "0.78550553", "text": "func (in *UnidleInfo) DeepCopyInto(out *UnidleInfo) {\n\t*out = *in\n\tout.CrossGroupObjectReference = in.CrossGroupObjectReference\n\treturn\n}", "title": "" }, { "docid": "96083041971f9626da3004536999b74d", "score": "0.7855008", "text": "func (in *Instance) DeepCopyInto(out *Instance) {\n\t*out = *in\n\tout.Info = in.Info\n\treturn\n}", "title": "" }, { "docid": "2a009d2984c86675e264e6dada29ff80", "score": "0.78547835", "text": "func (in *KV) DeepCopyInto(out *KV) {\n\t*out = *in\n}", "title": "" }, { "docid": "ea91a35f5981150f1fd702f1fe925417", "score": "0.7852249", "text": "func (in *FFTEstimator) DeepCopyInto(out *FFTEstimator) 
{\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "da7f24e673d9d09df8d3a9d9f9ee7150", "score": "0.78519595", "text": "func (in *ClusterObjectVersion) DeepCopyInto(out *ClusterObjectVersion) {\n\t*out = *in\n}", "title": "" }, { "docid": "e4913de28c2145467caf2d40a4f1654c", "score": "0.7849457", "text": "func (in *ConsoleType) DeepCopyInto(out *ConsoleType) {\n\t*out = *in\n}", "title": "" }, { "docid": "b0adf7eb2ad49b2db1b146fe5f0c63c8", "score": "0.78473425", "text": "func (in *DataToBackup) DeepCopyInto(out *DataToBackup) {\n\t*out = *in\n\tif in.Paths != nil {\n\t\tin, out := &in.Paths, &out.Paths\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "title": "" }, { "docid": "24bd47aa15850c3e5d10ce0c3cdb5c1a", "score": "0.78445554", "text": "func (in *TwinProperty) DeepCopyInto(out *TwinProperty) {\n\t*out = *in\n\tif in.Metadata != nil {\n\t\tin, out := &in.Metadata, &out.Metadata\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "a79f2a418acb2bd0e96737a5c3c73fa6", "score": "0.7843635", "text": "func (in *SetupNode) DeepCopyInto(out *SetupNode) {\n\t*out = *in\n\tout.Ccow = in.Ccow\n\tin.Ccowd.DeepCopyInto(&out.Ccowd)\n\tout.Auditd = in.Auditd\n\tif in.ClusterNodes != nil {\n\t\tin, out := &in.ClusterNodes, &out.ClusterNodes\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tin.Rtrd.DeepCopyInto(&out.Rtrd)\n\tif in.RtrdSlaves != nil {\n\t\tin, out := &in.RtrdSlaves, &out.RtrdSlaves\n\t\t*out = make([]RTDevices, len(*in))\n\t\tfor i := range *in {\n\t\t\t(*in)[i].DeepCopyInto(&(*out)[i])\n\t\t}\n\t}\n\tin.Rtlfs.DeepCopyInto(&out.Rtlfs)\n\tin.Rtkvs.DeepCopyInto(&out.Rtkvs)\n\treturn\n}", "title": "" }, { "docid": "906c07db0ca76d2700f3f766a3da7142", "score": "0.78421164", "text": "func (in *ConfigLookup) DeepCopyInto(out *ConfigLookup) {\n\t*out = *in\n\tif in.Config != nil {\n\t\tin, out := &in.Config, &out.Config\n\t\t*out = new(Config)\n\t\t(*in).DeepCopyInto(*out)\n\t}\n\tout.Display = in.Display\n}", "title": "" }, { "docid": "7cae9d5836b9ff88d8f2685e755e15d2", "score": "0.7839457", "text": "func (in *Mirroring) DeepCopyInto(out *Mirroring) {\n\t*out = *in\n\tif in.MaxBodySize != nil {\n\t\tin, out := &in.MaxBodySize, &out.MaxBodySize\n\t\t*out = new(int64)\n\t\t**out = **in\n\t}\n\tif in.Mirrors != nil {\n\t\tin, out := &in.Mirrors, &out.Mirrors\n\t\t*out = make([]MirrorService, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.HealthCheck != nil {\n\t\tin, out := &in.HealthCheck, &out.HealthCheck\n\t\t*out = new(HealthCheck)\n\t\t**out = **in\n\t}\n\treturn\n}", "title": "" }, { "docid": "2dc7abef201d917589f010f613212296", "score": "0.78389263", "text": "func (in *ArgoCDTLSSpec) DeepCopyInto(out *ArgoCDTLSSpec) {\n\t*out = *in\n\tout.CA = in.CA\n\treturn\n}", "title": "" }, { "docid": "a77814382dc5efa98656ab59390c4583", "score": "0.7836989", "text": "func (in *KeyValue) DeepCopyInto(out *KeyValue) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "87b2689ec313f65bf8b34547d5fe1c22", "score": "0.78360295", "text": "func (in *ImpersonationConfig) DeepCopyInto(out *ImpersonationConfig) {\n\t*out = *in\n\tif in.Groups != nil {\n\t\tin, out := &in.Groups, &out.Groups\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\tif in.Extra != nil {\n\t\tin, out := &in.Extra, &out.Extra\n\t\t*out = make(map[string][]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\tif val == nil {\n\t\t\t\t(*out)[key] = nil\n\t\t\t} else 
{\n\t\t\t\t(*out)[key] = make([]string, len(val))\n\t\t\t\tcopy((*out)[key], val)\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "3ba2a8b441a37bfd038c30f2cd4bd608", "score": "0.7835628", "text": "func (in *NoteBuild) DeepCopyInto(out *NoteBuild) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "bef878ec968416a156e7ebe2543d4ca2", "score": "0.7834272", "text": "func (in *KeyToPath) DeepCopyInto(out *KeyToPath) {\n\t*out = *in\n}", "title": "" }, { "docid": "8fdfc31c802e3f9c9a872e578c5339d3", "score": "0.78325194", "text": "func (in *ArgoCDConfig) DeepCopyInto(out *ArgoCDConfig) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "f5023cb13be74cf25182a7183395c30a", "score": "0.78314745", "text": "func (in *ObjectFieldSelector) DeepCopyInto(out *ObjectFieldSelector) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "cbb68d49fd2c1144f25e0b31d5dc670a", "score": "0.7828922", "text": "func (in *BdInterface) DeepCopyInto(out *BdInterface) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "8e14ffa5d59641d2c993b17ea5ba3246", "score": "0.7828464", "text": "func (in *Delegate) DeepCopyInto(out *Delegate) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "74a105a690c14ec8e268cba9618912f1", "score": "0.78267485", "text": "func (in *K8sVersion) DeepCopyInto(out *K8sVersion) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "9ec11989c5ce05665e2c4f7c6180862a", "score": "0.7826527", "text": "func (in *NodepoolGvnic) DeepCopyInto(out *NodepoolGvnic) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "783f47424c191839399d4e44b906dd71", "score": "0.78259516", "text": "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "783f47424c191839399d4e44b906dd71", "score": "0.78259516", "text": "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "783f47424c191839399d4e44b906dd71", "score": "0.78259516", "text": "func (in *Port) DeepCopyInto(out *Port) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "8ff4edb62aac78b649c6382f24a79f61", "score": "0.78258586", "text": "func (in *Database) DeepCopyInto(out *Database) {\n\t*out = *in\n\treturn\n}", "title": "" }, { "docid": "0d0b1cb857e61e9b21e4a27a1ea1d959", "score": "0.78243697", "text": "func (in *ExternalData) DeepCopyInto(out *ExternalData) {\n\t*out = *in\n}", "title": "" }, { "docid": "dc9cccc7cfa6b224aed958820b379d64", "score": "0.78228027", "text": "func (in *Blob) DeepCopyInto(out *Blob) {\n\t*out = *in\n}", "title": "" }, { "docid": "5787cd6847ddd85de272065e4c4ef750", "score": "0.7821951", "text": "func (in *GitRef) DeepCopyInto(out *GitRef) {\n\t*out = *in\n\treturn\n}", "title": "" } ]
a8da0dffc5266dc1e4a1e4b1d9f102b4
SetObject sets field value
[ { "docid": "2b2fe4be690c42fec51b81e905b04b4d", "score": "0.0", "text": "func (o *CheckoutResponse) SetObject(v string) {\n\to.Object = v\n}", "title": "" } ]
[ { "docid": "9db76222b8fa982fcec4f95a934f10d7", "score": "0.7337963", "text": "func SetObjectField(env *C.JNIEnv, obj C.jobject, fieldID C.jfieldID, val C.jobject) {\n\tC._GoJniSetObjectField(env, obj, fieldID, val)\n}", "title": "" }, { "docid": "6f16b98b1e365da98966b3d98787c9db", "score": "0.7075321", "text": "func (obj *Value) SetObject(v IObject) {\n\tobj.Candy().Guify(\"g_value_set_object\", obj, v)\n}", "title": "" }, { "docid": "8fedccfd961e98babe2f3043796af7f7", "score": "0.70123667", "text": "func (suite *TestModelsSuite) TestSetObjectField() {\n\tjobConfigRecord := &JobConfigRecord{\n\t\tVersion: 1,\n\t}\n\terr := SetObjectField(jobConfigRecord, \"Version\", 2)\n\tsuite.NoError(err)\n\tsuite.Equal(2, jobConfigRecord.Version)\n\n\t// Test setting invalid field\n\terr = SetObjectField(jobConfigRecord, \"name\", \"myjob\")\n\tsuite.Error(err)\n\n\t// Test setting field to incorrect type\n\terr = SetObjectField(jobConfigRecord, \"Version\", \"invalid\")\n\tsuite.Error(err)\n}", "title": "" }, { "docid": "ea30ad4895068ac5ce2b5f57e11c76c4", "score": "0.70006746", "text": "func (a *chunkBasedAtlas) setObject(chunk int, local maths.Vector, object Object) {\n\ta.populate(chunk)\n\n\tc := a.Chunks[chunk]\n\ti := a.chunkTileIndex(local)\n\tif object.Type != roveapi.Object_ObjectUnknown {\n\t\tc.Objects[i] = object\n\t} else {\n\t\tdelete(c.Objects, i)\n\t}\n\ta.Chunks[chunk] = c\n}", "title": "" }, { "docid": "b9d0dbe548703c90b7f0df8a77343e48", "score": "0.68733865", "text": "func (recv *Value) SetObject(vObject *Object) {\n\tc_v_object := (C.gpointer)(C.NULL)\n\tif vObject != nil {\n\t\tc_v_object = (C.gpointer)(vObject.ToC())\n\t}\n\n\tC.g_value_set_object((*C.GValue)(recv.native), c_v_object)\n\n\treturn\n}", "title": "" }, { "docid": "b080cf99e4882f6428bc618c3b379421", "score": "0.6856059", "text": "func SetField(obj interface{}, name string, value interface{}) error {\n\tstructValue := reflect.ValueOf(obj).Elem()\n\tstructFieldValue := structValue.FieldByName(name)\n\n\tif !structFieldValue.IsValid() {\n\t\treturn fmt.Errorf(\"No such field: %s in obj\", name)\n\t}\n\n\tif !structFieldValue.CanSet() {\n\t\treturn fmt.Errorf(\"Cannot set %s field value\", name)\n\t}\n\n\tstructFieldType := structFieldValue.Type()\n\tval := reflect.ValueOf(value)\n\tif structFieldType != val.Type() {\n\t\treturn errors.New(\"Provided value type didn't match obj field type\")\n\t}\n\n\tstructFieldValue.Set(val)\n\treturn nil\n}", "title": "" }, { "docid": "54d8e262ec57a2081d5c925f44d52b1d", "score": "0.681931", "text": "func (pr *Params) SetObject(objName string) error {\n\terr := pr.SetObjectSet(objName, \"Base\")\n\tif pr.ExtraSets != \"\" && pr.ExtraSets != \"Base\" {\n\t\tsps := strings.Fields(pr.ExtraSets)\n\t\tfor _, ps := range sps {\n\t\t\terr = pr.SetObjectSet(objName, ps)\n\t\t}\n\t}\n\treturn err\n}", "title": "" }, { "docid": "5dfaab37dcdd8a211d25bc113ac2e671", "score": "0.6742789", "text": "func (v *JSONValue) SetObject(val map[string]*JSONValue) {\n\n\tvar i int\n\tv.RawMessage = []byte{'{'}\n\tfor key, value := range val {\n\n\t\tif i != 0 {\n\t\t\tv.RawMessage = append(v.RawMessage, ',')\n\t\t}\n\t\ti++\n\n\t\tv.RawMessage = append(v.RawMessage, '\"')\n\t\tv.RawMessage = append(v.RawMessage, key...)\n\t\tv.RawMessage = append(v.RawMessage, '\"', ':')\n\t\tv.RawMessage = append(v.RawMessage, value.RawMessage...)\n\t}\n\tv.RawMessage = append(v.RawMessage, '}')\n\n\tv.valObject = val\n\tv.dataType = objectDataType\n}", "title": "" }, { "docid": "61470f6131395bb102f2cf919d3ebd0b", "score": 
"0.67416894", "text": "func (o *ObjectRef) SetField(env *Env, fieldName string, value interface{}) error {\n\tclass, err := o.getClass(env)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvType, vClassName, err := typeOfValue(value)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar fieldSig string\n\tif env.preCalcSig != \"\" {\n\t\tfieldSig = env.preCalcSig\n\t\tenv.preCalcSig = \"\"\n\t} else {\n\t\tfieldSig = typeSignature(vType, vClassName)\n\t}\n\n\tfid, err := env.callGetFieldID(false, class, fieldName, fieldSig)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tswitch v := value.(type) {\n\tcase bool:\n\t\tsetBooleanField(env.jniEnv, o.jobject, fid, fromBool(v))\n\tcase byte:\n\t\tsetByteField(env.jniEnv, o.jobject, fid, jbyte(v))\n\tcase uint16:\n\t\tsetCharField(env.jniEnv, o.jobject, fid, jchar(v))\n\tcase int16:\n\t\tsetShortField(env.jniEnv, o.jobject, fid, jshort(v))\n\tcase int32:\n\t\tsetIntField(env.jniEnv, o.jobject, fid, jint(v))\n\tcase int:\n\t\tsetIntField(env.jniEnv, o.jobject, fid, jint(int32(v)))\n\tcase int64:\n\t\tsetLongField(env.jniEnv, o.jobject, fid, jlong(v))\n\tcase float32:\n\t\tsetFloatField(env.jniEnv, o.jobject, fid, jfloat(v))\n\tcase float64:\n\t\tsetDoubleField(env.jniEnv, o.jobject, fid, jdouble(v))\n\tcase jobj:\n\t\tsetObjectField(env.jniEnv, o.jobject, fid, v.jobj())\n\tcase []bool, []byte, []int16, []uint16, []int32, []int, []int64, []float32, []float64:\n\t\tarray, err := env.toJavaArray(v)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer deleteLocalRef(env.jniEnv, array)\n\t\tsetObjectField(env.jniEnv, o.jobject, fid, jobject(array))\n\tdefault:\n\t\treturn errors.New(\"JNIGI unknown field value\")\n\t}\n\n\tif env.exceptionCheck() {\n\t\treturn env.handleException()\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "505da7997ac95f980ef5a19d4f97c119", "score": "0.6720274", "text": "func Set(object *astext.Object, path []string, value ast.Node) error {\n\tif len(path) == 0 {\n\t\treturn errors.New(\"path was empty\")\n\t}\n\n\tcurObj := object\n\n\tfor i, k := range path {\n\t\tfield, err := findField(curObj, k)\n\t\tif err != nil {\n\t\t\tswitch err.(type) {\n\t\t\tdefault:\n\t\t\t\treturn err\n\t\t\tcase *unknownField:\n\t\t\t\tfield, err = astext.CreateField(k)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tfield.Hide = ast.ObjectFieldInherit\n\t\t\t\tcurObj.Fields = append(curObj.Fields, *field)\n\t\t\t}\n\t\t}\n\n\t\tif i == len(path)-1 {\n\t\t\tfield, _ = findField(curObj, k)\n\t\t\tif canUpdateObject(field.Expr2, value) {\n\t\t\t\treturn errors.New(\"can't set object to non object\")\n\t\t\t}\n\t\t\tfield.Expr2 = value\n\t\t\treturn nil\n\t\t}\n\n\t\tif field.Expr2 == nil {\n\t\t\tcurObj = &astext.Object{}\n\t\t\tfield.Expr2 = curObj\n\t\t} else if obj, ok := field.Expr2.(*astext.Object); ok {\n\t\t\tcurObj = obj\n\t\t} else {\n\t\t\treturn errors.Errorf(\"child is not an object at %q\", k)\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "5fc0f0d50cfc60e9fbb7a172f2e3743e", "score": "0.6659694", "text": "func (a *chunkBasedAtlas) SetObject(v maths.Vector, obj Object) {\n\tc := a.worldSpaceToChunkWithGrow(v)\n\tlocal := a.worldSpaceToChunkLocal(v)\n\ta.setObject(c, local, obj)\n}", "title": "" }, { "docid": "d9bf83706b751ade60b6964d3f5a54e0", "score": "0.6646639", "text": "func SetInObject(fieldPath []string, paramsData, key string, value interface{}, root string) (string, error) {\n\tprops, err := ToMap(key, paramsData, root)\n\tif err != nil {\n\t\tprops = make(map[string]interface{})\n\t}\n\n\tchanges := 
make(map[string]interface{})\n\tcur := changes\n\n\tfor i, k := range fieldPath {\n\t\tif i == len(fieldPath)-1 {\n\t\t\tcur[k] = value\n\t\t} else {\n\t\t\tif _, ok := cur[k]; !ok {\n\t\t\t\tm := make(map[string]interface{})\n\t\t\t\tcur[k] = m\n\t\t\t\tcur = m\n\t\t\t}\n\t\t}\n\t}\n\n\tif err = mergeMaps(props, changes, nil); err != nil {\n\t\treturn \"\", err\n\t}\n\n\tupdatePath := []string{root}\n\tif key != \"\" {\n\t\tupdatePath = []string{root, key}\n\t}\n\n\treturn updateFn(updatePath, paramsData, props)\n}", "title": "" }, { "docid": "8616f1ff900ecb24a908e41ce199fe71", "score": "0.6588416", "text": "func (node *GoValueNode) SetObjectValueByField(field string, newValue reflect.Value) (err error) {\n\tfieldVal := node.thisValue.Elem().FieldByName(field)\n\tif fieldVal.IsValid() && fieldVal.CanAddr() && fieldVal.CanSet() {\n\t\tdefer func() {\n\t\t\tif r := recover(); r != nil {\n\t\t\t\terr = fmt.Errorf(\"recovered : %v\", r)\n\t\t\t}\n\t\t}()\n\t\tif pkg.IsNumber(fieldVal) && pkg.IsNumber(newValue) {\n\n\t\t\treturn SetNumberValue(fieldVal, newValue)\n\t\t}\n\t\tfieldVal.Set(newValue)\n\n\t\treturn nil\n\t}\n\n\treturn fmt.Errorf(\"this node identified as \\\"%s\\\" have field \\\"%s\\\" that is not valid nor addressable\", node.IdentifiedAs(), field)\n}", "title": "" }, { "docid": "1bc0f4f35bb1afefb4a5283059c2eb89", "score": "0.6586096", "text": "func setField(obj interface{}, name string, value interface{}) error {\n\tstructValue := reflect.ValueOf(obj).Elem()\n\tstructFieldValue := structValue.FieldByName(name)\n\n\tif !structFieldValue.IsValid() {\n\t\treturn fmt.Errorf(\"No such field: %s in obj\", name)\n\t}\n\n\tif !structFieldValue.CanSet() {\n\t\treturn fmt.Errorf(\"Cannot set %s field value\", name)\n\t}\n\n\tstructFieldType := structFieldValue.Type()\n\n\tvar val reflect.Value\n\tswitch structFieldType.String() {\n\tcase \"int\":\n\t\ti, _ := strconv.Atoi(value.(js.Value).String())\n\t\tval = reflect.ValueOf(i)\n\t\tbreak\n\tcase \"float64\":\n\t\ti, _ := strconv.ParseFloat(value.(js.Value).String(), 64)\n\t\tval = reflect.ValueOf(i)\n\t\tbreak\n\tcase \"bool\":\n\t\ti, _ := strconv.ParseBool(value.(js.Value).String())\n\t\tval = reflect.ValueOf(i)\n\t\tbreak\n\tcase \"string\":\n\t\tval = reflect.ValueOf(value.(js.Value).String())\n\t\tbreak\n\tdefault:\n\t\tval = reflect.ValueOf(value)\n\t\tbreak\n\t}\n\n\tstructFieldValue.Set(val)\n\treturn nil\n}", "title": "" }, { "docid": "84d0eb0f3c543411b03f05984395d097", "score": "0.6561023", "text": "func (ep *EtcdClient) SetObj(key string, value interface{}) error {\n\tlog.Infof(`objdb: setting \"%s\"`, key)\n\tkeyName := \"/contiv.io/obj/\" + key\n\n\t// JSON format the object\n\tjsonVal, err := json.Marshal(value)\n\tif err != nil {\n\t\tlog.Errorf(\"Json conversion error. 
Err %v\", err)\n\t\treturn err\n\t}\n\n\t// Set it via etcd client\n\t\n\tlog.Infof(`objdb: jsonVal \"%s\"`, string(jsonVal[:]))\n\t_, err = ep.client.KV.Put(context.Background(), keyName, string(jsonVal[:]))\n\tif err != nil {\n\t\t// Retry few times if cluster is unavailable\n\t\tif err.Error() == client.ErrNoAvailableEndpoints.Error() {\n\t\t\tfor i := 0; i < maxEtcdRetries; i++ {\n\t\t\t\t_, err = ep.client.KV.Put(context.Background(), keyName, string(jsonVal[:]))\n\t\t\t\tif err == nil {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\n\t\t\t\t// Retry after a delay\n\t\t\t\ttime.Sleep(time.Second)\n\t\t\t}\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Error setting key %s, Err: %v\", keyName, err)\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "6d39700bf9d19f5cb86bd2ed394e56c9", "score": "0.6489624", "text": "func (r *Request) setBodyObject(object interface{}) error {\n\tvar err error\n\tr.body, err = r.session.codec.Marshal(object, nil)\n\treturn err\n}", "title": "" }, { "docid": "ece43c52f0fd1d8562b7187cfbedb973", "score": "0.6440977", "text": "func (b *ByteArray) SetObject(o *ObjectRef) {\n\tb.arr = jbyteArray(o.jobject)\n}", "title": "" }, { "docid": "43927694e33bde3b58fa8391728eafba", "score": "0.63629943", "text": "func (e *SettingEvent) SetObject(o Setting) {\n\te.Setting = o\n}", "title": "" }, { "docid": "42056020ad3603882c9b9d9b6e124a4a", "score": "0.63444567", "text": "func SetStaticObjectField(env *C.JNIEnv, clazz C.jclass, fieldID C.jfieldID, value C.jobject) {\n\tC._GoJniSetStaticObjectField(env, clazz, fieldID, value)\n}", "title": "" }, { "docid": "cc2017bdfe462707351fde38985c9303", "score": "0.6338919", "text": "func setJsObject(key string, v interface{}) {\n\tmodule.Set(key, v)\n}", "title": "" }, { "docid": "30c2c1d357b6ac8f4e8d396ca8dacf42", "score": "0.6321395", "text": "func (ep *EtcdClient) SetObj(key string, value interface{}) error {\n\tkeyName := \"/contiv.io/obj/\" + key\n\n\t// JSON format the object\n\tjsonVal, err := json.Marshal(value)\n\tif err != nil {\n\t\tlog.Errorf(\"Json conversion error. 
Err %v\", err)\n\t\treturn err\n\t}\n\n\t// Set it via etcd client\n\t_, err = ep.kapi.Set(context.Background(), keyName, string(jsonVal[:]), nil)\n\tif err != nil {\n\t\t// Retry few times if cluster is unavailable\n\t\tif err.Error() == client.ErrClusterUnavailable.Error() {\n\t\t\tfor i := 0; i < maxEtcdRetries; i++ {\n\t\t\t\t_, err = ep.kapi.Set(context.Background(), keyName, string(jsonVal[:]), nil)\n\t\t\t\tif err == nil {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\n\t\t\t\t// Retry after a delay\n\t\t\t\ttime.Sleep(time.Second)\n\t\t\t}\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Error setting key %s, Err: %v\", keyName, err)\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "3fe01b45c5c6aa35855465e9fa7cb43c", "score": "0.6287786", "text": "func (fs *FakeSession) SetObj(oid string, value string) *FakeSession {\n\treturn fs.Set(oid, gosnmp.ObjectIdentifier, value)\n}", "title": "" }, { "docid": "6ec1fbc808983eb73a24cb2cdbd1e39b", "score": "0.624085", "text": "func (o *Contentmanagementworkspacedocumentstopicdocumentdatav2) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "55219f1840a61f8e5da9aed15844f4e5", "score": "0.6234304", "text": "func (r *RepoStore) SetEncodedObject(obj plumbing.EncodedObject) (plumbing.Hash, error) {\n\tvar key = append([]byte(\"objects/\" + obj.Hash().String()))\n\n\treader, err := obj.Reader()\n\tif err != nil {\n\t\treturn plumbing.ZeroHash, err\n\t}\n\n\tcontent, err := ioutil.ReadAll(reader)\n\tif err != nil {\n\t\treturn plumbing.ZeroHash, err\n\t}\n\n\terr = r.put(append(key, []byte(\"/type\")...), []byte(obj.Type().String()))\n\tif err != nil {\n\t\treturn plumbing.ZeroHash, err\n\t}\n\n\terr = r.put(key, content)\n\tif err != nil {\n\t\treturn plumbing.ZeroHash, err\n\t}\n\n\treturn obj.Hash(), err\n}", "title": "" }, { "docid": "62503ae822e0766f397101a00fcb0361", "score": "0.6203345", "text": "func (o *Wfmagent) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "9920b74625c232ac44928ecfc1e31051", "score": "0.6196091", "text": "func (o *Oauthclientrequest) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil 
{\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "8dd883e71b34d768200c335dfc00e83d", "score": "0.6193965", "text": "func setField(obj interface{}, name string, value interface{}) error {\n\tstructValue := reflect.ValueOf(obj).Elem()\n\tstructType := reflect.TypeOf(obj).Elem()\n\tstructFieldValue := structValue.FieldByName(name)\n\n\tfor i := 0; i < structType.NumField(); i++ {\n\t\tfield := structType.Field(i)\n\t\ttag := field.Tag.Get(\"query\")\n\n\t\tif tag == name {\n\t\t\tstructFieldValue = structValue.Field(i)\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif !structFieldValue.IsValid() || !structFieldValue.CanSet() {\n\t\treturn errors.New(fmt.Sprintf(\"%s is not allowed\", name))\n\t}\n\n\tstructFieldType := structFieldValue.Type()\n\tval := reflect.ValueOf(value)\n\n\tif structFieldType.Kind() == reflect.Bool {\n\t\tswitch val.String() {\n\t\tcase \"false\":\n\t\t\tstructFieldValue.SetBool(false)\n\t\t\treturn nil\n\t\tcase \"true\":\n\t\t\tstructFieldValue.SetBool(true)\n\t\t\treturn nil\n\t\tdefault:\n\t\t\treturn errors.New(fmt.Sprintf(\"%s must be a boolean\", name))\n\t\t}\n\t} else {\n\t\tstructFieldValue.Set(val)\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "a63d263edfec2b398740a7d8a1680ecc", "score": "0.61789596", "text": "func (o *FeatureRelationship) SetObject(exec boil.Executor, insert bool, related *Feature) error {\n\tvar err error\n\tif insert {\n\t\tif err = related.Insert(exec); err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to insert into foreign table\")\n\t\t}\n\t}\n\n\tupdateQuery := fmt.Sprintf(\n\t\t\"UPDATE \\\"feature_relationship\\\" SET %s WHERE %s\",\n\t\tstrmangle.SetParamNames(\"\\\"\", \"\\\"\", 1, []string{\"object_id\"}),\n\t\tstrmangle.WhereClause(\"\\\"\", \"\\\"\", 2, featureRelationshipPrimaryKeyColumns),\n\t)\n\tvalues := []interface{}{related.FeatureID, o.FeatureRelationshipID}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, updateQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, values)\n\t}\n\n\tif _, err = exec.Exec(updateQuery, values...); err != nil {\n\t\treturn errors.Wrap(err, \"failed to update local table\")\n\t}\n\n\to.ObjectID = related.FeatureID\n\n\tif o.R == nil {\n\t\to.R = &featureRelationshipR{\n\t\t\tObject: related,\n\t\t}\n\t} else {\n\t\to.R.Object = related\n\t}\n\n\tif related.R == nil {\n\t\trelated.R = &featureR{\n\t\t\tObjectFeatureRelationship: o,\n\t\t}\n\t} else {\n\t\trelated.R.ObjectFeatureRelationship = o\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "30e1e098897c12c7384fcf2d6cc01fcf", "score": "0.61716616", "text": "func SetByObj(o1 interface{}, o2 interface{}) {\n\tt1 := reflect.TypeOf(o1).Elem()\n\tt2 := reflect.TypeOf(o2).Elem()\n\tv1 := reflect.ValueOf(o1).Elem()\n\tv2 := reflect.ValueOf(o2).Elem()\n\n\tif t1 == t2 {\n\t\tfor i := 0; i < v2.NumField(); i++ {\n\t\t\tv1.Field(i).Set(v2.Field(i))\n\t\t}\n\t}\n}", "title": "" }, { "docid": "03e5a4947e65369da890aef74c1bc730", "score": "0.6171603", "text": "func (o *Createshareresponse) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := 
reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "99278d98b369446c0e4b4b42b5b0563e", "score": "0.61637557", "text": "func (km *KeyValueMap) SetObject(object string) {\n\t(*km)[kmObject] = object\n}", "title": "" }, { "docid": "98157a83ac22907e4372c08e49b1ece3", "score": "0.61407214", "text": "func (autoscalingGroup *AutoscalingGroup) SetObject(asg *autoscaling.Group) error {\n\tautoscalingGroup.Name = *asg.AutoScalingGroupName\n\tautoscalingGroup.DesiredCapacity = int(*asg.DesiredCapacity)\n\tnodeHostNames, err := awsTools.GetNodeHostnames(asg.Instances)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"Failed to get asg instance node names and set asg object\")\n\t}\n\tautoscalingGroup.Nodes = nodeHostNames\n\treturn nil\n}", "title": "" }, { "docid": "de8f6168c9cec52ce03c16a55a983171", "score": "0.612945", "text": "func (o *FakeObject) Set(key string, value interface{}) { o.Properties[key] = value }", "title": "" }, { "docid": "a03f5ccb55f92c273aef4fcf891ddcb9", "score": "0.6129189", "text": "func SetObjectData(sessionState *State, actionState *action.State, rawLayout json.RawMessage, objectDef *senseobjdef.ObjectDef,\n\tobj *enigmahandlers.Object, enigmaObject *enigma.GenericObject) error {\n\tswitch objectDef.DataDef.Type {\n\tcase senseobjdef.DataDefNoData:\n\t\treturn nil\n\tcase senseobjdef.DataDefListObject:\n\t\tif string(objectDef.DataDef.Path) == \"\" {\n\t\t\treturn errors.Errorf(\n\t\t\t\t\"object<%s> is defined as listobject carrier, but has not listobject path definition\", enigmaObject.GenericType)\n\t\t}\n\n\t\tif err := SetListObject(rawLayout, obj, objectDef.DataDef.Path); err != nil {\n\t\t\treturn errors.Wrapf(err, \"object<%s> type<%s>\", obj.ID, enigmaObject.GenericType)\n\t\t}\n\tcase senseobjdef.DataDefHyperCube:\n\t\tif objectDef.DataDef.Path == \"\" {\n\t\t\treturn errors.Errorf(\n\t\t\t\t\"object<%s> is defined as hypercube carrier, but has not hypercube path definition\", enigmaObject.GenericType)\n\t\t}\n\t\tif err := SetHyperCube(rawLayout, obj, objectDef.DataDef.Path); err != nil {\n\t\t\treturn errors.Wrapf(err, \"object<%s> type<%s>\", obj.ID, enigmaObject.GenericType)\n\t\t}\n\tdefault:\n\t\tsessionState.LogEntry.Logf(logger.WarningLevel, \"Get Data for object type<%s> not supported\", enigmaObject.GenericType)\n\t\treturn nil\n\t}\n\n\t// Evaluate data requests\n\tdataRequests, err := objectDef.Evaluate(rawLayout)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"object<%s> type<%s>\", obj.ID, enigmaObject.GenericType)\n\t}\n\tsessionState.LogEntry.LogDebugf(\"object<%s> type<%s> request evaluation result<%+v>\", obj.ID, enigmaObject.GenericType, dataRequests)\n\tif obj.HyperCube() != nil {\n\t\tsessionState.LogEntry.LogDebugf(\"object<%s> type<%s> hypercube mode<%s>\", obj.ID, enigmaObject.GenericType, obj.HyperCube().Mode)\n\t}\n\tif dataRequests == nil || len(dataRequests) < 1 {\n\t\treturn nil\n\t}\n\n\tfor _, r := range dataRequests {\n\t\tcolumns := false\n\t\tswitch r.Type {\n\t\tcase senseobjdef.DataTypeLayout:\n\t\tcase 
senseobjdef.DataTypeListObject:\n\t\t\tUpdateListObjectDataAsync(sessionState, actionState, enigmaObject, obj, r)\n\t\tcase senseobjdef.DataTypeHyperCubeDataColumns:\n\t\t\tcolumns = true\n\t\t\tfallthrough\n\t\tcase senseobjdef.DataTypeHyperCubeData:\n\t\t\tUpdateObjectHyperCubeDataAsync(sessionState, actionState, enigmaObject, obj, r, columns)\n\t\tcase senseobjdef.DataTypeHyperCubeReducedData:\n\t\t\tUpdateObjectHyperCubeReducedDataAsync(sessionState, actionState, enigmaObject, obj, r)\n\t\tcase senseobjdef.DataTypeHyperCubeBinnedData:\n\t\t\tUpdateObjectHyperCubeBinnedDataAsync(sessionState, actionState, enigmaObject, obj, r)\n\t\tcase senseobjdef.DataTypeHyperCubeStackData:\n\t\t\tUpdateObjectHyperCubeStackDataAsync(sessionState, actionState, enigmaObject, obj, r)\n\t\tcase senseobjdef.DataTypeHyperCubeContinuousData:\n\t\t\tUpdateObjectHyperCubeContinuousDataAsync(sessionState, actionState, enigmaObject, obj, r)\n\t\tcase senseobjdef.DataTypeHyperCubeTreeData:\n\t\t\tUpdateObjectHyperCubeTreeDataAsync(sessionState, actionState, enigmaObject, obj, r)\n\t\tdefault:\n\t\t\tsessionState.LogEntry.Logf(logger.WarningLevel,\n\t\t\t\t\"Get Data for object type<%s> not supported\", enigmaObject.GenericType)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8b1bab47e28176687377a28eb0bbeefe", "score": "0.61089593", "text": "func (o *Workitemwrapup) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "0a565ca01aca30efcdc4699155ea65dd", "score": "0.61058766", "text": "func (path P) Set(obj, value interface{}) error {\n\tfn := func(p P, ctx *Context) (bool, error) {\n\t\treturn false, set(p, ctx, reflect.ValueOf(value))\n\t}\n\n\treturn path.Apply(obj, &Context{CreateIfMissing: true, Fn: fn})\n}", "title": "" }, { "docid": "8ec241f3344dfbadcc91658cb42d594d", "score": "0.60939896", "text": "func (o *Integrationtype) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "8dabf551a7e67af65b5719325e47cdef", "score": "0.605941", "text": "func (O *Object) Set(name string, v interface{}) error {\n\tif data, ok := v.(*Data); ok {\n\t\treturn O.SetAttribute(name, data)\n\t}\n\td := scratch.Get()\n\tdefer scratch.Put(d)\n\tif err := d.Set(v); err != nil {\n\t\treturn err\n\t}\n\treturn O.SetAttribute(name, d)\n}", "title": "" }, { "docid": 
"6dff2b3e65967f6f10369c7c3dc6cd55", "score": "0.6035821", "text": "func (o *Posttextresponse) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "9d68bb88e35bcd9063bea268ac258eb5", "score": "0.6007897", "text": "func (instance *AutoChartInstance) SetObjectAndEvents(sessionState *State, actionState *action.State, obj *enigmahandlers.Object, genObj *enigma.GenericObject) {\n\tinstance.handleAutoChart(sessionState, actionState, obj, genObj)\n}", "title": "" }, { "docid": "b23d36f117224892613605593adfbbf3", "score": "0.60067564", "text": "func (v *Value) SetObj() {\n\tC.zj_SetObj(v.V)\n}", "title": "" }, { "docid": "140961ef9173d2fd0d817dc9fb4248d7", "score": "0.5994261", "text": "func putObject(frame *rtda.Frame) {\n\tvars := frame.LocalVars()\n\tfields := vars.GetRef(1).Fields()\n\toffset := vars.GetLong(2)\n\tx := vars.GetRef(4)\n\n\tif anys, ok := fields.([]Any); ok {\n\t\t// object\n\t\tanys[offset] = x\n\t} else if objs, ok := fields.([]*rtc.Obj); ok {\n\t\t// ref[]\n\t\tobjs[offset] = x\n\t} else {\n\t\tpanic(\"putObject!\")\n\t}\n}", "title": "" }, { "docid": "7bf2be0a780c7edccb029998fc35b467", "score": "0.59803104", "text": "func (o *Wfmbushorttermforecastimportcompletetopicbuforecastmodification) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "6f33a96d10d3029f8e7cd1724f89bf91", "score": "0.593994", "text": "func (ctx *context) SetValue(obj string, prop string, value interface{}) {\n\tctx.Data[obj+prop] = value\n\n}", "title": "" }, { "docid": "ae7f9c4c5432787f6dee646222c8ee28", "score": "0.5927549", "text": "func (object Object) Set(key string, value interface{}) Object {\n\tobject[key] = value\n\treturn object\n}", "title": "" }, { "docid": "895648cdda6af95637da174a112b1ab1", "score": "0.58939403", "text": "func (o *Appevent) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add 
field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "b38f67612d3a4b9a42988b0061a1dab7", "score": "0.5869226", "text": "func (s *GenericStorage) Set(gvk schema.GroupVersionKind, obj runtime.Object) error {\n\tstorageKey := KeyForUID(gvk, obj.GetUID())\n\n\t// Set the serializer based on the format given by the RawStorage\n\tserializeFunc := s.serializer.EncodeJSON\n\tif s.raw.Format(storageKey) != FormatJSON {\n\t\tserializeFunc = s.serializer.EncodeYAML\n\t}\n\n\tb, err := serializeFunc(obj)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn s.raw.Write(storageKey, b)\n}", "title": "" }, { "docid": "0376ff8dce300784dce51fc5031e3922", "score": "0.5849124", "text": "func (o *Wfmintradaydataupdatetopicintradayhistoricalqueuedata) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "ef5683cee672de5c4e8d543a2e55ea65", "score": "0.582887", "text": "func (pr *Params) SetObjectSet(objName, setName string) error {\n\tpset, err := pr.Params.SetByNameTry(setName)\n\tif err != nil {\n\t\treturn err\n\t}\n\tsh, ok := pset.Sheets[objName]\n\tif !ok {\n\t\terr = fmt.Errorf(\"Params.SetObjectSet: sheet named: %s not found\", objName)\n\t\treturn err\n\t}\n\tobj, ok := pr.Objects[objName]\n\tif !ok {\n\t\terr = fmt.Errorf(\"Params.SetObjectSet: Object named: %s not found\", objName)\n\t\treturn err\n\t}\n\tsh.SelMatchReset(setName)\n\tif objName == \"Network\" {\n\t\tnet := obj.(Network)\n\t\tpr.SetNetworkSheet(net, sh, setName)\n\t} else if objName == \"NetSize\" {\n\t\tns := obj.(*NetSize)\n\t\tns.ApplySheet(sh, pr.SetMsg)\n\t} else {\n\t\tsh.Apply(obj, pr.SetMsg)\n\t}\n\terr = sh.SelNoMatchWarn(setName, objName)\n\treturn err\n}", "title": "" }, { "docid": "83638118468caead50d40f10f1ad8f49", "score": "0.58277106", "text": "func (b *box) setFieldValue(x, y, v int) {\n\t// Matrix conversion, see: https://stackoverflow.com/a/14015582\n\tb.values[x+y*3] = v\n}", "title": "" }, { "docid": "7d32dbc33989755348c73521ddb7ae7e", "score": "0.5824764", "text": "func (o *Compliance) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "8fa222405318bfc10969b94fceb50013", "score": "0.5817905", "text": "func (i *Info) SetField(column string, value 
interface{}) error {\n\tfieldName, ok := i.Metadata.Fields[column]\n\tif !ok {\n\t\treturn fmt.Errorf(\"SetField: column %s not found in orm info\", column)\n\t}\n\tfieldValue := reflect.ValueOf(i.Obj).Elem().FieldByName(fieldName)\n\n\tif !fieldValue.Type().AssignableTo(reflect.TypeOf(value)) {\n\t\treturn fmt.Errorf(\"column %s: native value %v (%s) is not assignable to field %s (%s)\",\n\t\t\tcolumn, value, reflect.TypeOf(value), fieldName, fieldValue.Type())\n\t}\n\tfieldValue.Set(reflect.ValueOf(value))\n\treturn nil\n}", "title": "" }, { "docid": "a47e6528cca5ef7d5f3d1796ecfae2dd", "score": "0.58043694", "text": "func (o *Webchatmemberinfo) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "8f66944362419c46b9eb4d1be102b853", "score": "0.58040535", "text": "func (e *FileEvent) SetObject(o File) {\n\te.File = o\n}", "title": "" }, { "docid": "5fbfae6393af56999cb6034130046d32", "score": "0.5788997", "text": "func (level *Level) SetObject(objectIndex int, newProperties *model.LevelObjectProperties) (properties model.LevelObjectProperties, err error) {\n\tlevel.mutex.Lock()\n\tdefer level.mutex.Unlock()\n\n\tif (objectIndex > 0) && (objectIndex < len(level.objectList)) {\n\t\tobjectEntry := &level.objectList[objectIndex]\n\n\t\tif objectEntry.IsInUse() {\n\t\t\tclassMeta := data.LevelObjectClassMetaEntry(objectEntry.Class)\n\t\t\tclassStore := level.store.Get(res.ResourceID(4000 + level.id*100 + 10 + int(objectEntry.Class)))\n\t\t\tclassTable := logic.DecodeLevelObjectClassTable(classStore.BlockData(0), classMeta.EntrySize)\n\t\t\tchangedTile := false\n\n\t\t\tif newProperties.Subclass != nil {\n\t\t\t\tobjectEntry.Subclass = res.ObjectSubclass(*newProperties.Subclass)\n\t\t\t}\n\t\t\tif newProperties.Type != nil {\n\t\t\t\tobjectEntry.Type = res.ObjectType(*newProperties.Type)\n\t\t\t}\n\t\t\tif newProperties.Z != nil {\n\t\t\t\tobjectEntry.Z = byte(*newProperties.Z)\n\t\t\t}\n\t\t\tnewTileX, newFineX := objectEntry.X.Tile(), objectEntry.X.Offset()\n\t\t\tif (newProperties.TileX != nil) && (newTileX != byte(*newProperties.TileX)) {\n\t\t\t\tnewTileX = byte(*newProperties.TileX)\n\t\t\t\tchangedTile = true\n\t\t\t}\n\t\t\tif newProperties.FineX != nil {\n\t\t\t\tnewFineX = byte(*newProperties.FineX)\n\t\t\t}\n\t\t\tobjectEntry.X = data.MapCoordinateOf(newTileX, newFineX)\n\t\t\tnewTileY, newFineY := objectEntry.Y.Tile(), objectEntry.Y.Offset()\n\t\t\tif (newProperties.TileY != nil) && (newTileY != byte(*newProperties.TileY)) {\n\t\t\t\tnewTileY = byte(*newProperties.TileY)\n\t\t\t\tchangedTile = true\n\t\t\t}\n\t\t\tif newProperties.FineY != nil {\n\t\t\t\tnewFineY = byte(*newProperties.FineY)\n\t\t\t}\n\t\t\tobjectEntry.Y = data.MapCoordinateOf(newTileY, newFineY)\n\n\t\t\tif newProperties.RotationX != nil {\n\t\t\t\tobjectEntry.Rot1 = byte(*newProperties.RotationX)\n\t\t\t}\n\t\t\tif newProperties.RotationY != nil {\n\t\t\t\tobjectEntry.Rot3 = 
byte(*newProperties.RotationY)\n\t\t\t}\n\t\t\tif newProperties.RotationZ != nil {\n\t\t\t\tobjectEntry.Rot2 = byte(*newProperties.RotationZ)\n\t\t\t}\n\t\t\tif newProperties.Hitpoints != nil {\n\t\t\t\tobjectEntry.Hitpoints = uint16(*newProperties.Hitpoints)\n\t\t\t}\n\t\t\tif newProperties.ExtraData != nil {\n\t\t\t\tcopy(objectEntry.Extra[:], newProperties.ExtraData)\n\t\t\t}\n\n\t\t\tif len(newProperties.ClassData) > 0 {\n\t\t\t\tclassEntry := classTable.Entry(data.LevelObjectChainIndex(objectEntry.ClassTableIndex))\n\n\t\t\t\tcopy(classEntry.Data(), newProperties.ClassData)\n\t\t\t}\n\t\t\tif changedTile {\n\t\t\t\tlocations := []logic.TileLocation{logic.AtTile(uint16(newTileX), uint16(newTileY))}\n\n\t\t\t\tif objectEntry.CrossReferenceTableIndex != 0 {\n\t\t\t\t\tlevel.crossrefList.RemoveEntriesFromMap(logic.CrossReferenceListIndex(objectEntry.CrossReferenceTableIndex), level.tileMap)\n\t\t\t\t\tobjectEntry.CrossReferenceTableIndex = 0\n\t\t\t\t}\n\n\t\t\t\tcrossrefIndex, crossrefErr := level.crossrefList.AddObjectToMap(uint16(objectIndex), level.tileMap, locations)\n\t\t\t\tif crossrefErr != nil {\n\t\t\t\t\t// This is a kind of bad (and weird) situation.\n\t\t\t\t\t// The object, which was already stored, can not be stored anymore (?) and is furthermore left\n\t\t\t\t\t// in an incorrect state.\n\t\t\t\t\terr = crossrefErr\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tcrossrefEntry := level.crossrefList.Entry(crossrefIndex)\n\n\t\t\t\tobjectEntry.CrossReferenceTableIndex = uint16(crossrefIndex)\n\t\t\t\tcrossrefEntry.LevelObjectTableIndex = uint16(objectIndex)\n\t\t\t}\n\n\t\t\tlevel.onObjectListChanged(classStore, classTable)\n\t\t\tproperties = level.objectFromRawEntry(int(objectIndex), objectEntry).Properties\n\t\t} else {\n\t\t\terr = fmt.Errorf(\"Object is not in use\")\n\t\t}\n\t} else {\n\t\terr = fmt.Errorf(\"Invalid object index\")\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "295bf0501d14a92ae0ab6709c9f9dd91", "score": "0.5779751", "text": "func (obj *SObject) setID(id string) {\n\t(*obj)[sobjectIDKey] = id\n}", "title": "" }, { "docid": "7bafef10e44ad3954ea309078b81f78d", "score": "0.57711595", "text": "func (o *Edge) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "9698d190f5e24048624bcf1aeb3615c7", "score": "0.57699406", "text": "func (o *Actionmap) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = 
make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "4e2e408732faf56aef9b974e0692d9bf", "score": "0.5769884", "text": "func (o *Voicemailmessagestopicvoicemailmessage) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "b163e93999e01559fa9e3fb016ae3381", "score": "0.5766523", "text": "func (o *Directrouting) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "1c64835a98980c55a79d4e519494f1ca", "score": "0.5745004", "text": "func (p *Person) updateObject() {\n\tp.firstName = \"Gilbert\"\n}", "title": "" }, { "docid": "4a663343d00d80b86ae528a8b931bed4", "score": "0.57409704", "text": "func (instance *Instance) Set(v interface{}) error {\n\tjsonData, err := json.Marshal(v)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn instance.SetJSON(jsonData)\n}", "title": "" }, { "docid": "b642090f503fc0d1ba32d6d41d0d92ff", "score": "0.5721777", "text": "func (ls *ledgerStore) putObject(ctx context.Context, bucket, object string, obj *Object) error {\n\toHash, err := ipfsSave(ctx, ls.dag, obj)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn ls.putObjectHash(ctx, bucket, object, oHash)\n}", "title": "" }, { "docid": "db4e0f7b457ff6c46e4b8a7c94ce000f", "score": "0.5710723", "text": "func (obj *SObject) setClient(client *Client) {\n\t(*obj)[sobjectClientKey] = client\n}", "title": "" }, { "docid": "35a3b83109668f8809bc299fd6ddd402", "score": "0.57097507", "text": "func (m *ModelStructRecord) SetField(name string, value reflect.Value) {\n\tif name == \"\" {\n\t\treturn\n\t}\n\tfieldValue := m.FieldValues[name]\n\t//if value.Kind() == reflect.Ptr {\n\t//\tpanic(\"RecordFieldSetError: value cannot be a ptr\")\n\t//}\n\tif fieldValue.IsValid() == false {\n\t\tm.VirtualFieldValues[name] = reflect.New(m.model.GetFieldWithName(name).StructField().Type).Elem()\n\t\tfieldValue = m.VirtualFieldValues[name]\n\t}\n\t//fieldValue = LoopIndirectAndNew(fieldValue)\n\tsafeSet(fieldValue, value)\n}", "title": "" }, { "docid": "0ac553b83d7df6849fe2f7b0526be505", "score": "0.57086974", "text": "func ReplaceObject(stub shim.ChaincodeStubInterface, objectType string, keys []string, objectData []byte) error {\n\n // Check how many keys\n\n err := VerifyAtLeastOneKeyIsPresent(objectType, keys )\n if err != nil {\n return err\n }\n\n\t// 
Convert keys to compound key\n\tcompositeKey, _ := stub.CreateCompositeKey(objectType, keys)\n\n\t// Add Party JSON to state\n\terr = stub.PutState(compositeKey, objectData)\n\tif err != nil {\n\t\tfmt.Println(\"ReplaceObject() : Error replacing Object in State Database %s\", err)\n\t\treturn err\n\t}\n\n\tfmt.Println(\"ReplaceObject() : - end init object \", objectType)\n\treturn nil\n}", "title": "" }, { "docid": "35cc30afef9855e3061f7de16c4d9e8b", "score": "0.5678689", "text": "func (s *LoadSaver) SetValue(id ident.Id, value interface{}) (err error) {\n\tif field, ok := s.data[id]; ok {\n\t\tfield.Value = value\n\t\ts.data[id] = field // record value back; it's not a pointer.\n\t\ts.changed = true\n\t} else if prop, ok := s.GetProperty(id); !ok {\n\t\terr = fmt.Errorf(\"couldnt find property %s.%s\", s, id)\n\t} else {\n\t\ts.data[id] = FieldValue{prop.GetType(), value}\n\t\ts.changed = true\n\t}\n\treturn\n}", "title": "" }, { "docid": "e43433de294facf6c21e4d694c72b2a6", "score": "0.5669949", "text": "func (u *Update) Set(obj utils.M) *Update {\n\tu.update[\"$set\"] = obj\n\treturn u\n}", "title": "" }, { "docid": "e322cd7d220bd4e3d21e5415cffabe91", "score": "0.56644535", "text": "func setObjectRevision(obj client.Object, revision int64) {\n\ta := obj.GetAnnotations()\n\tif a == nil {\n\t\ta = map[string]string{}\n\t}\n\ta[revisionAnnotation] = fmt.Sprintf(\"%d\", revision)\n\tobj.SetAnnotations(a)\n}", "title": "" }, { "docid": "3eb7616706ba4a95deac32264d20c82e", "score": "0.5658259", "text": "func (o *Initialconfiguration) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "ef74beda26d5bfdbf5e9106a76e46d75", "score": "0.5657693", "text": "func (recv *Value) SetObjectTakeOwnership(vObject uintptr) {\n\tc_v_object := (C.gpointer)(vObject)\n\n\tC.g_value_set_object_take_ownership((*C.GValue)(recv.native), c_v_object)\n\n\treturn\n}", "title": "" }, { "docid": "6d8b2b538a9af3f4bae2385f961ec08a", "score": "0.56550175", "text": "func setNestedField(obj map[string]interface{}, value interface{}, fields ...string) {\n\tm := obj\n\tif len(fields) > 1 {\n\t\tfor _, field := range fields[0 : len(fields)-1] {\n\t\t\tif _, ok := m[field].(map[string]interface{}); !ok {\n\t\t\t\tm[field] = make(map[string]interface{})\n\t\t\t}\n\t\t\tm = m[field].(map[string]interface{})\n\t\t}\n\t}\n\tm[fields[len(fields)-1]] = value\n}", "title": "" }, { "docid": "be70172e285bb059cd0de704df236d54", "score": "0.5640294", "text": "func (u *Update) Set(obj types.M) *Update {\n\tu.update[\"$set\"] = obj\n\treturn u\n}", "title": "" }, { "docid": "c75ce3ebc683aa43d98e8c73cd155422", "score": "0.5639238", "text": "func (e *UserEvent) SetObject(o User) {\n\te.User = o\n}", "title": "" }, { "docid": "dd18f03dc8e2da4b98a784f262e396f2", "score": "0.56362396", "text": "func SetFieldValue(i interface{}, field string, value interface{}) {\n\tv := reflect.ValueOf(i)\n\n\tif v.Kind() != reflect.Ptr || 
v.Elem().Kind() != reflect.Struct {\n\t\tpanic(\"input must be a pointer to a struct\")\n\t}\n\n\tfieldVal := v.Elem().FieldByName(field)\n\n\tswitch fieldVal.Kind() {\n\tcase reflect.Bool:\n\t\tfieldVal.SetBool(value.(bool))\n\tcase reflect.Int:\n\tcase reflect.Int8:\n\tcase reflect.Int16:\n\tcase reflect.Int32:\n\tcase reflect.Int64:\n\t\tfieldVal.SetInt(value.(int64))\n\tcase reflect.Uint:\n\tcase reflect.Uint8:\n\tcase reflect.Uint16:\n\tcase reflect.Uint32:\n\tcase reflect.Uint64:\n\t\tfieldVal.SetUint(value.(uint64))\n\tcase reflect.Float32:\n\tcase reflect.Float64:\n\t\tfieldVal.SetFloat(value.(float64))\n\tcase reflect.Complex64:\n\tcase reflect.Complex128:\n\t\tfieldVal.SetComplex(value.(complex128))\n\tcase reflect.String:\n\t\tfieldVal.SetString(value.(string))\n\tdefault:\n\t\tpanic(fmt.Sprintf(\"Unsupported type [%s]\", fieldVal.Kind().String()))\n\t}\n}", "title": "" }, { "docid": "9d8da9e479b8fdf7f0cdaf28470f9ae2", "score": "0.5605211", "text": "func (obj *SObject) setType(typeName string) {\n\tattributes := obj.InterfaceField(sobjectAttributesKey)\n\tswitch attributes.(type) {\n\tcase SObjectAttributes:\n\t\tattrs := obj.AttributesField()\n\t\tattrs.Type = typeName\n\t\t(*obj)[sobjectAttributesKey] = *attrs\n\tdefault:\n\t\t(*obj)[sobjectAttributesKey] = SObjectAttributes{\n\t\t\tType: typeName,\n\t\t}\n\t}\n}", "title": "" }, { "docid": "fe67d716cc8da8beeb2671e9238647df", "score": "0.5600824", "text": "func (ll *LevelLedger) SetObjectIndex(ref *record.Reference, idx *index.ObjectLifeline) error {\n\tk := prefixkey(scopeIDLifeline, ref.Key())\n\tencoded, err := index.EncodeObjectLifeline(idx)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn ll.ldb.Put(k, encoded, nil)\n}", "title": "" }, { "docid": "d244b4e52b40a7bb05a78f26795b465e", "score": "0.5596832", "text": "func (self *_object) set(name string, value Value, throw bool) {\n\tif !self.stash.canPut(name) {\n\t\ttypeErrorResult(throw)\n\t\treturn\n\t}\n\tself.stash.put(name, value)\n}", "title": "" }, { "docid": "8781cc47f33380094cb4ec53e77ea06d", "score": "0.5583174", "text": "func (o *Object) Set(key StringOrSymbol, val, receiver Value) Boolean {\n\tif p, ok := o.fields[key]; ok {\n\t\treturn p.Set()(val)\n\t}\n\treturn False\n}", "title": "" }, { "docid": "386fff5823d384a00b465d2b203f3973", "score": "0.5566091", "text": "func Set(target, source interface{}) error {\n\tconverter := &Converter{\n\t\tTagName: \"field\",\n\t}\n\n\treturn converter.Convert(source, target)\n}", "title": "" }, { "docid": "f47b6132bcf2fed0ef224a295d461411", "score": "0.5551781", "text": "func (o *Objects) Set(key string, val interface{}, ttl int64) error {\n\tif ttl > 0 {\n\t\tttl = time.Now().Add(time.Duration(ttl) * time.Second).Unix()\n\t}\n\n\to.Lock()\n\tdefer o.Unlock()\n\to.values[key] = &Object{val, ttl}\n\n\treturn nil\n}", "title": "" }, { "docid": "5cc07554c7919af6f09978b74bda28b1", "score": "0.55513805", "text": "func (o *Object) Set(key string, val interface{}) error {\n\tif len(key) == 0 {\n\t\treturn errors.New(\"v8go: You must provide a valid property key\")\n\t}\n\treturn set(o, key, 0, val)\n}", "title": "" }, { "docid": "2a87e4f0fbc38af390fdcc718fdcd0e1", "score": "0.55511785", "text": "func (b *extendedWriter) writeObject(object *datamodel.Instance, caches *Caches) error {\n\tvar err error\n\tif object == nil {\n\t\treturn b.WriteByte(0)\n\t}\n\tif object.Ref.IsNull {\n\t\treturn b.WriteByte(0x00)\n\t}\n\terr = b.writeCachedObject(object.Ref.Scope, caches)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn 
b.writeUint32LE(object.Ref.Id)\n}", "title": "" }, { "docid": "e3ec07cbf861811ef64a9ab0534539d5", "score": "0.5549096", "text": "func (c *TransactionFirestore) Set(ctx context.Context, obj db.Object) error {\n\tif err := db.AssertObject(ctx, obj, false); err != nil {\n\t\treturn err\n\t}\n\tc.client.BaseClient.BeforeSet(ctx, obj)\n\tdocRef := c.client.refFromObj(ctx, obj)\n\terr := c.tx.Set(docRef, obj)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"tx set doc %v-%v\", obj.Collection(), obj.ID())\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "04ddb9c08fb42fcfbe966e06c394f8d4", "score": "0.55407536", "text": "func (r *Record) SetGobField(d *Db, number uint16, e interface{}) error {\n\tvar buffer bytes.Buffer\n\tencoder := gob.NewEncoder(&buffer)\n\terr := encoder.Encode(e)\n\tif err != nil {\n\t\treturn WDBError(fmt.Sprintf(\"Failed to add a record: %v\", err))\n\t}\n\n\tcKey := C.CString(reflect.TypeOf(e).String())\n\tdefer C.free(unsafe.Pointer(cKey))\n\tenc := C.wg_encode_blob(d.db, (*C.char)(unsafe.Pointer(&(buffer.Bytes())[0])), cKey, C.wg_int(buffer.Len()))\n\tif C.wg_set_field(d.db, r.rec, C.wg_int(number), enc) != 0 {\n\t\treturn WDBError(\"Could not set field\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e0fc8009952a6588ff4df8e6c4da4393", "score": "0.5535344", "text": "func (a *AdditionalPropertiesObject5) Set(fieldName string, value SchemaObject) {\n\tif a.AdditionalProperties == nil {\n\t\ta.AdditionalProperties = make(map[string]SchemaObject)\n\t}\n\ta.AdditionalProperties[fieldName] = value\n}", "title": "" }, { "docid": "66159eb3c94afa9b565da35d54c9cc69", "score": "0.55214965", "text": "func (f *Fieldx) Set(v interface{}) error {\n\tif !f.IsExport() {\n\t\treturn ErrNotExported\n\t}\n\n\tif !f.value.CanSet() {\n\t\treturn errNotSettable\n\t}\n\n\tvv := reflect.ValueOf(v)\n\tif f.Kind() != vv.Kind() {\n\t\treturn fmt.Errorf(\"xstruct: value kind not match, want: %s but got %s\", f.Kind(), vv.Kind())\n\t}\n\n\tf.value.Set(vv)\n\n\treturn nil\n}", "title": "" }, { "docid": "2fd4ebc88bdbb6e8a46aeaaf16b43258", "score": "0.55100256", "text": "func (o *Dialercampaignconfigchangecampaign) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "df23e850acc1e167e0d9be844af3e687", "score": "0.5509331", "text": "func (e *HTMLApplet) Object(v string) *HTMLApplet {\n\te.a[\"object\"] = v\n\treturn e\n}", "title": "" }, { "docid": "ca35c5fe39224fa3b4744c0dc97a7532", "score": "0.5487371", "text": "func (field *Field) Set(value interface{}) (err error) {\n\tif !field.Field.IsValid() {\n\t\treturn errors.New(\"field value not valid\")\n\t}\n\n\tif !field.Field.CanAddr() {\n\t\treturn ErrUnaddressable\n\t}\n\n\treflectValue, ok := value.(reflect.Value)\n\tif !ok {\n\t\treflectValue = reflect.ValueOf(value)\n\t}\n\n\tfieldValue := field.Field\n\tif reflectValue.IsValid() {\n\t\tif reflectValue.Type().ConvertibleTo(fieldValue.Type()) 
{\n\t\t\tfieldValue.Set(reflectValue.Convert(fieldValue.Type()))\n\t\t} else {\n\t\t\tif fieldValue.Kind() == reflect.Ptr {\n\t\t\t\tif fieldValue.IsNil() {\n\t\t\t\t\tfieldValue.Set(reflect.New(field.Struct.Type.Elem()))\n\t\t\t\t}\n\t\t\t\tfieldValue = fieldValue.Elem()\n\t\t\t}\n\n\t\t\tif reflectValue.Type().ConvertibleTo(fieldValue.Type()) {\n\t\t\t\tfieldValue.Set(reflectValue.Convert(fieldValue.Type()))\n\t\t\t} else {\n\t\t\t\terr = fmt.Errorf(\"could not convert argument of field %s from %s to %s\", field.Name, reflectValue.Type(), fieldValue.Type())\n\t\t\t}\n\t\t}\n\t} else {\n\t\tfield.Field.Set(reflect.Zero(field.Field.Type()))\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "c1eefde70db091df4ec147976e0b8573", "score": "0.5486832", "text": "func (obj *SObject) Set(key string, value interface{}) *SObject {\n\t(*obj)[key] = value\n\treturn obj\n}", "title": "" }, { "docid": "400c2314755bd2e05a8d0ab3a6ef73d9", "score": "0.5482845", "text": "func (u *UserProfile) SetObject(key string, obj interface{}) {\n\tu.objects[key] = obj\n}", "title": "" }, { "docid": "cce8f9bfd0d27041d258e6fb391b3ef3", "score": "0.54813224", "text": "func (f *Field) Set(val interface{}) error {\n\t// we can't set unexported fields, so be sure this field is exported\n\tif !f.IsExported() {\n\t\treturn errNotExported\n\t}\n\n\t// do we get here? not sure...\n\tif !f.value.CanSet() {\n\t\treturn errNotSettable\n\t}\n\n\tgiven := reflect.ValueOf(val)\n\n\tif f.value.Kind() != given.Kind() {\n\t\treturn fmt.Errorf(\"wrong kind. got: %s want: %s\", given.Kind(), f.value.Kind())\n\t}\n\n\tf.value.Set(given)\n\treturn nil\n}", "title": "" }, { "docid": "5c32d5f88a85f19db10861e708946591", "score": "0.5479612", "text": "func (a *AdditionalPropertiesObject4) Set(fieldName string, value interface{}) {\n\tif a.AdditionalProperties == nil {\n\t\ta.AdditionalProperties = make(map[string]interface{})\n\t}\n\ta.AdditionalProperties[fieldName] = value\n}", "title": "" }, { "docid": "5c32d5f88a85f19db10861e708946591", "score": "0.5479612", "text": "func (a *AdditionalPropertiesObject4) Set(fieldName string, value interface{}) {\n\tif a.AdditionalProperties == nil {\n\t\ta.AdditionalProperties = make(map[string]interface{})\n\t}\n\ta.AdditionalProperties[fieldName] = value\n}", "title": "" }, { "docid": "4c08c4753885332bade1855dcd049ba6", "score": "0.54756725", "text": "func (field *Field) Set(value interface{}) (err error) {\n if !field.Field.IsValid() {\n return errors.New(\"field value not valid\")\n }\n\n if !field.Field.CanAddr() {\n return ErrUnaddressable\n }\n\n reflectValue, ok := value.(reflect.Value)\n if !ok {\n reflectValue = reflect.ValueOf(value)\n }\n\n fieldValue := field.Field\n if reflectValue.IsValid() {\n if reflectValue.Type().ConvertibleTo(fieldValue.Type()) {\n fieldValue.Set(reflectValue.Convert(fieldValue.Type()))\n } else {\n if fieldValue.Kind() == reflect.Ptr {\n if fieldValue.IsNil() {\n fieldValue.Set(reflect.New(field.Struct.Type.Elem()))\n }\n fieldValue = fieldValue.Elem()\n }\n\n if reflectValue.Type().ConvertibleTo(fieldValue.Type()) {\n fieldValue.Set(reflectValue.Convert(fieldValue.Type()))\n } else if scanner, ok := fieldValue.Addr().Interface().(sql.Scanner); ok {\n v := reflectValue.Interface()\n if valuer, ok := v.(driver.Valuer); ok {\n if v, err = valuer.Value(); err == nil {\n err = scanner.Scan(v)\n }\n } else {\n err = scanner.Scan(v)\n }\n } else {\n err = fmt.Errorf(\"could not convert argument of field %s from %s to %s\", field.Name, reflectValue.Type(), 
fieldValue.Type())\n }\n }\n } else {\n field.Field.Set(reflect.Zero(field.Field.Type()))\n }\n\n field.IsBlank = isBlank(field.Field)\n return err\n}", "title": "" }, { "docid": "4b08256af5d50a5b4991393be79e8690", "score": "0.54612935", "text": "func (o *Outcomequantilecondition) SetField(field string, fieldValue interface{}) {\n\t// Get Value object for field\n\ttarget := reflect.ValueOf(o)\n\ttargetField := reflect.Indirect(target).FieldByName(field)\n\n\t// Set value\n\tif fieldValue != nil {\n\t\ttargetField.Set(reflect.ValueOf(fieldValue))\n\t} else {\n\t\t// Must create a new Value (creates **type) then get its element (*type), which will be nil pointer of the appropriate type\n\t\tx := reflect.Indirect(reflect.New(targetField.Type()))\n\t\ttargetField.Set(x)\n\t}\n\n\t// Add field to set field names list\n\tif o.SetFieldNames == nil {\n\t\to.SetFieldNames = make(map[string]bool)\n\t}\n\to.SetFieldNames[field] = true\n}", "title": "" }, { "docid": "c0e130fa6e03255b603b4011c18ee86b", "score": "0.54601", "text": "func (access ObjectAccess) Set(row int, val interface{}) {\n access.rawData[access.indices[row]] = val\n}", "title": "" }, { "docid": "e9f7d50dfb8e498739aa124084585756", "score": "0.5447049", "text": "func (o *SettingsObjectUpdate) SetValue(v map[string]interface{}) {\n\to.Value = v\n}", "title": "" }, { "docid": "d5116c3df2f1a59db1f358d1fcec1168", "score": "0.54446286", "text": "func (bucket Bucket) SetObjectACL(objectKey string, objectACL ACLType) error {\n\toptions := []Option{ObjectACL(objectACL)}\n\tparams := map[string]interface{}{}\n\tparams[\"acl\"] = nil\n\tresp, err := bucket.do(\"PUT\", objectKey, params, options, nil, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer resp.Body.Close()\n\treturn checkRespCode(resp.StatusCode, []int{http.StatusOK})\n}", "title": "" }, { "docid": "5a2ed230ce20b7f466c375d79b5a1986", "score": "0.544269", "text": "func (this *TriggerAction) Set(field string, value interface{}) {\n switch field {\n case \"id\":\n this.SetId(value.(int64))\n break\n case \"agentName\":\n this.agentName = value.(string)\n break\n case \"propertyName\":\n this.propertyName = value.(string)\n break\n case \"propertyValue\":\n this.propertyValue = value.(string)\n break\n }\n}", "title": "" } ]
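The negative passages in the record that ends here repeat one idiom many times: assign a named struct field through the reflect package after checking that the field exists, is settable, and accepts the value's type. A minimal, self-contained Go sketch of that idiom follows as a hedged illustration only; the Config type and its fields are hypothetical and are not taken from any passage above.

package main

import (
	"fmt"
	"reflect"
)

// setField assigns value to the exported struct field named name on the
// struct that obj points to. It mirrors the reflect-based helpers seen in
// the passages above: resolve the field by name, confirm it is settable,
// confirm type compatibility, then Set.
func setField(obj interface{}, name string, value interface{}) error {
	v := reflect.ValueOf(obj)
	if v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct {
		return fmt.Errorf("obj must be a pointer to a struct")
	}
	field := v.Elem().FieldByName(name)
	if !field.IsValid() {
		return fmt.Errorf("no such field: %s", name)
	}
	if !field.CanSet() {
		return fmt.Errorf("field %s is not settable (unexported?)", name)
	}
	val := reflect.ValueOf(value)
	if !val.Type().AssignableTo(field.Type()) {
		return fmt.Errorf("value of type %s is not assignable to field %s (%s)",
			val.Type(), name, field.Type())
	}
	field.Set(val)
	return nil
}

// Config is a hypothetical target struct used only for this example.
type Config struct {
	Name    string
	Retries int
}

func main() {
	c := &Config{}
	if err := setField(c, "Name", "etcd"); err != nil {
		fmt.Println("error:", err)
	}
	if err := setField(c, "Retries", 3); err != nil {
		fmt.Println("error:", err)
	}
	fmt.Printf("%+v\n", *c) // {Name:etcd Retries:3}
}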
219af8813c927aa387d23a638a6961f6
Search searches for issues by given conditions. Returns the matching issue IDs
[ { "docid": "1e89c7ae4ad4c5e076bf5c8d89649d6b", "score": "0.5092734", "text": "func (b *ElasticSearchIndexer) Search(keyword string, repoIDs []int64, limit, start int) (*SearchResult, error) {\n\tkwQuery := elastic.NewMultiMatchQuery(keyword, \"title\", \"content\", \"comments\")\n\tquery := elastic.NewBoolQuery()\n\tquery = query.Must(kwQuery)\n\tif len(repoIDs) > 0 {\n\t\tvar repoStrs = make([]interface{}, 0, len(repoIDs))\n\t\tfor _, repoID := range repoIDs {\n\t\t\trepoStrs = append(repoStrs, repoID)\n\t\t}\n\t\trepoQuery := elastic.NewTermsQuery(\"repo_id\", repoStrs...)\n\t\tquery = query.Must(repoQuery)\n\t}\n\tsearchResult, err := b.client.Search().\n\t\tIndex(b.indexerName).\n\t\tQuery(query).\n\t\tSort(\"id\", true).\n\t\tFrom(start).Size(limit).\n\t\tDo(context.Background())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\thits := make([]Match, 0, limit)\n\tfor _, hit := range searchResult.Hits.Hits {\n\t\tid, _ := strconv.ParseInt(hit.Id, 10, 64)\n\t\thits = append(hits, Match{\n\t\t\tID: id,\n\t\t})\n\t}\n\n\treturn &SearchResult{\n\t\tTotal: searchResult.TotalHits(),\n\t\tHits: hits,\n\t}, nil\n}", "title": "" } ]
[ { "docid": "41416737f83875462fb2f7f3b3e16c9d", "score": "0.6896891", "text": "func (tracker *issueTracker) searchIssues(\n\tqueryFormat string,\n\tv ...interface{},\n) ([]*github.Issue, error) {\n\n\tlogger := log.V(log.Debug)\n\n\t// Format the query.\n\tquery := fmt.Sprintf(queryFormat, v...)\n\n\t// Since GH API does not allow OR queries, we need to send a concurrent request\n\t// for every item in tracker.config.StoryLabels label list.\n\tch := make(chan *searchResult, len(tracker.config.StoryLabels))\n\tfor _, label := range tracker.config.StoryLabels {\n\t\tgo func(label string) {\n\t\t\t// We are only interested in issues for the given repository.\n\t\t\tinnerQuery := fmt.Sprintf(`%v type:issue repo:%v/%v label:\"%v\"`,\n\t\t\t\tquery, tracker.config.GitHubOwner, tracker.config.GitHubRepository, label)\n\n\t\t\ttask := \"Search GitHub: \" + innerQuery\n\n\t\t\tif logger {\n\t\t\t\tlogger.Go(task)\n\t\t\t}\n\n\t\t\tsearchOpts := &github.SearchOptions{}\n\t\t\tsearchOpts.Page = 1\n\t\t\tsearchOpts.PerPage = 50\n\n\t\t\tvar (\n\t\t\t\tacc []*github.Issue\n\t\t\t\tsearched int\n\t\t\t)\n\n\t\t\tclient := tracker.newClient()\n\n\t\t\tfor {\n\t\t\t\t// Fetch another page.\n\t\t\t\tvar (\n\t\t\t\t\tresult *github.IssuesSearchResult\n\t\t\t\t\terr error\n\t\t\t\t)\n\t\t\t\twithRequestAllocated(func() {\n\t\t\t\t\tresult, _, err = client.Search.Issues(innerQuery, searchOpts)\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\tch <- &searchResult{nil, errs.NewError(task, err)}\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\t// Check the issues for exact string match.\n\t\t\t\tfor i := range result.Issues {\n\t\t\t\t\tacc = append(acc, &result.Issues[i])\n\t\t\t\t}\n\n\t\t\t\t// Check whether we have reached the end or not.\n\t\t\t\tsearched += len(result.Issues)\n\t\t\t\tif searched == *result.Total {\n\t\t\t\t\tch <- &searchResult{acc, nil}\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\t// Check the next page in the next iteration.\n\t\t\t\tsearchOpts.Page += 1\n\t\t\t}\n\t\t}(label)\n\t}\n\n\t// Collect the results.\n\tvar issues []*github.Issue\n\tfor i := 0; i < cap(ch); i++ {\n\t\tres := <-ch\n\t\tif err := res.err; err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tissues = append(issues, res.issues...)\n\t}\n\n\t// Make sure there are no duplicates in the list.\n\treturn dedupeIssues(issues), nil\n}", "title": "" }, { "docid": "611a00b5f041cff5dd8d3c7db2d035f9", "score": "0.63615155", "text": "func SearchIssues(terms []string) (*IssuesSearchResult, error) {\n\t// the query may include character that need to be encodes, e.g ? 
or & etc\n\tq := url.QueryEscape(strings.Join(terms, \" \"))\n\tlog.Println(q)\n\tresp, err := http.Get(IssuesURL + \"?q=\" + q)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// We must close the resp.Body on all execution paths.\n\t// See documentation on defer, which ought to make this easier\n\tif resp.StatusCode != http.StatusOK {\n\t\tresp.Body.Close()\n\t\treturn nil, fmt.Errorf(\"search query (%s) failed %s\", q, resp.Status)\n\t}\n\n\tvar result IssuesSearchResult\n\tif err := json.NewDecoder(resp.Body).Decode(&result); err != nil {\n\t\tresp.Body.Close()\n\t\treturn nil, err\n\t}\n\tresp.Body.Close()\n\treturn &result, nil\n}", "title": "" }, { "docid": "f080722931594aa8e96425377998ba52", "score": "0.62984234", "text": "func (c *Client) IssueSearch(query string, opts *jira.SearchOptions) ([]jira.Issue, error) {\n\tissues, _, err := c.jira.Issue.Search(query, opts)\n\treturn issues, err\n}", "title": "" }, { "docid": "980102ea0082b01c01e86e180a270e02", "score": "0.6286921", "text": "func SearchIssues(terms []string) (*IssuesSearchResult, error) {\n\tq := url.QueryEscape(strings.Join(terms, \" \"))\n\tresp, err := http.Get(IssuesURL + \"?q=\" + q)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// We must close resp.Body on all execution paths.\n\t// (Chapter 5 presents 'defer', which makes this simpler.)\n\tif resp.StatusCode != http.StatusOK {\n\t\tresp.Body.Close()\n\t\treturn nil, fmt.Errorf(\"search query failed: %s\", resp.Status)\n\t}\n\tvar result IssuesSearchResult\n\tif err := json.NewDecoder(resp.Body).Decode(&result); err != nil {\n\t\tresp.Body.Close()\n\t\treturn nil, err\n\t}\n\tresp.Body.Close()\n\treturn &result, nil\n}", "title": "" }, { "docid": "5480508f71a548440e2339fa382d568b", "score": "0.6246542", "text": "func SearchIssues(terms []string) (*IssuesSearchResult, error) {\n\tq := url.QueryEscape(strings.Join(terms, \" \"))\n\tresp, err := http.Get(IssuesURL + \"?q=\" + q)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// We must close resp.Body on all execution paths.\n\t// (Chapter 5 presents 'defer', which makes this simpler.)\n\tif resp.StatusCode != http.StatusOK {\n\t\tresp.Body.Close()\n\t\treturn nil, fmt.Errorf(\"search query failed: %s\", resp.Status)\n\t}\n\n\tvar result IssuesSearchResult\n\tif err := json.NewDecoder(resp.Body).Decode(&result); err != nil {\n\t\tresp.Body.Close()\n\t\treturn nil, err\n\t}\n\tresp.Body.Close()\n\treturn &result, nil\n}", "title": "" }, { "docid": "69b05fffbc2f0f1a4c4462d6b1c0acc6", "score": "0.6128107", "text": "func (h *Engine) SearchIssues(ctx context.Context, org string, project string, fs []Filter, newerThan time.Time) ([]*Conversation, time.Time, error) {\n\tfs = openByDefault(fs)\n\tklog.V(1).Infof(\"Gathering raw data for %s/%s search %s - newer than %s\", org, project, toYAML(fs), logu.STime(newerThan))\n\tvar wg sync.WaitGroup\n\n\tvar members map[string]bool\n\tvar open []*github.Issue\n\tvar closed []*github.Issue\n\tvar err error\n\n\torgCutoff := time.Now().Add(h.memberRefresh * -1)\n\tif orgCutoff.After(newerThan) {\n\t\tklog.V(1).Infof(\"Setting org cutoff to %s\", newerThan)\n\t\torgCutoff = newerThan\n\t}\n\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tmembers, err = h.cachedOrgMembers(ctx, org, orgCutoff)\n\t\tif err != nil {\n\t\t\tklog.Errorf(\"members: %v\", err)\n\t\t\treturn\n\t\t}\n\t}()\n\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\topen, err = h.cachedIssues(ctx, org, project, \"open\", 0, newerThan)\n\t\tif err != nil {\n\t\t\tklog.Errorf(\"open issues: %v\", 
err)\n\t\t\treturn\n\t\t}\n\t\tklog.V(1).Infof(\"%s/%s open issue count: %d\", org, project, len(open))\n\t}()\n\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tclosed, err = h.cachedIssues(ctx, org, project, \"closed\", closedIssueDays, newerThan)\n\t\tif err != nil {\n\t\t\tklog.Errorf(\"closed issues: %v\", err)\n\t\t}\n\t\tklog.V(1).Infof(\"%s/%s closed issue count: %d\", org, project, len(closed))\n\t}()\n\n\twg.Wait()\n\n\tvar is []*github.Issue\n\tvar latest time.Time\n\tseen := map[string]bool{}\n\n\tfor _, i := range append(open, closed...) {\n\t\tif i.GetUpdatedAt().After(latest) {\n\t\t\tlatest = i.GetUpdatedAt()\n\t\t}\n\n\t\tif h.debugNumber != 0 {\n\t\t\tif i.GetNumber() == h.debugNumber {\n\t\t\t\tklog.Errorf(\"*** Found debug issue #%d:\\n%s\", i.GetNumber(), formatStruct(*i))\n\n\t\t\t} else {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\n\t\tif seen[i.GetURL()] {\n\t\t\tklog.Errorf(\"unusual: I already saw #%d\", i.GetNumber())\n\t\t\tcontinue\n\t\t}\n\t\tseen[i.GetURL()] = true\n\t\tis = append(is, i)\n\t}\n\n\tvar filtered []*Conversation\n\tklog.V(1).Infof(\"%s/%s aggregate issue count: %d, filtering for:\\n%s\", org, project, len(is), toYAML(fs))\n\n\tfor _, i := range is {\n\t\t// Inconsistency warning: issues use a list of labels, prs a list of label pointers\n\t\tlabels := []*github.Label{}\n\t\tfor _, l := range i.Labels {\n\t\t\tl := l\n\t\t\tlabels = append(labels, l)\n\t\t}\n\n\t\tif !preFetchMatch(i, labels, fs) {\n\t\t\tklog.V(1).Infof(\"#%d - %q did not match item filter: %s\", i.GetNumber(), i.GetTitle(), toYAML(fs))\n\t\t\tcontinue\n\t\t}\n\n\t\tcomments := []*github.IssueComment{}\n\t\tif i.GetComments() > 0 {\n\t\t\tklog.V(1).Infof(\"#%d - %q: need comments for final filtering\", i.GetNumber(), i.GetTitle())\n\t\t\tcomments, err = h.cachedIssueComments(ctx, org, project, i.GetNumber(), i.GetUpdatedAt())\n\t\t\tif err != nil {\n\t\t\t\tklog.Errorf(\"comments: %v\", err)\n\t\t\t}\n\t\t}\n\n\t\tco := h.IssueSummary(i, comments, members[i.User.GetLogin()])\n\t\tco.Labels = labels\n\t\th.seen[co.URL] = co\n\n\t\tif !postFetchMatch(co, fs) {\n\t\t\tklog.V(1).Infof(\"#%d - %q did not match conversation filter: %s\", i.GetNumber(), i.GetTitle(), toYAML(fs))\n\t\t\tcontinue\n\t\t}\n\n\t\tco.Similar = h.FindSimilar(co)\n\t\tif len(co.Similar) > 0 {\n\t\t\tco.Tags = append(co.Tags, Tag{ID: \"similar\", Description: \"Title appears similar to another PR or issue\"})\n\t\t}\n\n\t\tfiltered = append(filtered, co)\n\t}\n\n\tklog.V(1).Infof(\"%d of %d issues within %s/%s matched filters %s\", len(filtered), len(is), org, project, toYAML(fs))\n\treturn filtered, latest, nil\n}", "title": "" }, { "docid": "1754ade1672944c11863c062f6f758a1", "score": "0.60991544", "text": "func SearchIssues(sterm []string) (*IssuesSearchResult, error) {\n\tq := url.QueryEscape(strings.Join(sterm, \" \"))\n\tresp, err := http.Get(IssueURL + \"?q=\" + q)\n\tif err != nil {\n\t\tlog.Fatalf(\"Get issues is failure from %s\\n, And the error is: \\\"%s\\\"\", IssueURL+\"?q=\"+q, err)\n\t\treturn nil, err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn nil, fmt.Errorf(\"Search Issues Failed: %s\", resp.Status)\n\t}\n\n\tvar result IssuesSearchResult\n\tif err := json.NewDecoder(resp.Body).Decode(&result); err != nil { //有多个json实体的时候需要用json.Decode方法来解码\n\t\tlog.Fatalf(\"Resutl of Search is error, the erros is %s\\n\", err)\n\t\treturn nil, err\n\t}\n\n\tfmt.Printf(\"%s\\n\", resp.Body)\n\tvar f interface{}\n\tf = 
json.NewDecoder(resp.Body).Decode(&f)\n\tPrintUnknownJson(f)\n\n\treturn &result, nil\n}", "title": "" }, { "docid": "d68a6364fa13804616d61771370b2317", "score": "0.60324", "text": "func (c *Client) SearchIssues(ctx context.Context, repos []Repository, opts SearchOptions, queryArgs url.Values) (map[string][]github.Issue, error) {\n\tm := make(map[string][]github.Issue, len(c.cfg.Repos))\n\tfor _, repo := range repos {\n\t\tissues, err := c.SearchIssuesByRepo(ctx, repo, opts, queryArgs)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tm[fmt.Sprintf(\"%s/%s\", repo.Owner, repo.Name)] = issues\n\t}\n\treturn m, nil\n}", "title": "" }, { "docid": "cfc5c44b9ac3dca5a769a83cf8be6fa0", "score": "0.5886479", "text": "func (m *monorail) Search(ctx context.Context) ([]*types.Issue, *types.IssueCountsData, error) {\n\tmonorailIssues, err := m.searchIssuesWithPagination()\n\tif err != nil {\n\t\treturn nil, nil, skerr.Wrapf(err, \"error when searching issues\")\n\t}\n\n\t// Convert monorail issues into bug_framework's generic issues\n\tissues := []*types.Issue{}\n\tcountsData := &types.IssueCountsData{}\n\tfor _, mi := range monorailIssues {\n\t\t// Find the owner.\n\t\towner := \"\"\n\t\tif mi.Owner.User != \"\" {\n\t\t\t// Check the cache before making an external API call.\n\t\t\tif email, ok := userToEmailCache[mi.Owner.User]; ok {\n\t\t\t\towner = email\n\t\t\t} else {\n\t\t\t\t// Find the owner's email address.\n\t\t\t\tb, err := m.makeJSONCall([]byte(fmt.Sprintf(`{\"name\": \"%s\"}`, mi.Owner.User)), \"Users\", \"GetUser\")\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, nil, skerr.Wrapf(err, \"Users.GetUser JSON API call failed\")\n\t\t\t\t}\n\t\t\t\tvar monorailUser struct {\n\t\t\t\t\tDisplayName string `json:\"displayName\"`\n\t\t\t\t}\n\t\t\t\tif err := json.Unmarshal(b, &monorailUser); err != nil {\n\t\t\t\t\treturn nil, nil, err\n\t\t\t\t}\n\t\t\t\t// Cache results for next time.\n\t\t\t\tuserToEmailCache[mi.Owner.User] = monorailUser.DisplayName\n\t\t\t\towner = monorailUser.DisplayName\n\t\t\t}\n\t\t}\n\n\t\t// Find priority using MonorailProjectToPriorityData\n\t\tpriority := types.StandardizedPriority(\"\")\n\t\tif priorityData, ok := monorailProjectToPriorityData[m.queryConfig.Instance]; ok {\n\t\t\tfor _, fv := range mi.FieldValues {\n\t\t\t\tif priorityData.FieldName == fv.Field {\n\t\t\t\t\t// Found the priority field for this project. Now translate\n\t\t\t\t\t// the priority field value into the generic priority value (P0, P1, ...)\n\t\t\t\t\tif p, ok := priorityData.PriorityMapping[fv.Value]; ok {\n\t\t\t\t\t\tpriority = p\n\t\t\t\t\t\tbreak\n\t\t\t\t\t} else {\n\t\t\t\t\t\tsklog.Errorf(\"Could not find priority value %s for project %s\", fv.Value, m.queryConfig.Instance)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tsklog.Errorf(\"Could not find MonorailProjectToPriorityData for project %s\", m.queryConfig.Instance)\n\t\t}\n\n\t\t// Populate counts data.\n\t\tcountsData.OpenCount++\n\t\tif owner == \"\" {\n\t\t\tcountsData.UnassignedCount++\n\t\t}\n\t\tcountsData.IncPriority(priority)\n\t\tsloViolation, reason, d := types.IsPrioritySLOViolation(time.Now(), mi.CreatedTime, mi.ModifiedTime, priority)\n\t\tcountsData.IncSLOViolation(sloViolation, priority)\n\t\tif util.In(mi.State.Status, m.queryConfig.UntriagedStatuses) {\n\t\t\tcountsData.UntriagedCount++\n\t\t} else if m.queryConfig.UnassignedIsUntriaged && owner == \"\" {\n\t\t\tcountsData.UntriagedCount++\n\t\t}\n\n\t\t// Monorail issue names look like \"projects/skia/issues/10783\". 
Extract out the \"10783\".\n\t\tnameTokens := strings.Split(mi.Name, \"/\")\n\t\tid := nameTokens[len(nameTokens)-1]\n\n\t\tissues = append(issues, &types.Issue{\n\t\t\tId: id,\n\t\t\tState: mi.State.Status,\n\t\t\tPriority: priority,\n\t\t\tOwner: owner,\n\t\t\tLink: m.GetIssueLink(m.queryConfig.Instance, id),\n\n\t\t\tSLOViolation: sloViolation,\n\t\t\tSLOViolationReason: reason,\n\t\t\tSLOViolationDuration: d,\n\n\t\t\tCreatedTime: mi.CreatedTime,\n\t\t\tModifiedTime: mi.ModifiedTime,\n\n\t\t\tTitle: mi.Title,\n\t\t})\n\t}\n\n\treturn issues, countsData, nil\n}", "title": "" }, { "docid": "39256fd23eacff634b9cbde07116bf62", "score": "0.5686338", "text": "func (tracker *issueTracker) searchIssuesAndWrap(\n\tqueryFormat string,\n\tv ...interface{},\n) ([]common.Story, error) {\n\n\tissues, err := tracker.searchIssues(queryFormat, v...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn toCommonStories(issues, tracker), nil\n}", "title": "" }, { "docid": "abe8a9059a3c9d8a953ce6d35586482e", "score": "0.55815184", "text": "func (tracker *issueTracker) goSearchIssues(\n\tch chan<- *searchResult,\n\tqueryFormat string,\n\tv ...interface{},\n) {\n\tgo func() {\n\t\tissues, err := tracker.searchIssues(queryFormat, v...)\n\t\tch <- &searchResult{issues, err}\n\t}()\n}", "title": "" }, { "docid": "559ffa75d504015ad0c342c8c279759a", "score": "0.5579919", "text": "func SearchIssueKeys(str string, projectKey string) []string {\n\trep := regexp.MustCompile(projectKey + `-[\\d]+`)\n\tkeys := rep.FindAllString(str, -1)\n\treturn keys\n}", "title": "" }, { "docid": "a775008478f251e8502d0cc772009c22", "score": "0.5531154", "text": "func (f *FakeClient) FindIssues(query, sort string, asc bool) ([]github.Issue, error) {\n\treturn f.FindIssuesWithOrg(\"\", query, sort, asc)\n}", "title": "" }, { "docid": "42da2de5661b0be3d7f3d43309ea3d23", "score": "0.5505531", "text": "func (m *MaintainerManager) GetIssuesFound(query string) ([]*gh.SearchItem, error) {\n\to := &gh.Options{}\n\to.QueryParams = map[string]string{\n\t\t\"sort\": \"updated\",\n\t\t\"order\": \"asc\",\n\t\t\"per_page\": \"100\",\n\t}\n\tprevSize := -1\n\tpage := 1\n\tissuesFound := []*gh.SearchItem{}\n\tfor len(issuesFound) != prevSize {\n\t\to.QueryParams[\"page\"] = strconv.Itoa(page)\n\t\tif issues, err := m.client.SearchIssues(query, o); err != nil {\n\t\t\treturn nil, err\n\t\t} else {\n\t\t\tprevSize = len(issuesFound)\n\t\t\tissuesFound = append(issuesFound, issues...)\n\t\t\tpage += 1\n\t\t}\n\t\tfmt.Printf(\".\")\n\t}\n\treturn issuesFound, nil\n}", "title": "" }, { "docid": "9b367425b8827389ae91bbed9fcaba54", "score": "0.5456019", "text": "func (j dryrunJIRAClient) ListIssues(ids []int) ([]jira.Issue, error) {\n\tlog := j.config.GetLogger()\n\n\tidStrs := make([]string, len(ids))\n\tfor i, v := range ids {\n\t\tidStrs[i] = fmt.Sprint(v)\n\t}\n\n\tvar jql string\n\t// If the list of IDs is too long, we get a 414 Request-URI Too Large, so in that case,\n\t// we'll need to do the filtering ourselves.\n\tif len(ids) < maxJQLIssueLength {\n\t\tjql = fmt.Sprintf(\"project='%s' AND cf[%s] in (%s)\",\n\t\t\tj.config.GetProjectKey(), j.config.GetFieldID(cfg.GitHubID), strings.Join(idStrs, \",\"))\n\t} else {\n\t\tjql = fmt.Sprintf(\"project='%s'\", j.config.GetProjectKey())\n\t}\n\n\tji, res, err := j.request(func() (interface{}, *jira.Response, error) {\n\t\treturn j.client.Issue.Search(jql, nil)\n\t})\n\tif err != nil {\n\t\tlog.Errorf(\"Error retrieving JIRA issues: %v\", err)\n\t\treturn nil, getErrorBody(j.config, 
res)\n\t}\n\tjiraIssues, ok := ji.([]jira.Issue)\n\tif !ok {\n\t\tlog.Errorf(\"Get JIRA issues did not return issues! Got: %v\", ji)\n\t\treturn nil, fmt.Errorf(\"get JIRA issues failed: expected []jira.Issue; got %T\", ji)\n\t}\n\n\tvar issues []jira.Issue\n\tif len(ids) < maxJQLIssueLength {\n\t\t// The issues were already filtered by our JQL, so use as is\n\t\tissues = jiraIssues\n\t} else {\n\t\t// Filter only issues which have a defined GitHub ID in the list of IDs\n\t\tfor _, v := range jiraIssues {\n\t\t\tif id, err := v.Fields.Unknowns.Int(j.config.GetFieldKey(cfg.GitHubID)); err == nil {\n\t\t\t\tfor _, idOpt := range ids {\n\t\t\t\t\tif id == int64(idOpt) {\n\t\t\t\t\t\tissues = append(issues, v)\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn issues, nil\n}", "title": "" }, { "docid": "b4872ff5f86c0f9bbd18bdd43af00336", "score": "0.54209787", "text": "func Issues() *IssueFinder {\n\tvar f = &IssueFinder{conditions: make(map[string]any), op: dbi.DB.Operation()}\n\tf.conditions[\"ignored = ?\"] = false\n\tf.op.Dbg = dbi.Debug\n\treturn f\n}", "title": "" }, { "docid": "580e40e3d19e20205caa96bc0c78248f", "score": "0.53694344", "text": "func (c *Client) SprintIssues(boardID, sprintID int, jql string, limit uint) (*SearchResult, error) {\n\tpath := fmt.Sprintf(\"/board/%d/sprint/%d/issue?maxResults=%d\", boardID, sprintID, limit)\n\tif jql != \"\" {\n\t\tpath += fmt.Sprintf(\"&jql=%s\", url.QueryEscape(jql))\n\t}\n\n\tres, err := c.GetV1(context.Background(), path, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif res == nil {\n\t\treturn nil, ErrEmptyResponse\n\t}\n\tdefer func() { _ = res.Body.Close() }()\n\n\tif res.StatusCode != http.StatusOK {\n\t\treturn nil, formatUnexpectedResponse(res)\n\t}\n\n\tvar out SearchResult\n\n\terr = json.NewDecoder(res.Body).Decode(&out)\n\n\treturn &out, err\n}", "title": "" }, { "docid": "6656ff393ed6c17674a953ff7a18b327", "score": "0.5368891", "text": "func buildIssuesQuery(opts *IssuesOptions) *xorm.Session {\n\tsess := x.NewSession()\n\n\tif opts.Page <= 0 {\n\t\topts.Page = 1\n\t}\n\n\tif opts.RepoID > 0 {\n\t\tsess.Where(\"issue.repo_id=?\", opts.RepoID).And(\"issue.is_closed=?\", opts.IsClosed)\n\t} else if opts.RepoIDs != nil {\n\t\t// In case repository IDs are provided but actually no repository has issue.\n\t\tif len(opts.RepoIDs) == 0 {\n\t\t\treturn nil\n\t\t}\n\t\tsess.In(\"issue.repo_id\", opts.RepoIDs).And(\"issue.is_closed=?\", opts.IsClosed)\n\t} else {\n\t\tsess.Where(\"issue.is_closed=?\", opts.IsClosed)\n\t}\n\n\tif opts.AssigneeID > 0 {\n\t\tsess.And(\"issue.assignee_id=?\", opts.AssigneeID)\n\t} else if opts.PosterID > 0 {\n\t\tsess.And(\"issue.poster_id=?\", opts.PosterID)\n\t}\n\n\tif opts.MilestoneID > 0 {\n\t\tsess.And(\"issue.milestone_id=?\", opts.MilestoneID)\n\t}\n\n\tsess.And(\"issue.is_pull=?\", opts.IsPull)\n\n\tswitch opts.SortType {\n\tcase \"oldest\":\n\t\tsess.Asc(\"issue.created_unix\")\n\tcase \"recentupdate\":\n\t\tsess.Desc(\"issue.updated_unix\")\n\tcase \"leastupdate\":\n\t\tsess.Asc(\"issue.updated_unix\")\n\tcase \"mostcomment\":\n\t\tsess.Desc(\"issue.num_comments\")\n\tcase \"leastcomment\":\n\t\tsess.Asc(\"issue.num_comments\")\n\tcase \"priority\":\n\t\tsess.Desc(\"issue.priority\")\n\tdefault:\n\t\tsess.Desc(\"issue.created_unix\")\n\t}\n\n\tif len(opts.Labels) > 0 && opts.Labels != \"0\" {\n\t\tlabelIDs := strings.Split(opts.Labels, \",\")\n\t\tif len(labelIDs) > 0 {\n\t\t\tsess.Join(\"INNER\", \"issue_label\", \"issue.id = 
issue_label.issue_id\").In(\"issue_label.label_id\", labelIDs)\n\t\t}\n\t}\n\n\tif opts.IsMention {\n\t\tsess.Join(\"INNER\", \"issue_user\", \"issue.id = issue_user.issue_id\").And(\"issue_user.is_mentioned = ?\", true)\n\n\t\tif opts.UserID > 0 {\n\t\t\tsess.And(\"issue_user.uid = ?\", opts.UserID)\n\t\t}\n\t}\n\n\treturn sess\n}", "title": "" }, { "docid": "899026d16cf27496af8882b7a417bc30", "score": "0.53528875", "text": "func searchAndUpdateIssues(c context.Context, assigner *model.Assigner, task *model.Task) (int32, error) {\n\tassignee, ccs, err := findAssigneeAndCCs(c, assigner, task)\n\tif err != nil {\n\t\ttask.WriteLog(c, \"Failed to find assignees and CCs; %s\", err)\n\t\treturn 0, err\n\t}\n\tif assignee == nil && ccs == nil {\n\t\t// early stop if there is no one available to assign or cc issues to.\n\t\ttask.WriteLog(\n\t\t\tc, \"No one was available to be assigned or CCed; \"+\n\t\t\t\t\"skipping issue searches and updates\",\n\t\t)\n\t\treturn 0, nil\n\t}\n\n\tmc := getMonorailClient(c)\n\tissues, err := searchIssues(c, mc, assigner, task)\n\tif err != nil {\n\t\ttask.WriteLog(c, \"Failed to search issues; %s\", err)\n\t\treturn 0, err\n\t}\n\n\t// As long as it succeeded to update at least one issue, the task is\n\t// not marked as failed.\n\tnUpdated, nFailed := updateIssues(c, mc, assigner, task, issues, assignee, ccs)\n\tif nUpdated == 0 && nFailed > 0 {\n\t\treturn 0, errors.New(\"all issue updates failed\")\n\t}\n\treturn nUpdated, nil\n}", "title": "" }, { "docid": "80ec72ccb7b5730d16da2d2d532c5e22", "score": "0.5334879", "text": "func (j realJIRAClient) ListIssues(ids []int) ([]jira.Issue, error) {\n\tlog := j.config.GetLogger()\n\n\tidStrs := make([]string, len(ids))\n\tfor i, v := range ids {\n\t\tidStrs[i] = fmt.Sprint(v)\n\t}\n\n\tvar jql string\n\t// If the list of IDs is too long, we get a 414 Request-URI Too Large, so in that case,\n\t// we'll need to do the filtering ourselves.\n\tif len(ids) < maxJQLIssueLength {\n\t\tjql = fmt.Sprintf(\"project='%s' AND cf[%s] in (%s)\",\n\t\t\tj.config.GetProjectKey(), j.config.GetFieldID(cfg.GitHubID), strings.Join(idStrs, \",\"))\n\t} else {\n\t\tjql = fmt.Sprintf(\"project='%s'\", j.config.GetProjectKey())\n\t}\n\n\tji, res, err := j.request(func() (interface{}, *jira.Response, error) {\n\t\treturn j.client.Issue.Search(jql, nil)\n\t})\n\tif err != nil {\n\t\tlog.Errorf(\"Error retrieving JIRA issues: %v\", err)\n\t\treturn nil, getErrorBody(j.config, res)\n\t}\n\tjiraIssues, ok := ji.([]jira.Issue)\n\tif !ok {\n\t\tlog.Errorf(\"Get JIRA issues did not return issues! 
Got: %v\", ji)\n\t\treturn nil, fmt.Errorf(\"get JIRA issues failed: expected []jira.Issue; got %T\", ji)\n\t}\n\n\tvar issues []jira.Issue\n\tif len(ids) < maxJQLIssueLength {\n\t\t// The issues were already filtered by our JQL, so use as is\n\t\tissues = jiraIssues\n\t} else {\n\t\t// Filter only issues which have a defined GitHub ID in the list of IDs\n\t\tfor _, v := range jiraIssues {\n\t\t\tif id, err := v.Fields.Unknowns.Int(j.config.GetFieldKey(cfg.GitHubID)); err == nil {\n\t\t\t\tfor _, idOpt := range ids {\n\t\t\t\t\tif id == int64(idOpt) {\n\t\t\t\t\t\tissues = append(issues, v)\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn issues, nil\n}", "title": "" }, { "docid": "e38dd2be38c943efed43da9622d6db14", "score": "0.5276686", "text": "func (c *Client) IssuesByFilter(f *IssueFilter) ([]Issue, error) {\n\tissues, err := getIssues(c, \"/issues.json?key=\"+c.apikey+c.getPaginationClause()+getIssueFilterClause(f))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn issues, nil\n}", "title": "" }, { "docid": "65a0511c1ea13ae391f020416362c0ba", "score": "0.5263981", "text": "func (g GitHub) findIssuesWithString(id string) ([]string, error) {\n\tquery := fmt.Sprintf(\"repo:%s %s\", g.Repo, id)\n\topts := &github.SearchOptions{\n\t\tSort: \"date\",\n\t\tOrder: \"desc\",\n\t\tListOptions: github.ListOptions{Page: 1, PerPage: 1},\n\t}\n\tresult, _, err := g.Client.Search.Issues(context.Background(), query, opts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor _, issue := range result.Issues {\n\t\treturn []string{*issue.URL}, nil\n\t}\n\n\treturn nil, nil\n}", "title": "" }, { "docid": "7714a4d20530471ae579cfc699847293", "score": "0.5198553", "text": "func (g *Gerrit) Search(ctx context.Context, limit int, sortResults bool, terms ...*SearchTerm) ([]*ChangeInfo, error) {\n\tvar issues changeListSortable\n\tfor {\n\t\tdata := make([]*ChangeInfo, 0)\n\t\tqueryLimit := util.MinInt(limit-len(issues), maxSearchResultLimit)\n\t\tskip := len(issues)\n\n\t\tq := url.Values{}\n\t\tq.Add(\"q\", queryString(terms))\n\t\tq.Add(\"n\", strconv.Itoa(queryLimit))\n\t\tq.Add(\"S\", strconv.Itoa(skip))\n\t\tsearchUrl := \"/changes/?\" + q.Encode()\n\t\terr := g.get(ctx, searchUrl, &data, nil)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"Gerrit search failed: %v\", err)\n\t\t}\n\t\tvar moreChanges bool\n\n\t\tfor _, issue := range data {\n\t\t\t// See if there are more changes available.\n\t\t\tmoreChanges = issue.MoreChanges\n\t\t\tissues = append(issues, fixupChangeInfo(issue))\n\t\t}\n\t\tif len(issues) >= limit || !moreChanges {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif sortResults {\n\t\tsort.Sort(issues)\n\t}\n\treturn issues, nil\n}", "title": "" }, { "docid": "81637f9fd108312563e338b4bf18ed9e", "score": "0.51932555", "text": "func Search(terms []string) (*github.IssuesSearchResult, error) {\n\tvar result github.IssuesSearchResult\n\tdata, err := ioutil.ReadFile(\"issues.json\")\n\tif err != nil {\n\t\tlog.Fatalf(err.Error())\n\t}\n\tif err := json.Unmarshal(data, &result); err != nil {\n\t\tlog.Fatalf(err.Error())\n\t}\n\treturn &result, nil\n}", "title": "" }, { "docid": "e41ba5be8bcc0ff4e031595ea4094feb", "score": "0.51717025", "text": "func (b *Indexer) Search(ctx context.Context, keyword string, repoIDs []int64, limit, start int) (*internal.SearchResult, error) {\n\trepoFilters := make([]string, 0, len(repoIDs))\n\tfor _, repoID := range repoIDs {\n\t\trepoFilters = append(repoFilters, \"repo_id = \"+strconv.FormatInt(repoID, 10))\n\t}\n\tfilter := 
strings.Join(repoFilters, \" OR \")\n\tsearchRes, err := b.inner.Client.Index(b.inner.VersionedIndexName()).Search(keyword, &meilisearch.SearchRequest{\n\t\tFilter: filter,\n\t\tLimit: int64(limit),\n\t\tOffset: int64(start),\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\thits := make([]internal.Match, 0, len(searchRes.Hits))\n\tfor _, hit := range searchRes.Hits {\n\t\thits = append(hits, internal.Match{\n\t\t\tID: int64(hit.(map[string]interface{})[\"id\"].(float64)),\n\t\t})\n\t}\n\treturn &internal.SearchResult{\n\t\tTotal: searchRes.TotalHits,\n\t\tHits: hits,\n\t}, nil\n}", "title": "" }, { "docid": "821b1ae4852dc5e6d506fbe4de1d5633", "score": "0.51047117", "text": "func (m *monorail) searchIssuesWithPagination() ([]monorailIssue, error) {\n\tissues := []monorailIssue{}\n\tmc := m.queryConfig\n\n\t// Put in a loop till there are no new pages.\n\tnextPageToken := \"\"\n\tfor {\n\t\tquery := fmt.Sprintf(`{\"projects\": [\"projects/%s\"], \"query\": \"%s\", \"page_token\": \"%s\"}`, mc.Instance, mc.Query, nextPageToken)\n\t\tb, err := m.makeJSONCall([]byte(query), \"Issues\", \"SearchIssues\")\n\t\tif err != nil {\n\t\t\treturn nil, skerr.Wrapf(err, \"Issues.SearchIssues JSON API call failed\")\n\t\t}\n\t\tvar monorailIssues struct {\n\t\t\tIssues []monorailIssue `json:\"issues\"`\n\t\t\tNextPageToken string `json:\"nextPageToken\"`\n\t\t}\n\t\tif err := json.Unmarshal(b, &monorailIssues); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tissues = append(issues, monorailIssues.Issues...)\n\t\tnextPageToken = monorailIssues.NextPageToken\n\t\tif nextPageToken == \"\" {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn issues, nil\n}", "title": "" }, { "docid": "548b8831c795d755ceecd5c448d84833", "score": "0.505592", "text": "func (s *JobStore) Find(ctx context.Context, filter []*v1.FilterExpression, order []*v1.OrderExpression, start, limit int) (slice []v1.JobStatus, total int, err error) {\n\tfieldMap := map[string]string{\n\t\t\"name\": \"name\",\n\t\t\"owner\": \"owner\",\n\t\t\"phase\": \"phase\",\n\t\t\"repo.owner\": \"repo_owner\",\n\t\t\"repo.repo\": \"repo_repo\",\n\t\t\"repo.host\": \"repo_host\",\n\t\t\"repo.ref\": \"repo_ref\",\n\t\t\"trigger\": \"trigger\",\n\t\t\"success\": \"success\",\n\t\t\"created\": \"created\",\n\t}\n\n\tvar (\n\t\twhereExps []string\n\t\targs []interface{}\n\t)\n\tfor _, f := range filter {\n\t\tif len(f.Terms) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar terms []string\n\t\tfor _, t := range f.Terms {\n\t\t\tvar not string\n\t\t\tif t.Negate {\n\t\t\t\tnot = \"NOT\"\n\t\t\t}\n\n\t\t\tfield, ok := fieldMap[t.Field]\n\t\t\tif !ok {\n\t\t\t\treturn nil, 0, xerrors.Errorf(\"unknown field %s\", t.Field)\n\t\t\t}\n\n\t\t\tvar op string\n\t\t\tswitch t.Operation {\n\t\t\tcase v1.FilterOp_OP_CONTAINS:\n\t\t\t\top = \"LIKE '%' || ? || '%'\"\n\t\t\tcase v1.FilterOp_OP_ENDS_WITH:\n\t\t\t\top = \"LIKE '%' || ?\"\n\t\t\tcase v1.FilterOp_OP_EQUALS:\n\t\t\t\top = \"= ?\"\n\t\t\tcase v1.FilterOp_OP_STARTS_WITH:\n\t\t\t\top = \"LIKE ? 
|| '%'\"\n\t\t\tcase v1.FilterOp_OP_EXISTS:\n\t\t\t\top = \"IS NOT NULL\"\n\t\t\tdefault:\n\t\t\t\treturn nil, 0, xerrors.Errorf(\"unknown operation %v\", t.Operation)\n\t\t\t}\n\t\t\texpr := fmt.Sprintf(\"%s %s %s\", not, field, op)\n\t\t\tterms = append(terms, expr)\n\t\t\targs = append(args, t.Value)\n\t\t}\n\n\t\texpr := fmt.Sprintf(\"(%s)\", strings.Join(terms, \" OR \"))\n\t\twhereExps = append(whereExps, expr)\n\t}\n\twhereExp := strings.Join(whereExps, \" AND \")\n\tif whereExp != \"\" {\n\t\twhereExp = \"WHERE \" + whereExp\n\t\tprev := \"\"\n\t\tfor i := 1; prev != whereExp; i++ {\n\t\t\tprev = whereExp\n\t\t\twhereExp = strings.Replace(whereExp, \"?\", fmt.Sprintf(\"$%d\", i), 1)\n\t\t}\n\t}\n\n\tvar orderExps []string\n\tfor _, o := range order {\n\t\tfield, ok := fieldMap[o.Field]\n\t\tif !ok {\n\t\t\treturn nil, 0, xerrors.Errorf(\"unknown field %s\", o.Field)\n\t\t}\n\n\t\tdir := \"DESC\"\n\t\tif o.Ascending {\n\t\t\tdir = \"ASC\"\n\t\t}\n\t\torderExps = append(orderExps, fmt.Sprintf(\"%s %s\", field, dir))\n\t}\n\tvar orderExp string\n\tif len(orderExps) > 0 {\n\t\torderExp = fmt.Sprintf(\"ORDER BY %s\", strings.Join(orderExps, \", \"))\n\t}\n\n\tlimitExp := \"ALL\"\n\tif limit > 0 {\n\t\tlimitExp = fmt.Sprintf(\"%d\", limit)\n\t}\n\n\tcountQuery := fmt.Sprintf(\"SELECT COUNT(1) FROM job_status %s\", whereExp)\n\tlog.WithField(\"query\", countQuery).Debug(\"running query\")\n\terr = s.DB.QueryRow(countQuery, args...).Scan(&total)\n\tif err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\tquery := fmt.Sprintf(\"SELECT data FROM job_status %s %s LIMIT %s OFFSET %d\", whereExp, orderExp, limitExp, start)\n\tlog.WithField(\"query\", query).Debug(\"running query\")\n\trows, err := s.DB.Query(query, args...)\n\tif err != nil {\n\t\treturn nil, 0, err\n\t}\n\tdefer rows.Close()\n\n\tvar result []v1.JobStatus\n\tfor rows.Next() {\n\t\tvar data string\n\t\terr = rows.Scan(&data)\n\t\tif err != nil {\n\t\t\treturn nil, 0, err\n\t\t}\n\n\t\tvar res v1.JobStatus\n\t\terr = jsonpb.UnmarshalString(data, &res)\n\t\tif err != nil {\n\t\t\treturn nil, 0, err\n\t\t}\n\n\t\tresult = append(result, res)\n\t}\n\tif rows.Err() != nil {\n\t\treturn nil, 0, err\n\t}\n\n\treturn result, total, nil\n}", "title": "" }, { "docid": "2b64875e0ac8eef66ac1b0077fd29a25", "score": "0.49486703", "text": "func (maidRepo *MaidRepo) SearchMaids(conte context.Context) ([]*model.Maid, error) {\n\toffset := conte.Value(\"offset\").(int)\n\tlimit := conte.Value(\"limit\").(int)\n\tq := conte.Value(\"q\").(string)\n\tprintln(offset, limit, q)\n\n\tcreate := func(val int64) *int64 {\n\t\treturn &val\n\t}\n\n\toffsetAndLimit := struct {\n\t\tOffset *int64\n\t\tLimit *int64\n\t}{\n\t\tOffset: create(int64(offset)),\n\t\tLimit: create(int64(limit)),\n\t}\n\tmaids := []*model.Maid{}\n\t// creating an index\n\tif cursor, er := maidRepo.DB.Collection(model.SMAID).Find(conte, bson.M{\"$text\": bson.D{{\"$search\", q + \"*\"}}} /*Options*/, &options.FindOptions{Limit: offsetAndLimit.Limit}, &options.FindOptions{Skip: offsetAndLimit.Offset}); er == nil && cursor != nil {\n\t\tfor cursor.Next(conte) {\n\t\t\tmaid := &model.Maid{}\n\t\t\tif er := cursor.Decode(maid); er == nil {\n\t\t\t\tmaids = append(maids, maid)\n\t\t\t}\n\t\t}\n\t\treturn maids, nil\n\t} else {\n\t\tprintln(er.Error())\n\t}\n\treturn nil, nil\n}", "title": "" }, { "docid": "7c8f93709d1e1f8dd2888d92f0f7f8ab", "score": "0.4923057", "text": "func GetSolutionsForIssue(issueid int) []*Solution {\n\tsolutions := make([]*Solution, 
0)\n\n\tGetDB().Find(&solutions).Where(\"exists (select 1 from constraint_issues_solution_maps where solutions.id = solution_id and issue_id = ?)\", issueid)\n\treturn solutions\n}", "title": "" }, { "docid": "f9f59ebca85749380b81d6f3c8978f59", "score": "0.4887923", "text": "func (b *Backend) findOccurrencesForCriteria(ctx context.Context, projectPath string, resourceURIs []string) ([]*pb.Occurrence, error) {\n\tvar uriFilters []string\n\tfor _, url := range resourceURIs {\n\t\turiFilters = append(uriFilters, fmt.Sprintf(\"resourceUrl=%q\", url))\n\t}\n\n\toccurences, err := b.client.ListOccurrences(ctx,\n\t\t&pb.ListOccurrencesRequest{\n\t\t\tParent: projectPath,\n\t\t\tFilter: strings.Join(uriFilters, \" OR \"),\n\t\t},\n\t)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn occurences.GetOccurrences(), nil\n}", "title": "" }, { "docid": "9d671b38f3a3976227b2e6c06f43a802", "score": "0.4850857", "text": "func (query *IssueQueryInMemory) GetIssues(args types.GetIssuesInput) <-chan repository.QueryResult {\n\tresult := make(chan repository.QueryResult)\n\ttotal := 0\n\n\tgo func() {\n\t\tissues := []domain.Issue{}\n\n\t\tif args.First == nil && args.Last == nil {\n\t\t\tresult <- repository.QueryResult{\n\t\t\t\tError: repository.IssueQueryError{Code: repository.IssueQueryErrorMissingPaginationBoundaries},\n\t\t\t}\n\t\t}\n\n\t\tif args.First != nil && args.Last != nil {\n\t\t\tresult <- repository.QueryResult{\n\t\t\t\tError: repository.IssueQueryError{Code: repository.IssueQueryErrorPagination},\n\t\t\t}\n\t\t}\n\n\t\tfor _, issue := range query.Storage.IssueMap {\n\t\t\tif issue.ProjectID == args.ProjectID {\n\t\t\t\tissues = append(issues, issue)\n\t\t\t}\n\t\t}\n\n\t\t// Filter\n\n\t\t// Sort\n\t\tby(func(p1, p2 *domain.Issue) bool {\n\t\t\treturn p1.ID < p2.ID\n\t\t}).Sort(issues)\n\n\t\t// Calculate total query\n\t\ttotal = len(issues)\n\t\tif total == 0 {\n\t\t\tresult <- repository.QueryResult{\n\t\t\t\tResult: issues,\n\t\t\t}\n\t\t}\n\n\t\t// Cursor\n\t\tif args.After != nil {\n\t\t\tvar offset int\n\t\t\tfor index, item := range issues {\n\t\t\t\tif item.ID > *args.After {\n\t\t\t\t\toffset = index\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\tissues = issues[offset:]\n\t\t}\n\n\t\tif args.Before != nil {\n\t\t\tvar offset int\n\t\t\tfor index, item := range issues {\n\t\t\t\tif item.ID == *args.Before {\n\t\t\t\t\toffset = index\n\t\t\t\t}\n\t\t\t}\n\t\t\tissues = issues[:offset]\n\t\t}\n\n\t\t// Get n first items\n\t\tif args.First != nil {\n\t\t\tlimit := *args.First\n\t\t\tif limit > len(issues) {\n\t\t\t\tlimit = len(issues)\n\t\t\t}\n\t\t\tissues = issues[:limit]\n\t\t}\n\n\t\tif args.Last != nil {\n\t\t\tlimit := *args.Last\n\t\t\tif limit > len(issues) {\n\t\t\t\tlimit = len(issues)\n\t\t\t}\n\t\t\toffset := len(issues) - limit\n\t\t\tissues = issues[offset:]\n\t\t}\n\n\t\t// Result\n\t\tres := repository.QueryResult{\n\t\t\tResult: issues,\n\t\t\tTotal: total,\n\t\t}\n\n\t\tif len(issues) > 0 {\n\t\t\tres.PageInfo = repository.PageInfo{\n\t\t\t\tStartCursor: issues[0].ID,\n\t\t\t\tEndCursor: issues[len(issues)-1].ID,\n\t\t\t\t// Todo implement hasNextPage and endNextPage\n\t\t\t}\n\t\t}\n\n\t\tresult <- res\n\t\tclose(result)\n\t}()\n\n\treturn result\n}", "title": "" }, { "docid": "3cd98c4f7f903b8d4a0eea5c4b923419", "score": "0.48395908", "text": "func FindIssuesByKey(key string) ([]*Issue, error) {\n\tvar parts = strings.Split(key, \"/\")\n\tif len(parts) != 2 {\n\t\treturn nil, fmt.Errorf(\"invalid issue key %q\", key)\n\t}\n\tif len(parts[1]) != 10 {\n\t\treturn 
nil, fmt.Errorf(\"invalid issue key %q\", key)\n\t}\n\n\tvar lccn = parts[0]\n\tvar dateShort = parts[1][:8]\n\tvar date = fmt.Sprintf(\"%s-%s-%s\", dateShort[:4], dateShort[4:6], dateShort[6:8])\n\n\tvar ed, err = strconv.Atoi(parts[1][8:])\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"invalid issue key %q\", key)\n\t}\n\n\treturn Issues().LCCN(lccn).date(date).edition(ed).Fetch()\n}", "title": "" }, { "docid": "e274b684be5bb193f10d78d2e876a925", "score": "0.47846246", "text": "func searchSQL(searchInput string, userid string) []Note {\n\tvar searchNotes []Note\n\tvar input = searchInput\n\n\tvar note Note\n\n\tfmt.Println(input)\n\n\trows, err := db.Query(\"SELECT DISTINCT note.NoteID, note.UserId, note.title, note.contents, note.datecreated, note.dateupdated FROM note LEFT JOIN noteaccess ON note.noteid = noteaccess.noteid WHERE (note.userid = \" + userid + \" OR (noteaccess.userid = \" + userid + \" AND noteaccess.read = true)) AND note.contents LIKE \" + \"'%\" + searchInput + \"%'\" + \" OR note.Title LIKE \" + \"'%\" + searchInput + \"%'\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tfor rows.Next() {\n\t\t//Put SQL data into object\n\t\terr = rows.Scan(&note.NoteID, &note.UserID, &note.Title, &note.Contents, &note.DateCreated, &note.DateUpdated)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\t//Add each note to searchNotes\n\t\tsearchNotes = append(searchNotes, note)\n\t}\n\terr = rows.Err()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\treturn searchNotes\n}", "title": "" }, { "docid": "42b973339aadd1b9d3aec2797c861793", "score": "0.4784048", "text": "func FilterIssues(is Issues, fromDate, toDate time.Time) Issues {\n\tvar ret Issues\n\tfor _, issue := range is {\n\t\tif issue.ClosedDate.After(fromDate) &&\n\t\t\t(issue.ClosedDate.Before(toDate) || issue.ClosedDate.Equal(toDate)) {\n\t\t\tret = append(ret, issue)\n\t\t}\n\t}\n\treturn ret\n}", "title": "" }, { "docid": "f1dbc895750606ef64bfb062c798b915", "score": "0.47578582", "text": "func (s *IssueService) FindIssueList(ctx context.Context, find *api.IssueFind) ([]*api.Issue, error) {\n\ttx, err := s.db.BeginTx(ctx, nil)\n\tif err != nil {\n\t\treturn nil, FormatError(err)\n\t}\n\tdefer tx.Rollback()\n\n\tlist, err := s.findIssueList(ctx, tx, find)\n\tif err != nil {\n\t\treturn []*api.Issue{}, err\n\t}\n\n\tif err == nil {\n\t\tfor _, issue := range list {\n\t\t\tif err := s.cache.UpsertCache(api.IssueCache, issue.ID, issue); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn list, nil\n}", "title": "" }, { "docid": "e63af0e51afcd2103c8b9c0dae392949", "score": "0.47450483", "text": "func (repo *APITokenRepository) Search(identifier string) ([]*api.Token, error) {\n\tvar apiTokens []*api.Token\n\terr := repo.conn.Model(api.Token{}).\n\t\tWhere(\"api_key = ?\", identifier).\n\t\tFind(&apiTokens).Error\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif len(apiTokens) == 0 {\n\t\treturn nil, errors.New(\"no available api token for the provided identifier\")\n\t}\n\treturn apiTokens, nil\n}", "title": "" }, { "docid": "03a17bf401151adffd0af6891cc705c3", "score": "0.47322893", "text": "func (c *Client) SearchIssuesByRepo(ctx context.Context, repo Repository, opts SearchOptions, queryArgs url.Values) ([]github.Issue, error) {\n\topt := github.SearchOptions{\n\t\tSort: opts.Sort,\n\t\tOrder: opts.Order,\n\t}\n\n\tqueryArgs.Del(\"repo\")\n\tqueryArgs.Add(\"repo\", repo.String())\n\n\tvar (\n\t\tquery bytes.Buffer\n\t\tfirst = true\n\t)\n\tfor key, values := range queryArgs {\n\t\tfor _, value := 
range values {\n\t\t\tif !first {\n\t\t\t\tquery.WriteByte(' ')\n\t\t\t}\n\t\t\tfirst = false\n\t\t\tquery.WriteString(fmt.Sprintf(\"%s:%s\", key, value))\n\t\t}\n\t}\n\n\tvar allIssues []github.Issue\n\tfor {\n\t\tissues, resp, err := c.c.Search.Issues(ctx, query.String(), &opt)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tallIssues = append(allIssues, issues.Issues...)\n\n\t\tif opts.Limit > 0 && len(allIssues) >= opts.Limit {\n\t\t\tbreak\n\t\t}\n\n\t\tif resp.NextPage == 0 {\n\t\t\tbreak\n\t\t}\n\t\topt.ListOptions.Page = resp.NextPage\n\t}\n\n\treturn allIssues, nil\n}", "title": "" }, { "docid": "21d0efba2d8d5285d770e8df700b2b55", "score": "0.47284836", "text": "func (idx Index) search(text string) []int {\n\tvar r []int\n\tfor _, token := range analyze(text) {\n\t\tif ids, ok := idx[token]; ok {\n\t\t\tif r == nil {\n\t\t\t\tr = ids\n\t\t\t} else {\n\t\t\t\tr = intersection(r, ids)\n\t\t\t}\n\t\t} else {\n\t\t\t// Token doesn't exist.\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn r\n}", "title": "" }, { "docid": "09f497c7c0e17177f453f3850f0cf3e0", "score": "0.47270557", "text": "func (js *JiraService) Filter(id int) (issues []jira.Issue, err error) {\n\t// filterEndpoint := url.URL{Path: fmt.Sprintf(\"/api/2/filter/%d\", id)}\n\n\treturn issues, err\n}", "title": "" }, { "docid": "b4e041ad791edcf28f5a2ec1eac36e53", "score": "0.4723217", "text": "func (m *MetricsClient) GetIssues(ctx context.Context, repoOwner string, repos []string, beginDate, endDate time.Time) (models.Issues, error) {\n\tif repoOwner == \"\" {\n\t\treturn nil, errors.New(\"owner cannot be blank\")\n\t}\n\tprojectIssues := make(models.Issues, 0)\n\tfor _, repo := range repos {\n\t\trepoIssues := make(models.Issues, 0)\n\t\tlogrus.Debugf(\"getting issues for repo: %s\", repo)\n\t\topt := github.ListOptions{PerPage: 100}\n\t\tfor {\n\t\t\tlogrus.Debugf(\"getting issues for repo: %s page %d\", repo, opt.Page)\n\t\t\tissuesForPage, resp, err := m.c.Issues.ListByRepo(ctx, repoOwner, repo, &github.IssueListByRepoOptions{\n\t\t\t\t//Milestone: \"\",\n\t\t\t\tState: all,\n\t\t\t\t//Assignee: \"\",\n\t\t\t\t//Creator: \"\",\n\t\t\t\t//Mentioned: \"\",\n\t\t\t\t//Labels: nil,\n\t\t\t\tSince: beginDate,\n\t\t\t\tListOptions: opt,\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif resp != nil && resp.StatusCode == 404 {\n\t\t\t\tlogrus.Warnf(\"URL Not Found: %s\", resp.Request.URL.String())\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tlogrus.Debugf(\"retrieved %d issue for page %d\", len(issuesForPage), opt.Page)\n\t\t\tfor _, issue := range issuesForPage {\n\t\t\t\tissue := mapToIssue(issue)\n\t\t\t\tissue.RepoName = repo\n\t\t\t\tissue.Owner = repoOwner\n\t\t\t\tif issue.CreatedAt.After(endDate) {\n\t\t\t\t\tlogrus.Debugf(\"skipping.... 
issue.CreatedAt: %s - After(%s)\", issue.CreatedAt.String(), endDate.String())\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tlogrus.Debugf(\"\\tadding issue: %s/%d - %s\", issue.RepoName, issue.Number, issue.Title)\n\t\t\t\trepoIssues = append(repoIssues, issue)\n\t\t\t}\n\n\t\t\tif resp.NextPage == 0 {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\topt.Page = resp.NextPage\n\t\t}\n\t\tlogrus.Debugf(\"repo %s has %d issues\", repo, len(repoIssues))\n\t\tprojectIssues = append(projectIssues, repoIssues...)\n\t}\n\treturn projectIssues, nil\n}", "title": "" }, { "docid": "72499e433187266e0fd7ef3507630ed5", "score": "0.471891", "text": "func searchSymbols(ctx context.Context, args *search.Args, limit int) (res []*fileMatchResolver, common *searchResultsCommon, err error) {\n\tif mockSearchSymbols != nil {\n\t\treturn mockSearchSymbols(ctx, args, limit)\n\t}\n\n\ttr, ctx := trace.New(ctx, \"Search symbols\", fmt.Sprintf(\"query: %+v, numRepoRevs: %d\", args.Pattern, len(args.Repos)))\n\tdefer func() {\n\t\ttr.SetError(err)\n\t\ttr.Finish()\n\t}()\n\n\tif args.Pattern.Pattern == \"\" {\n\t\treturn nil, nil, nil\n\t}\n\n\tctx, cancelAll := context.WithCancel(ctx)\n\tdefer cancelAll()\n\n\tcommon = &searchResultsCommon{}\n\tvar (\n\t\trun = parallel.NewRun(20)\n\t\tmu sync.Mutex\n\t)\n\tfor _, repoRevs := range args.Repos {\n\t\trepoRevs := repoRevs\n\t\tif ctx.Err() != nil {\n\t\t\tbreak\n\t\t}\n\t\tif len(repoRevs.RevSpecs()) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\trun.Acquire()\n\t\tgoroutine.Go(func() {\n\t\t\tdefer run.Release()\n\t\t\trepoSymbols, repoErr := searchSymbolsInRepo(ctx, repoRevs, args.Pattern, args.Query, limit)\n\t\t\tif repoErr != nil {\n\t\t\t\ttr.LogFields(otlog.String(\"repo\", string(repoRevs.Repo.Name)), otlog.String(\"repoErr\", repoErr.Error()), otlog.Bool(\"timeout\", errcode.IsTimeout(repoErr)), otlog.Bool(\"temporary\", errcode.IsTemporary(repoErr)))\n\t\t\t}\n\t\t\tmu.Lock()\n\t\t\tdefer mu.Unlock()\n\t\t\tlimitHit := len(res) > limit\n\t\t\trepoErr = handleRepoSearchResult(common, *repoRevs, limitHit, false, repoErr)\n\t\t\tif repoErr != nil {\n\t\t\t\tif ctx.Err() == nil || errors.Cause(repoErr) != ctx.Err() {\n\t\t\t\t\t// Only record error if it's not directly caused by a context error.\n\t\t\t\t\trun.Error(repoErr)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tcommon.searched = append(common.searched, repoRevs.Repo)\n\t\t\t}\n\t\t\tif repoSymbols != nil {\n\t\t\t\tres = append(res, repoSymbols...)\n\t\t\t\tif limitHit {\n\t\t\t\t\tcancelAll()\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n\terr = run.Wait()\n\n\tif len(res) > limit {\n\t\tcommon.limitHit = true\n\t\tres = res[:limit]\n\t}\n\treturn res, common, err\n}", "title": "" }, { "docid": "77e0c123658d7a042917dd41ecd57382", "score": "0.47101057", "text": "func handler(w http.ResponseWriter, r *http.Request) {\n\t// fmt.Fprintf(w, \"URL.Path = %q\\n\", r.URL.Path)\n\targs := []string{\"js\", \"php\"}\n\tresult, err := github.SearchIssues(args)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tvar issueList = template.Must(template.New(\"issuelist\").Parse(`\n<h1>{{.TotalCount}} issues</h1>\n<table>\n<tr style='text-align: left'>\n <th>#</th>\n <th>State</th>\n <th>User</th>\n <th>Title</th>\n</tr>\n{{range .Items}}\n<tr>\n <td><a href='{{.HTMLURL}}'>{{.Number}}</a></td>\n <td>{{.State}}</td>\n <td><a href='{{.User.HTMLURL}}'>{{.User.Login}}</a></td>\n <td><a href='{{.HTMLURL}}'>{{.Title}}</a></td>\n</tr>\n{{end}}\n</table>\n`))\n\tif err := issueList.Execute(w, result); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n}", "title": "" }, { "docid": 
"12d23c9e7690dcf5d4d9d409ebe4979f", "score": "0.47019342", "text": "func SearchAnnouncements(c context.Context, platformName string, retired bool, limit, offset int32) ([]*dashpb.Announcement, error) {\n\tannQ := datastore.NewQuery(\"Announcement\").Eq(\"Retired\", retired).Limit(limit).Offset(offset).Order(\"-EndTime\", \"-StartTime\")\n\tif platformName != \"\" {\n\t\tannQ = annQ.Eq(\"PlatformNames\", platformName)\n\t}\n\tvar announcements []*Announcement\n\tif err := datastore.GetAll(c, annQ, &announcements); err != nil {\n\t\treturn nil, fmt.Errorf(\"error getting Announcement entities - %s\", err)\n\t}\n\treturn GetAllAnnouncementsPlatforms(c, announcements)\n}", "title": "" }, { "docid": "5afa2f6504f36c42ac88813cd1b6c866", "score": "0.46963957", "text": "func Issues(opts *IssuesOptions) ([]*Issue, error) {\n\tsess := buildIssuesQuery(opts)\n\tif sess == nil {\n\t\treturn make([]*Issue, 0), nil\n\t}\n\n\tsess.Limit(conf.UI.IssuePagingNum, (opts.Page-1)*conf.UI.IssuePagingNum)\n\n\tissues := make([]*Issue, 0, conf.UI.IssuePagingNum)\n\tif err := sess.Find(&issues); err != nil {\n\t\treturn nil, fmt.Errorf(\"Find: %v\", err)\n\t}\n\n\t// FIXME: use IssueList to improve performance.\n\tfor i := range issues {\n\t\tif err := issues[i].LoadAttributes(); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"LoadAttributes [%d]: %v\", issues[i].ID, err)\n\t\t}\n\t}\n\n\treturn issues, nil\n}", "title": "" }, { "docid": "1dac799c1d2b26e696621a5bfa2a0d73", "score": "0.46915042", "text": "func FindSQLDrivenLookupDetailsFiltered(lookupCodeFilters []coredata.LookupRequestData, lookupHeaders []coremodel.LookupHeader, db *gorm.DB, logEntry *logrus.Entry) (lookupDetails []coremodel.LookupDetail, err error) {\n\tif len(lookupHeaders) == 0 {\n\t\treturn\n\t}\n\tdataFinderStatment := []string{}\n\tmapByLovID := make(map[string]*coremodel.LookupHeader)\n\tmapByLovIDFilter := make(map[string]*[]string)\n\tfor _, fltD := range lookupCodeFilters {\n\t\tif len(fltD.FilteredCodes) > 0 {\n\t\t\tmapByLovIDFilter[fltD.LovID] = &fltD.FilteredCodes\n\t\t}\n\t}\n\tfor _, h := range lookupHeaders {\n\t\tmapByLovID[h.ID] = &h\n\t\tif h.SQLForData == nil {\n\t\t\tcontinue\n\t\t}\n\t\tif x, ok := mapByLovIDFilter[h.ID]; ok {\n\t\t\tvar inVal string\n\t\t\tif h.CodeActualDataType == \"string\" {\n\t\t\t\tinVal = \"'\" + strings.Join(*x, \"','\") + \"'\"\n\t\t\t} else {\n\t\t\t\tinVal = strings.Join(*x, \",\")\n\t\t\t}\n\t\t\tfinalSQL := strings.Join(strings.Split(h.SQLForDataFiltered, \"{{codes}}\"), inVal)\n\t\t\tdataFinderStatment = append(dataFinderStatment, finalSQL)\n\t\t} else {\n\t\t\tdataFinderStatment = append(dataFinderStatment, *h.SQLForData)\n\t\t}\n\n\t}\n\tif len(dataFinderStatment) == 0 {\n\t\tlogEntry.WithField(\"lovIds\", reflect.ValueOf(mapByLovID).MapKeys()).Warnf(\"No sql for lookup is found in all lookup data, no sql statement executed\")\n\t\treturn\n\t}\n\tfinalSQL := strings.Join(dataFinderStatment, \" union all \")\n\trows, errRow := db.Raw(finalSQL).Rows()\n\tif errRow != nil {\n\t\tlogEntry.WithError(errRow).WithField(\"lovIds\", reflect.ValueOf(mapByLovID).MapKeys()).Errorf(\"Fail to run query for lookup data , error: %s\", errRow.Error())\n\t\treturn nil, errRow\n\t}\n\tdefer rows.Close()\n\tfor rows.Next() {\n\t\tvar lkpDetail coremodel.LookupDetail\n\t\tdb.ScanRows(rows, &lkpDetail)\n\t\tlookupDetails = append(lookupDetails, lkpDetail)\n\t\tmapByLovID[lkpDetail.LovID].AppendLookupDetail(lkpDetail)\n\t}\n\treturn\n\n}", "title": "" }, { "docid": "7794e60e622da5cf78b0b4c715ccb546", "score": 
"0.46625698", "text": "func FindInProcessIssues() ([]*Issue, error) {\n\t// This seems really awful, but every issue that's in the system and isn't\n\t// ignored is in process today. We keep this helper around because at least\n\t// the function name says something clearly compared to the code below.\n\treturn Issues().Fetch()\n}", "title": "" }, { "docid": "180addd68af79a42acfa01a0fb00f8ae", "score": "0.46429726", "text": "func FindIssue(id int64) (*Issue, error) {\n\tvar op = dbi.DB.Operation()\n\top.Dbg = dbi.Debug\n\tvar i = &Issue{}\n\tvar ok = op.Select(\"issues\", &Issue{}).Where(\"id = ?\", id).First(i)\n\tif !ok {\n\t\treturn nil, op.Err()\n\t}\n\tvar err error\n\ti.Title, err = FindTitle(\"lccn = ?\", i.LCCN)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ti.deserialize()\n\treturn i, op.Err()\n}", "title": "" }, { "docid": "04556e7eca97c60a8adbafed72784c1e", "score": "0.46245134", "text": "func (client *Client) Search(model string, domain []interface{}) ([]int, error) {\n\targs := []interface{}{domain}\n\tkwargs := map[string]interface{}{}\n\trespIface, err := client.Call(model, \"search\", args, kwargs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\trecs := respIface.([]interface{})\n\tvar ids []int\n\tfor _, rec := range recs {\n\t\tidf := rec.(float64)\n\t\tids = append(ids, int(idf))\n\t}\n\treturn ids, nil\n}", "title": "" }, { "docid": "ab093eea766fa98f8a120025ea7cc73b", "score": "0.4610113", "text": "func (h *Engine) SearchAny(ctx context.Context, org string, project string, fs []Filter, newerThan time.Time) ([]*Conversation, time.Time, error) {\n\tcs, ts, err := h.SearchIssues(ctx, org, project, fs, newerThan)\n\tif err != nil {\n\t\treturn cs, ts, err\n\t}\n\n\tpcs, pts, err := h.SearchPullRequests(ctx, org, project, fs, newerThan)\n\tif err != nil {\n\t\treturn cs, ts, err\n\t}\n\n\tif pts.After(ts) {\n\t\tts = pts\n\t}\n\n\treturn append(cs, pcs...), ts, nil\n}", "title": "" }, { "docid": "7f77ab0c38e0f5b699c3609bea5174ac", "score": "0.4600743", "text": "func FindSimpleLookupDetailFiltered(lovIDs []coredata.LookupRequestData, db *gorm.DB, logEntry *logrus.Entry) (lookupDetails []coremodel.LookupDetail, err error) {\n\tif len(lovIDs) == 0 {\n\t\treturn\n\t}\n\tlogEntry = logEntry.WithField(\"lovIds\", lovIDs).WithField(\"model\", \"LookupDetail\")\n\tfor _, param := range lovIDs {\n\t\tif len(param.FilteredCodes) > 0 {\n\t\t\tdb = db.Or(\"(lov_id = ? and detail_code in (?))\", param.LovID, param.FilteredCodes)\n\t\t} else {\n\t\t\tdb = db.Or(\"lov_id = ?\", param.LovID)\n\t\t}\n\t}\n\tif dbRslt := db.Order(\"seq_no\").Find(&lookupDetails); dbRslt.Error != nil {\n\t\terr = dbRslt.Error\n\t\tlogEntry.WithError(err).Errorf(\"Fail to query for lookup details. 
error: %s\", err.Error())\n\t\treturn\n\t}\n\treturn\n}", "title": "" }, { "docid": "fc9c0e435e0b4dffb8a6c0daf5804ee4", "score": "0.4568626", "text": "func (idx index) search(text string) []uint64 {\n\tvar r []uint64\n\tfor _, token := range analyze(text) {\n\t\tif ids, ok := idx[token]; ok {\n\t\t\tif r == nil {\n\t\t\t\tr = ids\n\t\t\t} else {\n\t\t\t\tr = intersection(r, ids)\n\t\t\t}\n\t\t} else {\n\t\t\t// Token doesn't exist.\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn r\n}", "title": "" }, { "docid": "2536bc7dd9414a72918269eb72e5349e", "score": "0.45648894", "text": "func main() {\n\n\t// Get the issues according input parameters\n\tresult, err := github.SearchIssues(os.Args[1:])\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tfmt.Printf(\"%d issues in total\\n\", result.TotalCount)\n\n\t// Compute the milestone dates\n\tnow := time.Now()\n\tmonth := now.AddDate(0, -1, 0)\n\tyear := now.AddDate(-1, 0, 0)\n\n\t// Create the list of issues\n\tless1month := make([]*github.Issue, 0)\n\tless1year := make([]*github.Issue, 0)\n\tmore1year := make([]*github.Issue, 0)\n\n\t// Sort the issues\n\tfor _, item := range result.Items {\n\t\tif item.CreatedAt.After(month) {\n\t\t\tless1month = append(less1month, item)\n\t\t} else if item.CreatedAt.After(year) {\n\t\t\tless1year = append(less1year, item)\n\t\t} else {\n\t\t\tmore1year = append(more1year, item)\n\t\t}\n\t}\n\n\t// Display the list of issues less than one month\n\tfmt.Printf(\"Less than one month (%d issues)\\n\", len(less1month))\n\tdisplayIssues(less1month)\n\n\t// Display the list of issues less than one year\n\tfmt.Printf(\"Less than one year (%d issues)\\n\", len(less1year))\n\tdisplayIssues(less1year)\n\n\t// Display the list of issues more than one year\n\tfmt.Printf(\"More than one year (%d issues)\\n\", len(more1year))\n\tdisplayIssues(more1year)\n}", "title": "" }, { "docid": "f8cd8c8fa7285762c37855d27a004c87", "score": "0.45511502", "text": "func (a *API) SearchAcknowledgements(searchCriteria *SearchQueryType, filterCriteria *SearchFilterType) (*[]Acknowledgement, error) {\n\tq := url.Values{}\n\n\tif searchCriteria != nil && *searchCriteria != \"\" {\n\t\tq.Set(\"search\", string(*searchCriteria))\n\t}\n\n\tif filterCriteria != nil && len(*filterCriteria) > 0 {\n\t\tfor filter, criteria := range *filterCriteria {\n\t\t\tfor _, val := range criteria {\n\t\t\t\tq.Add(filter, val)\n\t\t\t}\n\t\t}\n\t}\n\n\tif q.Encode() == \"\" {\n\t\treturn a.FetchAcknowledgements()\n\t}\n\n\treqURL := url.URL{\n\t\tPath: config.AcknowledgementPrefix,\n\t\tRawQuery: q.Encode(),\n\t}\n\n\tresult, err := a.Get(reqURL.String())\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"[ERROR] API call error %+v\", err)\n\t}\n\n\tvar acknowledgements []Acknowledgement\n\tif err := json.Unmarshal(result, &acknowledgements); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &acknowledgements, nil\n}", "title": "" }, { "docid": "99f78b22796966a952f105a21976d992", "score": "0.45100716", "text": "func (yt *YouTrack) GetIssues(query string) (issues map[string]model.Issue, err error) {\n\tu := yt.getAPIURL(query)\n\n\tbody, err := yt.requester.MakeRequest(u, yt.headers)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := make(apiResponse, 0)\n\terr = json.Unmarshal(body, &response)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tissues = make(map[string]model.Issue, len(response))\n\tfor _, ai := range response {\n\t\tissue := ai.ToIssue()\n\t\tissues[issue.FullID()] = issue\n\t}\n\n\treturn issues, nil\n}", "title": "" }, { "docid": 
"9f6c3b3016ca490bd45df9eac0165772", "score": "0.4491308", "text": "func (i *SearchIssue) searchWithLabel(labels string) {\n\tvar b strings.Builder\n\n\tb.WriteString(\"labels in (\")\n\tb.WriteString(labels)\n\tb.WriteString(\")\")\n\n\ti.Query[\"jql\"] = b.String()\n}", "title": "" }, { "docid": "4cf518f017e901dbdf56f95e1e0f2b7d", "score": "0.44905695", "text": "func FindEventsByTimeRangeAndActionID(c *gin.Context) {\n\tjsonData, err := c.GetRawData()\n\tif err != nil {\n\t\tc.JSON(http.StatusBadRequest, helpers.BadRequest())\n\t}\n\n\tvar timeRange = models.EventTimeRange{}\n\n\terr = json.Unmarshal(jsonData, &timeRange)\n\tif err != nil {\n\t\tc.JSON(http.StatusBadRequest, helpers.BadRequest())\n\t}\n\n\tvar events []models.Event\n\n\tresult := models.DB.Find(\n\t\t&events,\n\t\t\"time > ? AND time < ? AND action_id = ?\",\n\t\ttimeRange.After,\n\t\ttimeRange.Before,\n\t\tc.Param(\"action_id\"),\n\t)\n\n\tif result.RowsAffected == 0 {\n\t\tc.JSON(http.StatusOK, helpers.NoResults())\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, gin.H{\"data\": helpers.Results{\n\t\tCount: len(events),\n\t\tResults: events,\n\t}})\n}", "title": "" }, { "docid": "64ca106df9674967459e4e0a225f51bb", "score": "0.4490319", "text": "func (f *Facade) Search(ctx context.Context, cls []*run.RunCL, definitions []*tryjob.Definition, luciProject string, cb func(*tryjob.Tryjob) bool) error {\n\tshouldStop, stop := makeStopFunction()\n\tworkers, err := f.makeSearchWorkers(ctx, cls, definitions, luciProject, shouldStop)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar wg sync.WaitGroup\n\twg.Add(len(workers))\n\tresultCh := make(chan searchResult)\n\tfor _, worker := range workers {\n\t\tworker := worker\n\t\tgo func() {\n\t\t\tdefer wg.Done()\n\t\t\tworker.search(ctx, resultCh)\n\t\t}()\n\t}\n\tgo func() {\n\t\twg.Wait()\n\t\tclose(resultCh)\n\t}()\n\n\tfor res := range resultCh {\n\t\tswitch {\n\t\tcase shouldStop(): // draining\n\t\t\tcontinue\n\t\tcase res.err != nil:\n\t\t\terr = res.err\n\t\t\tstop()\n\t\tcase !cb(res.tryjob):\n\t\t\tstop()\n\t\t}\n\t}\n\treturn err\n}", "title": "" }, { "docid": "e7f5fc09ea8054019eab3904054aea35", "score": "0.44837427", "text": "func (g githubClientService) FindVersionIssues(version string, sections []*changelog.Section) {\n\n\tmilestone := g.findMilstone(version)\n\n\tif milestone == nil {\n\t\treturn\n\t}\n\n\tissueList := github.IssueListByRepoOptions{\n\t\tMilestone: strconv.Itoa(milestone.GetNumber()),\n\t\tState: \"closed\",\n\t}\n\t// check reponse for all issues\n\tissues, _, err := g.client.Issues.ListByRepo(g.ctx, g.owner, g.repo, &issueList)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tfor _, issue := range issues {\n\t\tlabels := createSet(issue)\n\t\tfor _, section := range sections {\n\t\t\tif section.ContaintsLabels(labels) {\n\t\t\t\tsection.Items = append(section.Items, gitHubItem{Issue: issue})\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "43da10db2e007eaecff11e1a3c6a2a7a", "score": "0.44792563", "text": "func SearchUserTickets(user *model.User) {\n\tid := user.ID\n\n\tsortedTickets := ticketsByAssigneeID[id]\n\tidx := 1\n\tfor ticketSubject := range sortedTickets {\n\t\tfmt.Println(aurora.BrightCyan(\"Assignee Ticket \"+strconv.Itoa(idx)), \":\", aurora.BrightGreen(ticketSubject))\n\t\tidx++\n\t}\n\n\tsortedTickets = ticketsBySubmitterID[id]\n\tidx = 1\n\tfor ticketSubject := range sortedTickets {\n\t\tfmt.Println(aurora.BrightCyan(\"Submitter Ticket \"+strconv.Itoa(idx)), \":\", aurora.BrightGreen(ticketSubject))\n\t\tidx++\n\t}\n}", "title": "" }, { 
"docid": "a432ded359f1aa46f59a578d94859b0c", "score": "0.44372606", "text": "func FindEventsByActionID(c *gin.Context) {\n\tvar events []models.Event\n\tresult := models.DB.Find(&events, \"action_id = ?\", c.Param(\"action_id\"))\n\n\tif result.RowsAffected == 0 {\n\t\tc.JSON(http.StatusOK, helpers.NoResults())\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, gin.H{\"data\": helpers.Results{\n\t\tCount: len(events),\n\t\tResults: events,\n\t}})\n}", "title": "" }, { "docid": "1a6f38b7fb2d86e2a62a84e28efe34c9", "score": "0.44307196", "text": "func (s *IssueService) FindIssue(ctx context.Context, find *api.IssueFind) (*api.Issue, error) {\n\ttx, err := s.db.BeginTx(ctx, nil)\n\tif err != nil {\n\t\treturn nil, FormatError(err)\n\t}\n\tdefer tx.Rollback()\n\n\tlist, err := s.findIssueList(ctx, tx, find)\n\tif err != nil {\n\t\treturn nil, err\n\t} else if len(list) == 0 {\n\t\treturn nil, &common.Error{Code: common.NotFound, Err: fmt.Errorf(\"issue not found: %+v\", find)}\n\t} else if len(list) > 1 {\n\t\treturn nil, &common.Error{Code: common.Conflict, Err: fmt.Errorf(\"found %d issues with filter %+v, expect 1\", len(list), find)}\n\t}\n\n\tif err := s.cache.UpsertCache(api.IssueCache, list[0].ID, list[0]); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn list[0], nil\n}", "title": "" }, { "docid": "152d1ad304282861099ad9b80151e430", "score": "0.44294038", "text": "func (r *ExactMatchDockerSearcher) Search(precise bool, terms ...string) (app.ComponentMatches, []error) {\n\tmatches := app.ComponentMatches{}\n\tfor _, value := range terms {\n\t\tmatches = append(matches, &app.ComponentMatch{\n\t\t\tValue: value,\n\t\t\tName: value,\n\t\t\tArgument: fmt.Sprintf(\"--docker-image=%q\", value),\n\t\t\tDescription: fmt.Sprintf(\"Docker image %q\", value),\n\t\t\tScore: 0.0,\n\t\t})\n\t}\n\treturn matches, r.Errs\n}", "title": "" }, { "docid": "42c2f02ed346fe1d1277f5048af94aec", "score": "0.44250914", "text": "func ProblemstatusIn(vs ...string) predicate.ProblemStatus {\n\tv := make([]interface{}, len(vs))\n\tfor i := range v {\n\t\tv[i] = vs[i]\n\t}\n\treturn predicate.ProblemStatus(func(s *sql.Selector) {\n\t\t// if not arguments were provided, append the FALSE constants,\n\t\t// since we can't apply \"IN ()\". 
This will make this predicate falsy.\n\t\tif len(v) == 0 {\n\t\t\ts.Where(sql.False())\n\t\t\treturn\n\t\t}\n\t\ts.Where(sql.In(s.C(FieldProblemstatus), v...))\n\t})\n}", "title": "" }, { "docid": "6377ba7071e79244fcdcac3cdb19c3e8", "score": "0.44146696", "text": "func (js JiraService) Get(target sla.Target) (issues []jira.Issue) {\n\tgetCtx := log.WithFields(log.Fields{\n\t\t\"board\": target.Board,\n\t\t\"filter\": target.Filter,\n\t})\n\n\tif target.Board != 0 {\n\t\tboardIssues, err := js.Board(target.Board)\n\t\tif err != nil {\n\t\t\tgetCtx.WithError(err).Error(\"couldn't fetch board issues\")\n\t\t}\n\t\tissues = append(issues, boardIssues...)\n\t}\n\n\tif target.Filter != 0 {\n\t\tfilterIssues, err := js.Board(target.Filter)\n\t\tif err != nil {\n\t\t\tgetCtx.WithError(err).Error(\"couldn't fetch filter issues\")\n\t\t}\n\t\tissues = append(issues, filterIssues...)\n\t}\n\n\treturn issues\n}", "title": "" }, { "docid": "ba6172e832cfe14448187a5007daef31", "score": "0.44080812", "text": "func (s Searcher) Search(params index.SearchParameters) (index.Result, error) {\n\tsearchResult := index.Result{}\n\n\tindexAlias := bleve.NewIndexAlias()\n\tfor _, plat := range params.Platforms {\n\t\tif idx, ok := s.indexes[plat]; ok {\n\t\t\tindexAlias.Add(idx)\n\t\t} else {\n\t\t\tsearchResult.IgnoredPlatforms = append(searchResult.IgnoredPlatforms, plat)\n\t\t}\n\t}\n\n\tif len(searchResult.IgnoredPlatforms) == len(params.Platforms) {\n\t\treturn searchResult, fmt.Errorf(\"Could not execute search due to lack of indexes for any of the provided platforms\")\n\t}\n\n\tquery := bleve.NewDisjunctionQuery()\n\tquery.SetMin(1)\n\n\ttextQuery := bleve.NewMatchPhraseQuery(params.Text)\n\tquery.AddQuery(textQuery)\n\n\tprefixQuery := bleve.NewPrefixQuery(params.Text)\n\tquery.AddQuery(prefixQuery)\n\n\trequest := bleve.NewSearchRequest(query)\n\trequest.Size = maxNumberOfResults\n\n\tresult, err := indexAlias.Search(request)\n\tif err != nil {\n\t\treturn searchResult, err\n\t}\n\n\tfor _, hit := range result.Hits {\n\t\tgameHit := index.GameHit{\n\t\t\tID: hit.ID,\n\t\t}\n\t\tsearchResult.Hits = append(searchResult.Hits, gameHit)\n\t}\n\n\treturn searchResult, nil\n}", "title": "" }, { "docid": "a9e77d2a6d260941e6aecefde12427ed", "score": "0.44026574", "text": "func (n *Neo4jManager) SearchArticlesByID(id int64) ([]*db.WikiData, error,\n) {\n\tres := make([]*db.WikiData, 0, 1) // # 1 is logically expected.\n\tcql := `\n\t\tMATCH (v:WikiData)\n\t\tWHERE id(v) = $id\n\t\tRETURN id(v) as i, v.title as t \n\t`\n\terr := n.execute(executeParams{\n\t\tcypher: cql,\n\t\tbindings: map[string]interface{}{\"id\": id},\n\t\tcallback: func(r neo4j.Result) {\n\t\t\tv, ok := n.unpackWikiData(r, \"i\", \"t\")\n\t\t\tif ok {\n\t\t\t\tres = append(res, v)\n\t\t\t}\n\t\t},\n\t})\n\treturn res, err\n}", "title": "" }, { "docid": "7a02966958d24c8402c2b64dc9092cfc", "score": "0.43925044", "text": "func (j *DSJira) ProcessIssue(ctx *Ctx, allIssues *[]interface{}, allIssuesMtx *sync.Mutex, issue interface{}, customFields map[string]JiraField, from time.Time, to *time.Time, thrN int) (wch chan error, err error) {\n\tvar mtx *sync.RWMutex\n\tif thrN > 1 {\n\t\tmtx = &sync.RWMutex{}\n\t}\n\tissueID := j.ItemID(issue)\n\tvar headers map[string]string\n\tif j.Token != \"\" {\n\t\theaders = map[string]string{\"Content-Type\": \"application/json\", \"Authorization\": \"Basic \" + j.Token}\n\t} else {\n\t\theaders = map[string]string{\"Content-Type\": \"application/json\"}\n\t}\n\t// Encode search params in query for GET 
requests\n\tencodeInQuery := true\n\tcacheFor := time.Duration(3) * time.Hour\n\tprocessIssue := func(c chan error) (e error) {\n\t\tdefer func() {\n\t\t\tif c != nil {\n\t\t\t\tc <- e\n\t\t\t}\n\t\t}()\n\t\turlRoot := j.URL + JiraAPIRoot + JiraAPIIssue + \"/\" + issueID + JiraAPIComment\n\t\tstartAt := int64(0)\n\t\tmaxResults := int64(j.PageSize)\n\t\tepochMS := from.UnixNano() / 1e6\n\t\t// Seems like original Jira was using project filter there which is not needed IMHO.\n\t\tvar jql string\n\t\tif JiraFilterByProjectInComments {\n\t\t\tif to != nil {\n\t\t\t\tepochToMS := (*to).UnixNano() / 1e6\n\t\t\t\tif ctx.ProjectFilter && ctx.Project != \"\" {\n\t\t\t\t\tjql = fmt.Sprintf(`project = %s AND updated > %d AND updated < %d order by updated asc`, ctx.Project, epochMS, epochToMS)\n\t\t\t\t} else {\n\t\t\t\t\tjql = fmt.Sprintf(`updated > %d AND updated < %d order by updated asc`, epochMS, epochToMS)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tif ctx.ProjectFilter && ctx.Project != \"\" {\n\t\t\t\t\tjql = fmt.Sprintf(`project = %s AND updated > %d order by updated asc`, ctx.Project, epochMS)\n\t\t\t\t} else {\n\t\t\t\t\tjql = fmt.Sprintf(`updated > %d order by updated asc`, epochMS)\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tif to != nil {\n\t\t\t\tepochToMS := (*to).UnixNano() / 1e6\n\t\t\t\tjql = fmt.Sprintf(`updated > %d AND updated < %d order by updated asc`, epochMS, epochToMS)\n\t\t\t} else {\n\t\t\t\tjql = fmt.Sprintf(`updated > %d order by updated asc`, epochMS)\n\t\t\t}\n\t\t}\n\t\tmethod := Get\n\t\tfor {\n\t\t\tvar payloadBytes []byte\n\t\t\turl := urlRoot\n\t\t\tif encodeInQuery {\n\t\t\t\t// ?startAt=0&maxResults=100&jql=updated+%3E+0+order+by+updated+asc\n\t\t\t\turl += fmt.Sprintf(`?startAt=%d&maxResults=%d&jql=`, startAt, maxResults) + neturl.QueryEscape(jql)\n\t\t\t} else {\n\t\t\t\tpayloadBytes = []byte(fmt.Sprintf(`{\"startAt\":%d,\"maxResults\":%d,\"jql\":\"%s\"}`, startAt, maxResults, jql))\n\t\t\t}\n\t\t\tvar res interface{}\n\t\t\tres, _, _, _, e = Request(\n\t\t\t\tctx,\n\t\t\t\turl,\n\t\t\t\tmethod,\n\t\t\t\theaders,\n\t\t\t\tpayloadBytes,\n\t\t\t\t[]string{},\n\t\t\t\tmap[[2]int]struct{}{{200, 200}: {}}, // JSON statuses\n\t\t\t\tnil, // Error statuses\n\t\t\t\tmap[[2]int]struct{}{{200, 200}: {}}, // OK statuses: 200\n\t\t\t\tmap[[2]int]struct{}{{200, 200}: {}}, // Cache statuses: 200\n\t\t\t\ttrue, // retry\n\t\t\t\t&cacheFor, // cache duration\n\t\t\t\tfalse, // skip in dry-run mode\n\t\t\t)\n\t\t\tif e != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tcomments, ok := res.(map[string]interface{})[\"comments\"].([]interface{})\n\t\t\tif !ok {\n\t\t\t\te = fmt.Errorf(\"unable to unmarshal comments from %+v\", DumpKeys(res))\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif ctx.Debug > 1 {\n\t\t\t\tnComments := len(comments)\n\t\t\t\tif nComments > 0 {\n\t\t\t\t\tPrintf(\"processing %d comments\\n\", len(comments))\n\t\t\t\t}\n\t\t\t}\n\t\t\tif thrN > 1 {\n\t\t\t\tmtx.Lock()\n\t\t\t}\n\t\t\tissueComments, ok := issue.(map[string]interface{})[\"comments_data\"].([]interface{})\n\t\t\tif !ok {\n\t\t\t\tissue.(map[string]interface{})[\"comments_data\"] = []interface{}{}\n\t\t\t}\n\t\t\tissueComments, _ = issue.(map[string]interface{})[\"comments_data\"].([]interface{})\n\t\t\tif !ok {\n\t\t\t\tissueComments = comments\n\t\t\t} else {\n\t\t\t\tissueComments = append(issueComments, comments...)\n\t\t\t}\n\t\t\tissue.(map[string]interface{})[\"comments_data\"] = issueComments\n\t\t\tif thrN > 1 {\n\t\t\t\tmtx.Unlock()\n\t\t\t}\n\t\t\ttotalF, ok := 
res.(map[string]interface{})[\"total\"].(float64)\n\t\t\tif !ok {\n\t\t\t\te = fmt.Errorf(\"unable to unmarshal total from %+v\", DumpKeys(res))\n\t\t\t\treturn\n\t\t\t}\n\t\t\tmaxResultsF, ok := res.(map[string]interface{})[\"maxResults\"].(float64)\n\t\t\tif !ok {\n\t\t\t\te = fmt.Errorf(\"unable to maxResults total from %+v\", DumpKeys(res))\n\t\t\t\treturn\n\t\t\t}\n\t\t\ttotal := int64(totalF)\n\t\t\tmaxResults = int64(maxResultsF)\n\t\t\tinc := int64(totalF)\n\t\t\tif maxResultsF < totalF {\n\t\t\t\tinc = int64(maxResultsF)\n\t\t\t}\n\t\t\tstartAt += inc\n\t\t\tif startAt >= total {\n\t\t\t\tstartAt = total\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tif ctx.Debug > 0 {\n\t\t\t\tPrintf(\"processing next comments page from %d/%d\\n\", startAt, total)\n\t\t\t}\n\t\t}\n\t\tif ctx.Debug > 1 {\n\t\t\tPrintf(\"processed %d comments\\n\", startAt)\n\t\t}\n\t\treturn\n\t}\n\tvar ch chan error\n\tif thrN > 1 {\n\t\tch = make(chan error)\n\t\tgo func() {\n\t\t\t_ = processIssue(ch)\n\t\t}()\n\t} else {\n\t\terr = processIssue(nil)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\tif thrN > 1 {\n\t\tmtx.RLock()\n\t}\n\tissueFields, ok := issue.(map[string]interface{})[\"fields\"].(map[string]interface{})\n\tif thrN > 1 {\n\t\tmtx.RUnlock()\n\t}\n\tif !ok {\n\t\terr = fmt.Errorf(\"unable to unmarshal fields from issue %+v\", DumpKeys(issue))\n\t\treturn\n\t}\n\tif ctx.Debug > 1 {\n\t\tPrintf(\"before map custom: %+v\\n\", DumpPreview(issueFields, 100))\n\t}\n\ttype mapping struct {\n\t\tID string\n\t\tName string\n\t\tValue interface{}\n\t}\n\tif JiraMapCustomFields {\n\t\tm := make(map[string]mapping)\n\t\tfor k, v := range issueFields {\n\t\t\tcustomField, ok := customFields[k]\n\t\t\tif !ok {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tm[k] = mapping{ID: customField.ID, Name: customField.Name, Value: v}\n\t\t}\n\t\tfor k, v := range m {\n\t\t\tif ctx.Debug > 1 {\n\t\t\t\tprev := issueFields[k]\n\t\t\t\tPrintf(\"mapping custom fields %s: %+v -> %+v\\n\", k, prev, v)\n\t\t\t}\n\t\t\tissueFields[k] = v\n\t\t}\n\t}\n\tif ctx.Debug > 1 {\n\t\tPrintf(\"after map custom: %+v\\n\", DumpPreview(issueFields, 100))\n\t}\n\t// Extra fields\n\tif thrN > 1 {\n\t\tmtx.Lock()\n\t}\n\tesItem := j.AddMetadata(ctx, issue)\n\t// Seems like it doesn't make sense, because we just added those custom fields\n\tif JiraDropCustomFields {\n\t\tfor k, v := range issueFields {\n\t\t\tif strings.HasPrefix(strings.ToLower(k), \"customfield_\") {\n\t\t\t\tmp, _ := v.(mapping)\n\t\t\t\t_, keep := JiraKeepCustomFiled[mp.Name]\n\t\t\t\tif !keep {\n\t\t\t\t\tdelete(issueFields, k)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tif ctx.Debug > 1 {\n\t\tPrintf(\"after drop: %+v\\n\", DumpPreview(issueFields, 100))\n\t}\n\tif ctx.Project != \"\" {\n\t\tissue.(map[string]interface{})[\"project\"] = ctx.Project\n\t}\n\tesItem[\"data\"] = issue\n\tif thrN > 1 {\n\t\tmtx.Unlock()\n\t\terr = <-ch\n\t}\n\tif allIssuesMtx != nil {\n\t\tallIssuesMtx.Lock()\n\t}\n\t*allIssues = append(*allIssues, esItem)\n\tnIssues := len(*allIssues)\n\tif nIssues >= ctx.ESBulkSize {\n\t\tsendToElastic := func(c chan error) (e error) {\n\t\t\tdefer func() {\n\t\t\t\tif c != nil {\n\t\t\t\t\tc <- e\n\t\t\t\t}\n\t\t\t}()\n\t\t\te = SendToElastic(ctx, j, true, UUID, *allIssues)\n\t\t\tif e != nil {\n\t\t\t\tPrintf(\"error %v sending %d issues to ElasticSearch\\n\", e, len(*allIssues))\n\t\t\t}\n\t\t\t*allIssues = []interface{}{}\n\t\t\tif allIssuesMtx != nil {\n\t\t\t\tallIssuesMtx.Unlock()\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tif thrN > 1 {\n\t\t\twch = make(chan error)\n\t\t\tgo func() 
{\n\t\t\t\t_ = sendToElastic(wch)\n\t\t\t}()\n\t\t} else {\n\t\t\terr = sendToElastic(nil)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t} else {\n\t\tif allIssuesMtx != nil {\n\t\t\tallIssuesMtx.Unlock()\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "240203586a022f45fc042fb2cd152d11", "score": "0.43899125", "text": "func (c *Client) FindIrQwebFieldIds(criteria *Criteria, options *Options) ([]int64, error) {\n\tids, err := c.Search(IrQwebFieldModel, criteria, options)\n\tif err != nil {\n\t\treturn []int64{}, err\n\t}\n\treturn ids, nil\n}", "title": "" }, { "docid": "46ae84699a2004de57c2ab9cf4cb5cf0", "score": "0.438588", "text": "func FilterIssuesByLabel(is Issues, withoutLabels []string) Issues {\n\tvar ret Issues\n\tfor _, issue := range is {\n\t\tfound := false\n\t\tfor _, label := range issue.Labels {\n\t\t\tif sliceContains(withoutLabels, label) {\n\t\t\t\tfound = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\tif !found {\n\t\t\tret = append(ret, issue)\n\t\t}\n\t}\n\treturn ret\n}", "title": "" }, { "docid": "0b9353e94fdcb0df5ce0c45635f50098", "score": "0.43847", "text": "func findIssueMetric(repo string, metric *fixItMetric) {\n\tissueQueries := createIssueQuery(repo, \"issue\")\n\tlog.Printf(\"Issue Query: %v\", issueQueries)\n\n\tallIssues, err := gh.SearchIssues(issueQueries, *sort, *order)\n\tif err != nil {\n\t\tlog.Printf(\"Failed to fetch Issues for %s: %s\", repo, err)\n\t\treturn\n\t}\n\tmetric.totalIssues += len(allIssues)\n\tfor _, issue := range allIssues {\n\t\tevents, err := gh.GetIssueEvents(repo, issue.GetNumber())\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Failed to fetch events for issue %s: %s\", issue.GetURL(), err)\n\t\t\treturn\n\t\t}\n\n\t\t// Find the person who labeled the issue.\n\t\tfor _, event := range events {\n\t\t\tif event.GetLabel() != nil {\n\t\t\t\tif event.GetLabel().GetName() == *label {\n\t\t\t\t\tmetric.issueLabeledMap[event.GetActor().GetLogin()]++\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif issue.GetState() == closedState {\n\t\t\tmetric.totalClosedIssues++\n\n\t\t\t// Find the person who closed the issue, by walking down the events in reverse order.\n\t\t\tfor i := len(events) - 1; i >= 0; i-- {\n\t\t\t\tevent := events[i]\n\t\t\t\tif event.GetEvent() == closedState && isFixItWeek(event.GetCreatedAt()) {\n\t\t\t\t\tlogin := event.GetActor().GetLogin()\n\t\t\t\t\tif login != \"istio-merge-robot\" {\n\t\t\t\t\t\t// Not counting the bot\n\t\t\t\t\t\tmetric.issueClosedMap[login]++\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "7c62a15e7a9bb269c1830a44d0ce1caa", "score": "0.43728614", "text": "func (d *Dao) Search(c context.Context, mid, zoneid int64, mobiApp, device, platform, buvid, keyword, duration, order, filtered, fromSource, recommend string, plat int8, seasonNum, movieNum, upUserNum, uvLimit, userNum, userVideoLimit, biliUserNum, biliUserVideoLimit, rid, highlight, build, pn, ps int, now time.Time) (res *search.Search, code int, err error) {\n\tvar (\n\t\treq *http.Request\n\t\tip = metadata.String(c, metadata.RemoteIP)\n\t)\n\tres = &search.Search{}\n\tparams := url.Values{}\n\tparams.Set(\"build\", strconv.Itoa(build))\n\tparams.Set(\"keyword\", keyword)\n\tparams.Set(\"main_ver\", \"v3\")\n\tparams.Set(\"highlight\", strconv.Itoa(highlight))\n\tparams.Set(\"mobi_app\", mobiApp)\n\tparams.Set(\"device\", device)\n\tparams.Set(\"userid\", strconv.FormatInt(mid, 10))\n\tparams.Set(\"tids\", strconv.Itoa(rid))\n\tparams.Set(\"page\", 
strconv.Itoa(pn))\n\tparams.Set(\"pagesize\", strconv.Itoa(ps))\n\tparams.Set(\"media_bangumi_num\", strconv.Itoa(seasonNum))\n\tparams.Set(\"bili_user_num\", strconv.Itoa(biliUserNum))\n\tparams.Set(\"bili_user_vl\", strconv.Itoa(biliUserVideoLimit))\n\tparams.Set(\"user_num\", strconv.Itoa(userNum))\n\tparams.Set(\"user_video_limit\", strconv.Itoa(userVideoLimit))\n\tparams.Set(\"query_rec_need\", recommend)\n\tparams.Set(\"platform\", platform)\n\tparams.Set(\"duration\", duration)\n\tparams.Set(\"order\", order)\n\tparams.Set(\"search_type\", \"all\")\n\tparams.Set(\"from_source\", fromSource)\n\tif filtered == \"1\" {\n\t\tparams.Set(\"filtered\", filtered)\n\t}\n\tparams.Set(\"zone_id\", strconv.FormatInt(zoneid, 10))\n\tparams.Set(\"media_ft_num\", strconv.Itoa(movieNum))\n\tparams.Set(\"is_new_pgc\", \"1\")\n\tparams.Set(\"is_internation\", \"1\")\n\tparams.Set(\"no_display_default\", \"game,live_room\")\n\tparams.Set(\"flow_need\", \"1\")\n\tparams.Set(\"app_highlight\", \"media_bangumi,media_ft\")\n\t// new request\n\tif req, err = d.client.NewRequest(\"GET\", d.main, ip, params); err != nil {\n\t\treturn\n\t}\n\treq.Header.Set(\"Buvid\", buvid)\n\tif err = d.client.Do(c, req, res); err != nil {\n\t\treturn\n\t}\n\tb, _ := json.Marshal(res)\n\tlog.Error(\"wocao----%s---%s---%s\", d.main+\"?\"+params.Encode(), buvid, b)\n\tif res.Code != ecode.OK.Code() {\n\t\terr = errors.Wrap(ecode.Int(res.Code), d.main+\"?\"+params.Encode())\n\t}\n\tfor _, flow := range res.FlowResult {\n\t\tflow.Change()\n\t}\n\tcode = res.Code\n\treturn\n}", "title": "" }, { "docid": "3cb6ea7b001f0971f08dafa221b67a9c", "score": "0.43717062", "text": "func GetAllIssues(c *gin.Context) {\n\n db := c.MustGet(\"db\").(*gorm.DB)\n\n var active_issues []models.Issue\n err := models.GetAllIssues(db, &active_issues)\n\n if err != nil {\n if gorm.IsRecordNotFoundError(err) {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": \"Record not found!\"})\n } else {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err})\n }\n } else {\n c.JSON(http.StatusOK, active_issues)\n }\n\n}", "title": "" }, { "docid": "533a9be196c8ef129bc202b5ab3bffb2", "score": "0.43631935", "text": "func (store *MockReportStore) Search(criteria *models.ReportFilterCriteria) ([]models.HVSReport, error) {\n\tif criteria == nil || reflect.DeepEqual(*criteria, models.ReportFilterCriteria{}) {\n\n\t\treturn nil, nil\n\t}\n\thostStore := NewMockHostStore()\n\thostStatusStore := NewMockHostStatusStore()\n\tvar reports []models.HVSReport\n\tvar hosts []*hvs.Host\n\tvar hostStatuses []hvs.HostStatus\n\tif criteria.ID != uuid.Nil {\n\t\tr, _ := store.Retrieve(criteria.ID)\n\t\tif r != nil {\n\t\t\treports = append(reports, *r)\n\t\t}\n\t} else if criteria.HostHardwareID != uuid.Nil || criteria.HostName != \"\" {\n\t\tfor _, t := range hostStore.hostStore {\n\t\t\tif criteria.HostHardwareID == *t.HardwareUuid || criteria.HostName == t.HostName {\n\t\t\t\thosts = append(hosts, t)\n\t\t\t}\n\t\t}\n\t\tfor _, h := range hosts {\n\t\t\tfor _, r := range store.reportStore {\n\t\t\t\tif h.Id == r.HostID {\n\t\t\t\t\treports = append(reports, r)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t} else if criteria.HostID != uuid.Nil {\n\t\tfor _, r := range store.reportStore {\n\t\t\tif criteria.HostID == r.HostID {\n\t\t\t\treports = append(reports, r)\n\t\t\t}\n\t\t}\n\t} else if criteria.HostStatus != \"\" {\n\t\thostStatuses, _ = hostStatusStore.Search(&models.HostStatusFilterCriteria{\n\t\t\tHostStatus: criteria.HostStatus,\n\t\t\tLatestPerHost: true,\n\t\t})\n\n\t\tfor _, h := range 
hostStatuses {\n\t\t\tfor _, r := range store.reportStore {\n\t\t\t\tif h.HostID == r.HostID {\n\t\t\t\t\treports = append(reports, r)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t} else if !criteria.ToDate.IsZero() {\n\t\tfor _, r := range store.reportStore {\n\t\t\tif r.Expiration.Before(criteria.ToDate) {\n\t\t\t\treports = append(reports, r)\n\t\t\t}\n\t\t}\n\t} else { // criteria is all empty return all records\n\t\tfor _, r := range store.reportStore {\n\t\t\treports = append(reports, r)\n\t\t}\n\n\t}\n\n\treturn reports, nil\n}", "title": "" }, { "docid": "4accc499725acc3725dd5077940c98fe", "score": "0.4361948", "text": "func (l *Locations) FindByIDs(ids []int) error {\n\tids = domain.UniquifyIntSlice(ids)\n\treturn DB.Where(\"id in (?)\", ids).All(l)\n}", "title": "" }, { "docid": "fce2896557f48d81fd8f26293050d436", "score": "0.4361501", "text": "func SearchQueryBuilder(year string, name string, degree string, college string) string {\n\t// default query for search...\n\tvar query =\"SELECT c.id,c.name, c.email, c.degree, c.college, c.yearOfCompletion, c.modified, max(c1.attempts)\"\n\t\tquery += \" FROM candidates c\"\n\t\tquery += \" JOIN sessions s ON c.id = s.candidateid\"\n\t\tquery += \" JOIN challenge_answers c1 ON s.id = c1.sessionid\"\n\t\tquery += \" where s.status=0 \"\n\n\tvar stmt1 string\n\t// ======================= making query for search =================================\n\n\tif(year ==\"All\"){//will search for all the year passing out candidates..\n\t\tif(name ==\"\"){\n\t\t\tif(degree == \"\" && college == \"\"){//search for all the field..\n\t\t\t\tstmt1 = fmt.Sprintf(query+\" group by c.id order by c.id asc \")\n\t\t\t\t} else if(degree == \"\"){//will search for college only..\n\t\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND (c.college ILIKE '%%%s%%') group by c.id order by c.id asc \",college)\n\t\t\t\t\t}else if(college == \"\"){//will search for degree only..\n\t\t\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND (c.degree ILIKE '%%%s%%') group by c.id order by c.id asc \",degree)\n\t\t\t\t\t\t}else{\n\t\t\t\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND ((c.degree ILIKE '%%%s%%') AND (c.college ILIKE '%%%s%%') ) group by c.id order by c.id asc \",degree,college)\n\t\t\t\t\t\t}\n\n\t\t} else if(degree == \"\"){\n\t\t\t if(degree == \"\" && college == \"\"){//will search for name only..\n\t\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND ((c.name ILIKE '%%%s%%') OR (c.email LIKE '%%%s%%')) group by c.id order by c.id asc \",name,name)\n\t\t\t\t\t} else if(degree == \"\"){// will search for both name and college fields...\n\t\t\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND (((c.name ILIKE '%%%s%%') OR (c.email ILIKE '%%%s%%')) AND (c.college ILIKE '%%%s%%')) group by c.id order by c.id asc \",name,name,college)\n\t\t\t\t\t\t\t}\n\n\t\t} else if(college == \"\"){//will search for name and degree both field....\n\t\t\tstmt1 = fmt.Sprintf(query+\" AND (((c.name ILIKE '%%%s%%') OR (c.email ILIKE '%%%s%%')) AND (c.degree ILIKE '%%%s%%')) group by c.id order by c.id asc \",name,name,degree)\n\t\t\t} else {//will search for all the fields..\n\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND (((c.name ILIKE '%%%s%%') OR (c.email ILIKE '%%%s%%')) AND (c.college ILIKE '%%%s%%') AND (c.degree ILIKE '%%%s%%')) group by c.id order by c.id asc \",name,name,college,degree)\n\t\t\t\t}\n\n\t} else {//will search for specific year passing out candidates..\n\t\tif(name ==\"\"){\n\t\t\tif(degree == \"\" && college == \"\"){//search for all the field with specific year..\n\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND 
(c.yearOfCompletion::text LIKE '%%%s%%')group by c.id order by c.id asc \",year)\n\t\t\t\t} else if(degree == \"\"){//will search for college only with specific year..\n\t\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND ((c.college ILIKE '%%%s%%') AND (c.yearOfCompletion::text LIKE '%%%s%%')) group by c.id order by c.id asc \",college,year)\n\t\t\t\t\t}else if(college == \"\"){//will search for degree only with specific year..\n\t\t\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND ((c.degree ILIKE '%%%s%%') AND (c.yearOfCompletion::text LIKE '%%%s%%')) group by c.id order by c.id asc \",degree,year)\n\t\t\t\t\t\t}else{//will search for all the fields excepting name/email..\n\t\t\t\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND ((c.degree ILIKE '%%%s%%') AND (c.college ILIKE '%%%s%%') AND (c.yearOfCompletion::text LIKE '%%%s%%')) group by c.id order by c.id asc \",degree,college,year)\n\t\t\t\t\t\t}\n\n\t\t} else if(degree == \"\"){\n\t\t\tif(degree == \"\" && college == \"\"){//will search for name only with specific year..\n\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND (((c.name ILIKE '%%%s%%') OR (c.email LIKE '%%%s%%')) AND (c.yearOfCompletion::text LIKE '%%%s%%')) group by c.id order by c.id asc \",name,name,year)\n\t\t\t\t} else if(degree == \"\"){// will search for both name and college fields with specific year...\n\t\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND (((c.name ILIKE '%%%s%%') OR (c.email ILIKE '%%%s%%')) AND (c.college ILIKE '%%%s%%') AND (c.yearOfCompletion::text LIKE '%%%s%%')) group by c.id order by c.id asc \",name,name,college,year)\n\t\t\t\t\t\t}\n\n\t\t} else if(college == \"\"){//will search for name and degree both field with specific year....\n\t\t\tstmt1 = fmt.Sprintf(query+\" AND (((c.name ILIKE '%%%s%%') OR (c.email ILIKE '%%%s%%')) AND (c.degree ILIKE '%%%s%%') AND (c.yearOfCompletion::text LIKE '%%%s%%')) group by c.id order by c.id asc \",name,name,degree,year)\n\t\t\t} else {//will search for all the fields with specific year..\n\t\t\t\tstmt1 = fmt.Sprintf(query+\" AND (((c.name ILIKE '%%%s%%') OR (c.email ILIKE '%%%s%%')) AND (c.college ILIKE '%%%s%%') AND (c.degree ILIKE '%%%s%%') AND (c.yearOfCompletion::text LIKE '%%%s%%')) group by c.id order by c.id asc \",name,name,college,degree,year)\n\t\t\t\t}\n\t}\n\t//==============================================================================================================================================\n\treturn stmt1\n}", "title": "" }, { "docid": "31eeab8878320c5dd30c6e60fbe529e8", "score": "0.43441588", "text": "func (v *VideoModel) getConditions(include, exclude []string) []exp.Expression {\n\tqueryCols := []string{\"title\", \"tag\", \"username\"}\n\n\tf := func(terms []string, include bool) []exp.Expression {\n\t\tvar incQuery []exp.Expression\n\n\t\tfor _, term := range terms {\n\t\t\tvar currConds []exp.Expression\n\t\t\tfor _, col := range queryCols {\n\t\t\t\tpatt := term // Prefix matching only, but we could add a reversed b-tree index for fully fuzzy matching\n\t\t\t\t// Oh no\n\t\t\t\tif col == \"tag\" {\n\t\t\t\t\tt := goqu.Dialect(\"postgres\").\n\t\t\t\t\t\tSelect(\"videos.id\").\n\t\t\t\t\t\tFrom(\n\t\t\t\t\t\t\tgoqu.T(\"videos\"),\n\t\t\t\t\t\t).Join(\n\t\t\t\t\t\tgoqu.T(\"tags\"),\n\t\t\t\t\t\tgoqu.On(goqu.Ex{\"videos.id\": goqu.I(\"tags.video_id\")})).\n\t\t\t\t\t\tWhere(goqu.I(\"tag\").Eq(patt))\n\t\t\t\t\tvar exp exp.Expression\n\t\t\t\t\tif include {\n\t\t\t\t\t\texp = goqu.I(\"videos.id\").In(t)\n\t\t\t\t\t} else {\n\t\t\t\t\t\texp = goqu.I(\"videos.id\").NotIn(t)\n\t\t\t\t\t}\n\t\t\t\t\tcurrConds = 
append(currConds, exp)\n\t\t\t\t\t// Oh no no no\n\t\t\t\t} else if col == \"username\" {\n\t\t\t\t\tresp, err := v.grpcClient.GetUserIDsForUsername(context.Background(), &proto.GetUserIDsForUsernameRequest{\n\t\t\t\t\t\tUsername: term + \"%\"})\n\t\t\t\t\tif err != nil || len(resp.UserIDs) == 0 {\n\t\t\t\t\t\tlog.Errorf(\"could not retrieve user ids for username %s\", term)\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\n\t\t\t\t\tin := make([]interface{}, 0, len(resp.UserIDs))\n\n\t\t\t\t\tfor _, id := range resp.UserIDs {\n\t\t\t\t\t\tin = append(in, id)\n\t\t\t\t\t}\n\t\t\t\t\tif include {\n\t\t\t\t\t\texp := goqu.I(\"userid\").In(in)\n\t\t\t\t\t\tcurrConds = append(currConds, exp)\n\t\t\t\t\t} else {\n\t\t\t\t\t\texp := goqu.I(\"userID\").NotIn(in)\n\t\t\t\t\t\tcurrConds = append(currConds, exp)\n\t\t\t\t\t}\n\n\t\t\t\t} else {\n\t\t\t\t\tif include {\n\t\t\t\t\t\texp := goqu.I(col).Eq(patt)\n\t\t\t\t\t\tcurrConds = append(currConds, exp)\n\t\t\t\t\t} else {\n\t\t\t\t\t\texp := goqu.I(col).Neq(patt)\n\t\t\t\t\t\tcurrConds = append(currConds, exp)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif include {\n\t\t\t\tincQuery = append(incQuery, goqu.Or(currConds...))\n\t\t\t} else {\n\t\t\t\tincQuery = append(incQuery, goqu.And(currConds...))\n\n\t\t\t}\n\t\t}\n\t\treturn incQuery\n\t}\n\n\t// TODO: DRY\n\tincConds := f(include, true)\n\texcConds := f(exclude, false)\n\tallConds := append(incConds, excConds...)\n\n\treturn []exp.Expression{goqu.And(allConds...)}\n}", "title": "" }, { "docid": "91d2301aeed791c5e913bfe77f7204b0", "score": "0.43400013", "text": "func (s *Index) SearchEntities(r SearchRequest) ([]string, error) {\n\tif r.Expression == \"\" {\n\t\treturn nil, ErrBadSearch\n\t}\n\n\treq := createSearchRequest(r)\n\n\t// This can only fail if the query is malformed, since the\n\t// worst that can happen is the query is empty, this can't\n\t// return an error.\n\tresult, _ := s.eIndex.Search(req)\n\tslice := extractDocIDs(result)\n\treturn slice, nil\n}", "title": "" }, { "docid": "8cb507abc4649d0ff4da6b0acca472c2", "score": "0.43354678", "text": "func (g *Gitlab) SearchProjectId(namespace string, name string) (id int, err error) {\n\n\turl, opaque := g.ResourceUrlRaw(projects_search_url, map[string]string{\n\t\t\":query\": strings.ToLower(name),\n\t})\n\n\tvar projects []*Project\n\n\tcontents, err := g.buildAndExecRequestRaw(\"GET\", url, opaque, nil)\n\tif err == nil {\n\t\terr = json.Unmarshal(contents, &projects)\n\t} else {\n\t\treturn id, err\n\t}\n\n\tfor _, project := range projects {\n\t\tif project.Namespace.Name == namespace && strings.ToLower(project.Name) == strings.ToLower(name) {\n\t\t\tid = project.Id\n\t\t}\n\t}\n\n\treturn id, err\n}", "title": "" }, { "docid": "50b2bf679f0c11888f485bded85fedcf", "score": "0.4327857", "text": "func makeSearchCondition(term string) *sqlf.Query {\n\tsearchableColumns := []string{\n\t\t\"u.commit\",\n\t\t\"u.root\",\n\t\t\"(u.state)::text\",\n\t\t\"u.failure_message\",\n\t\t\"repo.name\",\n\t\t\"u.indexer\",\n\t\t\"u.indexer_version\",\n\t}\n\n\tvar termConds []*sqlf.Query\n\tfor _, column := range searchableColumns {\n\t\ttermConds = append(termConds, sqlf.Sprintf(column+\" ILIKE %s\", \"%\"+term+\"%\"))\n\t}\n\n\treturn sqlf.Sprintf(\"(%s)\", sqlf.Join(termConds, \" OR \"))\n}", "title": "" }, { "docid": "4e87f5e053c28bc740e3656571f522d1", "score": "0.43270025", "text": "func (c *Client) Search(ctx context.Context, pred *buildbucketpb.BuildPredicate) ([]*Build, error) {\n\trv := []*Build{}\n\tcursor := \"\"\n\tfor {\n\t\treq := 
&buildbucketpb.SearchBuildsRequest{\n\t\t\tFields: common.SearchBuildsFields,\n\t\t\tPageToken: cursor,\n\t\t\tPredicate: pred,\n\t\t}\n\t\tresp, err := c.bc.SearchBuilds(ctx, req)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif resp == nil {\n\t\t\tbreak\n\t\t}\n\t\tfor _, b := range resp.Builds {\n\t\t\trv = append(rv, c.convertBuild(b))\n\t\t}\n\t\tcursor = resp.NextPageToken\n\t\tif cursor == \"\" {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn rv, nil\n}", "title": "" }, { "docid": "8744a20abac86c28b9efd7a25855f765", "score": "0.43246815", "text": "func Search(query string, searchType string, currentSearchResults map[string]string) (schema.Payload, error) {\n\t//1. Gets results of a query\n\t//keep it in a Go map that acts as a set\n\tstartTime := time.Now()\n\tresults := make(map[string]bool)\n\t//2. Apply same analysis as when ingesting data i.e. tokenizing and stemming\n\tqueries := Analyze(query)\n\tif len(queries) == 0 {\n\t\treturn schema.Payload{}, errors.New(\"No valid queries!\")\n\t}\n\t//Support for AND / OR (TODO: eventually add NOT)\n\tif searchType == \"AND\" {\n\t\t//3. Get list of relevant records from the invertedIndex\n\t\t//temp set holding records we've matched so far for convenience\n\t\t//avoid quadratic complexity by sequentially removing records which don't accumulate matches as we move\n\t\t//through the queries\n\t\ttempRecords := make(map[string]bool)\n\t\t//get records for first query\n\t\trecordsFirstQueryMatch := globalInvertedIndex[queries[0]]\n\t\tfor _, recordID := range recordsFirstQueryMatch {\n\t\t\ttempRecords[recordID] = true\n\t\t}\n\t\tfor recordID, _ := range tempRecords {\n\t\t\trecord := getRecordFromID(recordID)\n\t\t\tfor i := 1; i < len(queries); i++ {\n\t\t\t\t_, tokenInRecord := record.TokenFrequency[queries[i]]\n\t\t\t\tif !tokenInRecord {\n\t\t\t\t\t//token from our intersection does not exist in this record, so remove it, don't need to keep checking\n\t\t\t\t\tdelete(tempRecords, recordID)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t//now have all of the records which match all of the queries\n\t\tfor recordID, _ := range tempRecords {\n\t\t\tresults[recordID] = true\n\t\t}\n\t} else if searchType == \"OR\" {\n\t\t//3. Get list of relevant records from the invertedIndex\n\t\tfor _, query := range queries {\n\t\t\trecordsWithQuery := globalInvertedIndex[query]\n\t\t\tfor _, recordID := range recordsWithQuery {\n\t\t\t\t_, inMap := results[recordID]\n\t\t\t\tif !inMap {\n\t\t\t\t\tresults[recordID] = true\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t//4. 
Sory by relevance - assign a score to each record that matches how relevant it is\n\t//Use the inverse document frequency\n\trecords := rank(results, queries, currentSearchResults)\n\t//convert searched time to miliseconds\n\ttime := int64(time.Now().Sub(startTime))\n\treturn schema.Payload{Time: time, Data: records, Query: queries, Length: len(records)}, nil\n\n}", "title": "" }, { "docid": "00bb7e9ea6bb176bed7b3f80337ffef4", "score": "0.4322131", "text": "func IndexIssuesHandler(params issueop.IndexIssuesParams) middleware.Responder {\n\tvar issues models.Issues\n\tvar response middleware.Responder\n\tif err := dbConnection.All(&issues); err != nil {\n\t\tzap.L().Error(\"DB Query\", zap.Error(err))\n\t\tresponse = issueop.NewIndexIssuesBadRequest()\n\t} else {\n\t\tissuePayloads := make(messages.IndexIssuesPayload, len(issues))\n\t\tfor i, issue := range issues {\n\t\t\tissuePayload := payloadForIssueModel(issue)\n\t\t\tissuePayloads[i] = &issuePayload\n\t\t}\n\t\tresponse = issueop.NewIndexIssuesOK().WithPayload(issuePayloads)\n\t}\n\treturn response\n}", "title": "" }, { "docid": "5fc9599c257f1527b667879c2aec9b75", "score": "0.43153155", "text": "func queryNotificationsByOwnerAndStatusAndDID(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tif len(args) != 3 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 3\")\n\t}\n\tif len(args[0]) <= 0 {\n\t\treturn shim.Error(\"The owner argument must be a non-empty string\")\n\t}\n\tif len(args[1]) <= 0 {\n\t\treturn shim.Error(\"The status argument must be a non-empty string\")\n\t}\n\n\towner, status, did := args[0], args[1], args[2]\n\tvar keys []string\n\tif did == \"\" {\n\t\tkeys = []string{owner, status}\n\t} else {\n\t\tkeys = []string{owner, status, did}\n\t}\n\n\treturn queryDataListByIndexAndKeys(stub, ownerStatusDIDIDIndex, keys, 3)\n}", "title": "" }, { "docid": "a974e84020af81e1d81d462bf437057c", "score": "0.4299979", "text": "func (a *ExecutionsControllerApiService) SearchForPipelineExecutionsByTrigger(ctx _context.Context, application string) apiSearchForPipelineExecutionsByTriggerRequest {\n\treturn apiSearchForPipelineExecutionsByTriggerRequest{\n\t\tapiService: a,\n\t\tctx: ctx,\n\t\tapplication: application,\n\t}\n}", "title": "" }, { "docid": "f4e167d64d5333b976d0183c4297befd", "score": "0.4294088", "text": "func (tracker *issueTracker) issuesByRelease(v *version.Version) ([]*github.Issue, error) {\n\treturn tracker.searchIssues(`milestone:\"%v\"`, v.BaseString())\n}", "title": "" }, { "docid": "183e36b91533e32ec6c1ae770cb33212", "score": "0.4285335", "text": "func searchRunsByProject(ctx context.Context, req *adminpb.SearchRunsRequest, pt *run.PageToken, limit int32) ([]*datastore.Key, error) {\n\tqb := run.ProjectQueryBuilder{\n\t\tProject: req.GetProject(),\n\t\tLimit: limit,\n\t\tStatus: req.GetStatus(), // optional\n\t}.PageToken(pt)\n\treturn qb.GetAllRunKeys(ctx)\n}", "title": "" }, { "docid": "a2a2dde7e9742cab26a220e238576402", "score": "0.4277114", "text": "func (p *searchEnvironments) search() (z []map[string]interface{}, err error) {\n\tdb, err := sql.Open(\n\t\t\"postgres\",\n\t\tcommons.BuildDSN(),\n\t)\n\tif err != nil {\n\t\tlog.Error().Err(err).Msg(\"Failed to connect to DB\")\n\t\treturn z, err\n\t}\n\tdefer db.Close()\n\n\tstmt, err := db.Prepare(\"SELECT e.*, (SELECT count(environment_id) FROM environments WHERE key LIKE '%' || $1 || '%' OR value LIKE '%' || $1 || '%') total, p.project_name FROM environments e LEFT JOIN projects p ON e.project_id = p.project_id WHERE e.key 
LIKE '%' || $1 || '%' OR e.value LIKE '%' || $1 || '%' ORDER BY e.date DESC OFFSET $2 LIMIT $3\")\n\tif err != nil && err != sql.ErrNoRows {\n\t\treturn z, err\n\t}\n\tdefer stmt.Close()\n\n\trows, err := stmt.Query(\n\t\tp.Q,\n\t\tp.StartLimit,\n\t\tp.EndLimit,\n\t)\n\n\tif err != nil && err != sql.ErrNoRows {\n\t\treturn z, err\n\t}\n\n\tcolumns, err := rows.Columns()\n\tif err != nil {\n\t\treturn z, err\n\t}\n\n\tvalues := make([]sql.RawBytes, len(columns))\n\tscanArgs := make([]interface{}, len(values))\n\tfor i := range values {\n\t\tscanArgs[i] = &values[i]\n\t}\n\n\tm := make([]map[string]interface{}, 0)\n\tfor rows.Next() {\n\t\terr = rows.Scan(scanArgs...)\n\t\tif err != nil {\n\t\t\treturn z, err\n\t\t}\n\t\tvar value string\n\t\tsub := make(map[string]interface{})\n\t\tfor i, col := range values {\n\t\t\tif col == nil {\n\t\t\t\tvalue = \"\"\n\t\t\t} else {\n\t\t\t\tvalue = php2go.Stripslashes(string(col))\n\t\t\t}\n\t\t\tsub[columns[i]] = value\n\t\t}\n\t\tm = append(m, sub)\n\t}\n\tif err = rows.Err(); err != nil {\n\t\treturn z, err\n\t}\n\treturn m, nil\n}", "title": "" }, { "docid": "a3abc8674d880aa0bf8d5326869015a7", "score": "0.42734236", "text": "func Get24hIssues(j aqua.Aide, repoOwner, repoName string) (interface{}, error) {\n\tvar (\n\t\tdb *sql.DB\n\t\terr error\n\t\tcount string\n\t)\n\tif db, err = ConnectDB(); err == nil {\n\t\tcurrentTimeStamp := getCurrentTime()\n\t\topenIssueCountQry := `SELECT count(*) from repo_issues\n\t\t\t\t\t\t\tWHERE created_at <= '` + currentTimeStamp + `' - INTERVAL 1 MINUTE\n\t\t\t\t\t\t\tAND created_at >'` + currentTimeStamp + `' - INTERVAL 1 Day + INTERVAL 1 MINUTE \n\t\t\t\t\t\t\tAND owner_name = '` + repoOwner + `' AND repo_name = '` + repoName + `'`\n\t\tvar results *sql.Rows\n\t\tif results, err = db.Query(openIssueCountQry); err == nil {\n\t\t\tfor results.Next() {\n\t\t\t\tif err = results.Scan(&count); err != nil {\n\t\t\t\t\tlog.Println(\"scan error :\", err)\n\t\t\t\t}\n\t\t\t}\n\t\t\tdb.Close()\n\t\t} else {\n\t\t\terr = errors.New(\"24h qry issue \" + err.Error())\n\t\t}\n\t} else {\n\t\terr = errors.New(\"connection issue: \" + err.Error())\n\t}\n\treturn count, err\n}", "title": "" }, { "docid": "173c1599887fb9ccc9246572c3eb43e4", "score": "0.42727256", "text": "func (s *StatusGetter) LoadSearchResults() error {\n\tq := s.buildSearchQuery()\n\tc := api.NewClientFromHTTP(s.Client)\n\n\tvar resp struct {\n\t\tAssignments struct {\n\t\t\tEdges []struct {\n\t\t\t\tNode SearchResult\n\t\t\t}\n\t\t}\n\t\tReviewRequested struct {\n\t\t\tEdges []struct {\n\t\t\t\tNode SearchResult\n\t\t\t}\n\t\t}\n\t}\n\terr := c.GraphQL(s.hostname(), q, nil, &resp)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"could not search for assignments: %w\", err)\n\t}\n\n\tprs := []SearchResult{}\n\tissues := []SearchResult{}\n\treviewRequested := []SearchResult{}\n\n\tfor _, e := range resp.Assignments.Edges {\n\t\tif e.Node.Type == \"Issue\" {\n\t\t\tissues = append(issues, e.Node)\n\t\t} else if e.Node.Type == \"PullRequest\" {\n\t\t\tprs = append(prs, e.Node)\n\t\t} else {\n\t\t\tpanic(\"you shouldn't be here\")\n\t\t}\n\t}\n\n\tfor _, e := range resp.ReviewRequested.Edges {\n\t\treviewRequested = append(reviewRequested, e.Node)\n\t}\n\n\tsort.Sort(Results(issues))\n\tsort.Sort(Results(prs))\n\tsort.Sort(Results(reviewRequested))\n\n\ts.AssignedIssues = []StatusItem{}\n\ts.AssignedPRs = []StatusItem{}\n\ts.ReviewRequests = []StatusItem{}\n\n\tfor _, i := range issues {\n\t\ts.AssignedIssues = append(s.AssignedIssues, StatusItem{\n\t\t\tRepository: 
i.Repository.NameWithOwner,\n\t\t\tIdentifier: fmt.Sprintf(\"%s#%d\", i.Repository.NameWithOwner, i.Number),\n\t\t\tpreview: i.Title,\n\t\t})\n\t}\n\n\tfor _, pr := range prs {\n\t\ts.AssignedPRs = append(s.AssignedPRs, StatusItem{\n\t\t\tRepository: pr.Repository.NameWithOwner,\n\t\t\tIdentifier: fmt.Sprintf(\"%s#%d\", pr.Repository.NameWithOwner, pr.Number),\n\t\t\tpreview: pr.Title,\n\t\t})\n\t}\n\n\tfor _, r := range reviewRequested {\n\t\ts.ReviewRequests = append(s.ReviewRequests, StatusItem{\n\t\t\tRepository: r.Repository.NameWithOwner,\n\t\t\tIdentifier: fmt.Sprintf(\"%s#%d\", r.Repository.NameWithOwner, r.Number),\n\t\t\tpreview: r.Title,\n\t\t})\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "f2604856d57ed70273fed336c7b9dfa1", "score": "0.4269472", "text": "func (repo *APITokenRepository) SearchMultiple(key string, pageNum int64, columns ...string) []*api.Token {\n\n\tvar apiTokens []*api.Token\n\tvar whereStmt []string\n\tvar sqlValues []interface{}\n\n\tfor _, column := range columns {\n\t\twhereStmt = append(whereStmt, fmt.Sprintf(\" %s = ? \", column))\n\t\tsqlValues = append(sqlValues, key)\n\t}\n\n\tsqlValues = append(sqlValues, pageNum*30)\n\trepo.conn.Raw(\"SELECT * FROM api_tokens WHERE (\"+strings.Join(whereStmt, \"||\")+\") ORDER BY api_key ASC LIMIT ?, 30\", sqlValues...).Scan(&apiTokens)\n\n\treturn apiTokens\n}", "title": "" }, { "docid": "710dc6d0560385012eea57c896a7d588", "score": "0.42577937", "text": "func SearchEmails(opts *SearchEmailOptions) ([]*SearchEmailResult, int64, error) {\n\t// Unfortunately, UNION support for SQLite in xorm is currently broken, so we must\n\t// build the SQL ourselves.\n\twhere := make([]string, 0, 5)\n\targs := make([]interface{}, 0, 5)\n\n\temailsSQL := \"(SELECT id as sortid, uid, email, is_activated, 0 as is_primary \" +\n\t\t\"FROM email_address \" +\n\t\t\"UNION ALL \" +\n\t\t\"SELECT id as sortid, id AS uid, email, is_active AS is_activated, 1 as is_primary \" +\n\t\t\"FROM `user` \" +\n\t\t\"WHERE type = ?) AS emails\"\n\targs = append(args, UserTypeIndividual)\n\n\tif len(opts.Keyword) > 0 {\n\t\t// Note: % can be injected in the Keyword parameter, but it won't do any harm.\n\t\twhere = append(where, \"(lower(`user`.full_name) LIKE ? OR `user`.lower_name LIKE ? 
OR emails.email LIKE ?)\")\n\t\tlikeStr := \"%\" + strings.ToLower(opts.Keyword) + \"%\"\n\t\targs = append(args, likeStr)\n\t\targs = append(args, likeStr)\n\t\targs = append(args, likeStr)\n\t}\n\n\tswitch {\n\tcase opts.IsPrimary.IsTrue():\n\t\twhere = append(where, \"emails.is_primary = ?\")\n\t\targs = append(args, true)\n\tcase opts.IsPrimary.IsFalse():\n\t\twhere = append(where, \"emails.is_primary = ?\")\n\t\targs = append(args, false)\n\t}\n\n\tswitch {\n\tcase opts.IsActivated.IsTrue():\n\t\twhere = append(where, \"emails.is_activated = ?\")\n\t\targs = append(args, true)\n\tcase opts.IsActivated.IsFalse():\n\t\twhere = append(where, \"emails.is_activated = ?\")\n\t\targs = append(args, false)\n\t}\n\n\tvar whereStr string\n\tif len(where) > 0 {\n\t\twhereStr = \"WHERE \" + strings.Join(where, \" AND \")\n\t}\n\n\tjoinSQL := \"FROM \" + emailsSQL + \" INNER JOIN `user` ON `user`.id = emails.uid \" + whereStr\n\n\tcount, err := x.SQL(\"SELECT count(*) \"+joinSQL, args...).Count()\n\tif err != nil {\n\t\treturn nil, 0, fmt.Errorf(\"Count: %v\", err)\n\t}\n\n\torderby := opts.SortType.String()\n\tif orderby == \"\" {\n\t\torderby = SearchEmailOrderByEmail.String()\n\t}\n\n\tquerySQL := \"SELECT emails.uid, emails.email, emails.is_activated, emails.is_primary, \" +\n\t\t\"`user`.name, `user`.full_name \" + joinSQL + \" ORDER BY \" + orderby\n\n\topts.setDefaultValues()\n\n\trows, err := x.SQL(querySQL, args...).Rows(new(SearchEmailResult))\n\tif err != nil {\n\t\treturn nil, 0, fmt.Errorf(\"Emails: %v\", err)\n\t}\n\n\t// Page manually because xorm can't handle Limit() with raw SQL\n\tdefer rows.Close()\n\n\temails := make([]*SearchEmailResult, 0, opts.PageSize)\n\tskip := (opts.Page - 1) * opts.PageSize\n\n\tfor rows.Next() {\n\t\tvar email SearchEmailResult\n\t\tif err := rows.Scan(&email); err != nil {\n\t\t\treturn nil, 0, err\n\t\t}\n\t\tif skip > 0 {\n\t\t\tskip--\n\t\t\tcontinue\n\t\t}\n\t\temails = append(emails, &email)\n\t\tif len(emails) == opts.PageSize {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn emails, count, err\n}", "title": "" }, { "docid": "6efbc784defa28c181d900b8e93d9e32", "score": "0.4256674", "text": "func (index *TriggerIndex) Search(filterTags []string, searchString string, onlyErrors bool, page int64, size int64) (searchResults []*moira.SearchResult, total int64, err error) {\n\tif size < 0 {\n\t\tpage = 0\n\t\tdocs, _ := index.index.DocCount()\n\t\tsize = int64(docs)\n\t}\n\n\treq := buildSearchRequest(filterTags, searchString, onlyErrors, int(page), int(size))\n\n\tsearchResult, err := index.index.Search(req)\n\tif err != nil {\n\t\treturn\n\t}\n\ttotal = int64(searchResult.Total)\n\tif searchResult.Hits.Len() == 0 {\n\t\treturn\n\t}\n\n\tfor _, result := range searchResult.Hits {\n\t\thighlights := getHighlights(result.Fragments, mapping.TriggerName, mapping.TriggerDesc)\n\t\ttriggerSearchResult := moira.SearchResult{\n\t\t\tObjectID: result.ID,\n\t\t\tHighlights: highlights,\n\t\t}\n\t\tsearchResults = append(searchResults, &triggerSearchResult)\n\t}\n\treturn\n}", "title": "" }, { "docid": "5cea4a3cdd7048a6ce727ab74a1151bf", "score": "0.42561936", "text": "func (s *Index) SearchGroups(r SearchRequest) ([]string, error) {\n\tif r.Expression == \"\" {\n\t\treturn nil, ErrBadSearch\n\t}\n\n\treq := createSearchRequest(r)\n\n\t// This can only fail if the query is malformed, since the\n\t// worst that can happen is the query is empty, this can't\n\t// return an error.\n\tresult, _ := s.gIndex.Search(req)\n\tslice := extractDocIDs(result)\n\treturn slice, nil\n}", 
"title": "" }, { "docid": "bade949d19c7d1e47f98ae1a9c7dddbb", "score": "0.42546594", "text": "func CheckIssues(lines []token.Position) ([]Issue, error) {\n\tfiles := make(map[string][]token.Position)\n\n\tfor _, line := range lines {\n\t\tfiles[line.Filename] = append(files[line.Filename], line)\n\t}\n\n\tissues := []Issue{}\n\n\tfor file, linesInFile := range files {\n\t\t// ensure we have the lines in ascending order\n\t\tsort.Slice(linesInFile, func(i, j int) bool { return linesInFile[i].Line < linesInFile[j].Line })\n\n\t\tdata, err := ioutil.ReadFile(file)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfileLines := strings.Split(string(data), \"\\n\")\n\n\t\tfor _, line := range linesInFile {\n\t\t\t// check the line before the problematic statement first\n\t\t\tpotentialCommentLine := line.Line - 2\n\n\t\t\t// check only if the previous line is strictly a line that begins with\n\t\t\t// the ignore comment\n\t\t\tif 0 <= potentialCommentLine && BeginsWithComment(fileLines[potentialCommentLine]) {\n\t\t\t\tissues = append(issues, Issue{statement: line, ignored: true})\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tisIgnored := HasIgnoreComment(fileLines[line.Line-1])\n\t\t\tissues = append(issues, Issue{statement: line, ignored: isIgnored})\n\t\t}\n\t}\n\n\treturn issues, nil\n}", "title": "" }, { "docid": "11b1b90fc1815453d65347117b0444df", "score": "0.4250815", "text": "func Search(q string) (result SearchResult, err error) {\n\tresult.Characters, err = searchByPrefix([]byte(q), charIDByName)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tresult.Corporations, err = searchByPrefix([]byte(q), corpIDByName)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tresult.Alliances, err = searchByPrefix([]byte(q), allIDByName)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tfor _, system := range mapdata.FindSolarSystems(q) {\n\t\tresult.SolarSystems = append(result.SolarSystems, SearchResultItem{Name: system.Name, ID: system.ID})\n\t}\n\tfor _, region := range mapdata.FindRegions(q) {\n\t\tresult.Regions = append(result.Regions, SearchResultItem{Name: region.Name, ID: region.ID})\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "7fdcfc64bbef5ae360df9f551139a102", "score": "0.42459273", "text": "func GetIssues(conf config.Config, params map[string]string) ([]Issue, error) {\n\tvalues := url.Values{}\n\tvalues.Add(\"apiKey\", conf.APIKey)\n\tfor key, val := range params {\n\t\tvalues.Add(key, val)\n\t}\n\n\turl := conf.SpaceURL + IssuesURL + \"?\" + values.Encode()\n\tbody, err := Get(url)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn nil, err\n\t}\n\n\tvar issues []Issue\n\tjson.Unmarshal(body, &issues)\n\treturn issues, nil\n}", "title": "" }, { "docid": "d94233815b4c21fafdf53a7447b014d1", "score": "0.42436972", "text": "func (service *Service) SearchProjectByID(pid string) *entity.Project {\n\n\tproject := service.conn.GetProject(pid)\n\tattachedFiles := service.conn.GetAttachedFiles(pid)\n\tproject.AttachedFiles = attachedFiles\n\treturn project\n\n}", "title": "" }, { "docid": "7afc80cf14ed063cd922e59638e80410", "score": "0.42401442", "text": "func (c *mockTeleportEventWatcher) SearchEvents(ctx context.Context, fromUTC, toUTC time.Time, namespace string, eventTypes []string, limit int, order types.EventOrder, startKey string) ([]events.AuditEvent, string, error) {\n\te := c.events\n\tc.events = make([]events.AuditEvent, 0) // nullify events\n\treturn e, \"test\", nil\n}", "title": "" }, { "docid": "8ebc35c7ae7b1066f2df789a82cb917a", "score": "0.42394176", "text": "func searchNotes(keywords string) 
{\n\tnoteTitles := noteTitlesBySearch(keywords)\n\tfor _, title := range noteTitles {\n\t\tfmt.Println(title)\n\t}\n}", "title": "" }, { "docid": "ab444a7f6f0f6595d83709443a7a4f3d", "score": "0.42278993", "text": "func (i *Impl) SearchNotes(f string) (n []Note) {\n\tnotes := []Note{}\n\tlike := \"%\" + f + \"%\"\n\ti.DB.Where(\"name LIKE ?\", like).Or(\"text LIKE ?\", like).Find(&notes)\n\n\treturn notes\n}", "title": "" } ]
1c7206c77ce4e1a088fa56d50b628501
AllX is like All, but panics if an error occurs.
[ { "docid": "cb05d981411f3b17e6a4bd9f14f2698c", "score": "0.69699985", "text": "func (rq *ReplyQuery) AllX(ctx context.Context) []*Reply {\n\tnodes, err := rq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" } ]
[ { "docid": "187be3a7f53ed81f863c621636519beb", "score": "0.73546845", "text": "func (epq *ExitPointQuery) AllX(ctx context.Context) []*ExitPoint {\n\tnodes, err := epq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "a0058cc49497e97dc6b563ff21ea28ea", "score": "0.73506474", "text": "func (dqq *DNSBLQueryQuery) AllX(ctx context.Context) []*DNSBLQuery {\n\tnodes, err := dqq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "1c63ccec08f9f163c1de5bf5226f5246", "score": "0.7307247", "text": "func (hdrq *HTTPDetectorResultQuery) AllX(ctx context.Context) []*HTTPDetectorResult {\n\tnodes, err := hdrq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "436abbba9c00030defe5266f09cd0f82", "score": "0.7299802", "text": "func (ciq *CarInspectionQuery) AllX(ctx context.Context) []*CarInspection {\n\tcis, err := ciq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn cis\n}", "title": "" }, { "docid": "aa4b7e947ffd00b804085c7b3d20956a", "score": "0.72902507", "text": "func (pq *PatientrightsQuery) AllX(ctx context.Context) []*Patientrights {\n\tpas, err := pq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn pas\n}", "title": "" }, { "docid": "b137fcc756d574aa33e3fd79337a8d5b", "score": "0.72657794", "text": "func (ruq *RoleUKMQuery) AllX(ctx context.Context) []*RoleUKM {\n\trus, err := ruq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn rus\n}", "title": "" }, { "docid": "f624794abef25747b8f9b7a6be421467", "score": "0.71758866", "text": "func (dq *DeviceQuery) AllX(ctx context.Context) []*Device {\n\tnodes, err := dq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "3d30f05c09f137713cb086768bf296d6", "score": "0.71741056", "text": "func (siq *ServiceInfoQuery) AllX(ctx context.Context) []*ServiceInfo {\n\tnodes, err := siq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "cfb84eea1531939107259359bf6d10c2", "score": "0.71733916", "text": "func (pdq *PingDetectorQuery) AllX(ctx context.Context) []*PingDetector {\n\tnodes, err := pdq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "671439dc3289fb7cbefe4b98ca9e7e86", "score": "0.71588534", "text": "func (ismq *ImplicitSkippedMessageQuery) AllX(ctx context.Context) []*ImplicitSkippedMessage {\n\tnodes, err := ismq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "3f4ccd9178b49bef5b61c1d66c46fbc8", "score": "0.712839", "text": "func (fq *FormulaQuery) AllX(ctx context.Context) []*Formula {\n\tnodes, err := fq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "dd266386d079fa750c8d259d7d272e99", "score": "0.7122954", "text": "func (dpq *DevicePlatformQuery) AllX(ctx context.Context) []*DevicePlatform {\n\tnodes, err := dpq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "3e002784df5d176d96170d535aebd5f5", "score": "0.7111852", "text": "func (eq *EntityQuery) AllX(ctx context.Context) []*Entity {\n\tnodes, err := eq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "f9722e1bba9d5b81fd06373b8116ed3f", "score": "0.70774674", "text": "func (rtq *RequestTargetQuery) AllX(ctx context.Context) []*RequestTarget {\n\tnodes, err := rtq.All(ctx)\n\tif err != nil 
{\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "4b27efcf0ca96a297959bf5ea88052fc", "score": "0.70261556", "text": "func (bq *BouncerQuery) AllX(ctx context.Context) []*Bouncer {\n\tnodes, err := bq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "36439fdda5294547cf451d1431aba0a3", "score": "0.70034456", "text": "func (uarq *UserActiveRecordQuery) AllX(ctx context.Context) []*UserActiveRecord {\n\tuars, err := uarq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn uars\n}", "title": "" }, { "docid": "8af7a067060df7c06c671437daca2e1f", "score": "0.69982034", "text": "func (lq *LimitpointQuery) AllX(ctx context.Context) []*Limitpoint {\n\tnodes, err := lq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "6f9add02b015d1dceec9261afe5e62d3", "score": "0.69828266", "text": "func (fq *FileimportQuery) AllX(ctx context.Context) []*Fileimport {\n\tnodes, err := fq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "8d33a2a98481aaa5bd3ebeadb6363e81", "score": "0.69718844", "text": "func (sdiq *SysDictItemQuery) AllX(ctx context.Context) []*SysDictItem {\n\tnodes, err := sdiq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "a94cc1f3df283c943ce562d087175e91", "score": "0.6963859", "text": "func (dmq *DeathMannerQuery) AllX(ctx context.Context) []*DeathManner {\n\tnodes, err := dmq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "b52395d6b080d6d53f78090ba2d723dc", "score": "0.6956598", "text": "func (sq *ShopQuery) AllX(ctx context.Context) []*Shop {\n\tnodes, err := sq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "fde2b78b8660dc29c55d6f2b524f5331", "score": "0.6952699", "text": "func (bq *BlockQuery) AllX(ctx context.Context) []*Block {\n\tnodes, err := bq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "61fc8496e870f60c17fe78ba36073a68", "score": "0.6952112", "text": "func (oq *OrganizationQuery) AllX(ctx context.Context) []*Organization {\n\tnodes, err := oq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "743bbacb8ed6254ccd9bb5ddaa10cf73", "score": "0.69516903", "text": "func All(err error) []error {\n\tvar all interface{ Errors() []error }\n\tif ok := As(err, &all); ok {\n\t\treturn all.Errors()\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "41a1b30b08e1e8d735454bf570a1cb48", "score": "0.69346315", "text": "func (biq *BinaryItemQuery) AllX(ctx context.Context) []*BinaryItem {\n\tnodes, err := biq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "ff5c98021618787acf68e993f0a73cc0", "score": "0.69317144", "text": "func (dq *DividendQuery) AllX(ctx context.Context) []*Dividend {\n\tnodes, err := dq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "d77ae2260084c3f8da9ef177520ae9b5", "score": "0.69282025", "text": "func (srq *StatusReserveQuery) AllX(ctx context.Context) []*StatusReserve {\n\tsrs, err := srq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn srs\n}", "title": "" }, { "docid": "ddf786c68dbd91bcc826d4d6a8bc25e3", "score": "0.6923945", "text": "func (trq *TradeRecordQuery) AllX(ctx context.Context) []*TradeRecord {\n\tnodes, err := trq.All(ctx)\n\tif err != nil 
{\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "54705ab5bf1cc537e1613721d4f34617", "score": "0.69215935", "text": "func (aq *AccountQuery) AllX(ctx context.Context) []*Account {\n\tnodes, err := aq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "aad77681057591a7627378c7bf25be3a", "score": "0.6860227", "text": "func (pq *PCQuery) AllX(ctx context.Context) []*PC {\n\tnodes, err := pq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "9c6971b29a0cf96bb3305c69dc413c00", "score": "0.6854232", "text": "func (ntdmq *NetTopologyDeviceMapQuery) AllX(ctx context.Context) []*NetTopologyDeviceMap {\n\tnodes, err := ntdmq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "b30814c82b457b67dd1c7db1e1df769c", "score": "0.6804801", "text": "func (srq *SysRoleQuery) AllX(ctx context.Context) []*SysRole {\n\tnodes, err := srq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "d0dad0531d031702afc81b9013d7e74e", "score": "0.6784659", "text": "func (pq *PointpendinglbtransactionQuery) AllX(ctx context.Context) []*Pointpendinglbtransaction {\n\tnodes, err := pq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "4acd5acc4e6bd32a18d584fafa855a8c", "score": "0.67837286", "text": "func (wq *WalletQuery) AllX(ctx context.Context) []*Wallet {\n\tnodes, err := wq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "6fd00410daa58b0b2ff4213ef98c9d44", "score": "0.6769292", "text": "func (clcdq *CheckListCategoryDefinitionQuery) AllX(ctx context.Context) []*CheckListCategoryDefinition {\n\tclcds, err := clcdq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn clcds\n}", "title": "" }, { "docid": "5b3a0baae0bf5a881027c6176ad3c016", "score": "0.6698144", "text": "func (giq *GroupInfoQuery) AllX(ctx context.Context) []*GroupInfo {\n\tnodes, err := giq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "8858103ee0704814c7273ee16c7a4f18", "score": "0.6696512", "text": "func (gq *GroupsQuery) AllX(ctx context.Context) []*Groups {\n\tnodes, err := gq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "899ebe9815962f4d2bd1bc791821ef21", "score": "0.66937053", "text": "func (bcq *BaselineCategoryQuery) AllX(ctx context.Context) []*BaselineCategory {\n\tnodes, err := bcq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "104593ad900d777e1d3ebe57538d0942", "score": "0.66424286", "text": "func (blq *BlobLinkQuery) AllX(ctx context.Context) []*BlobLink {\n\tnodes, err := blq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "0225191762ee01371bcafd883a82559e", "score": "0.6628752", "text": "func (pq *PropertyQuery) AllX(ctx context.Context) []*Property {\n\tprs, err := pq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn prs\n}", "title": "" }, { "docid": "54aec8b74e76f92453390c59ab898232", "score": "0.6612531", "text": "func (ppq *PoolPropertiesQuery) AllX(ctx context.Context) []*PoolProperties {\n\tnodes, err := ppq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "d1974a5b2b4851e63b46a8ab48dd1eb5", "score": "0.66039914", "text": "func (gbq *GroupBudgetQuery) AllX(ctx context.Context) 
[]*GroupBudget {\n\tnodes, err := gbq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "90b993d7bd20905f428033f10a9feb99", "score": "0.6599358", "text": "func (woq *WorkOrderQuery) AllX(ctx context.Context) []*WorkOrder {\n\twos, err := woq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn wos\n}", "title": "" }, { "docid": "113d708d5716ace99b601d48bc05f495", "score": "0.655687", "text": "func (oumq *OrgUnitMemberQuery) AllX(ctx context.Context) []*OrgUnitMember {\n\tnodes, err := oumq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "320eb2cf78bae4ca9029424715a54056", "score": "0.654416", "text": "func (mcq *MstCustomerQuery) AllX(ctx context.Context) []*MstCustomer {\n\tnodes, err := mcq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "4c31afe7f83c7a1f92956d3ad99cbce3", "score": "0.6539778", "text": "func (wodq *WorkOrderDefinitionQuery) AllX(ctx context.Context) []*WorkOrderDefinition {\n\twods, err := wodq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn wods\n}", "title": "" }, { "docid": "b865302abbbcf0811f24f2af9c07e132", "score": "0.6532671", "text": "func (e *Elements) All() *Elements {\n\te.any = false\n\te.all = true\n\treturn e\n}", "title": "" }, { "docid": "845472d3ff7b9fe58d7b341beba326eb", "score": "0.6528735", "text": "func (cbq *ClubBranchQuery) AllX(ctx context.Context) []*ClubBranch {\n\tcbs, err := cbq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn cbs\n}", "title": "" }, { "docid": "11b184839d326e5d8b85a3b4baf9666d", "score": "0.65182805", "text": "func (mq *MusicQuery) AllX(ctx context.Context) []*Music {\n\tms, err := mq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn ms\n}", "title": "" }, { "docid": "4bbc5c887b1cd588ba182b1e642d45ba", "score": "0.6501721", "text": "func (e *ErrorCollection) All() []error {\n\treturn e.collection\n}", "title": "" }, { "docid": "d406a381901776cdcd5181baa4efb963", "score": "0.6425302", "text": "func (v *Validator) CheckAll() error {\n\tvar errs *multierror.Error\n\terrs = multierror.Append(errs, v.CheckJSONSchema())\n\terrs = multierror.Append(errs, v.CheckPlatform())\n\terrs = multierror.Append(errs, v.CheckRoot())\n\terrs = multierror.Append(errs, v.CheckMandatoryFields())\n\terrs = multierror.Append(errs, v.CheckSemVer())\n\terrs = multierror.Append(errs, v.CheckMounts())\n\terrs = multierror.Append(errs, v.CheckProcess())\n\terrs = multierror.Append(errs, v.CheckLinux())\n\terrs = multierror.Append(errs, v.CheckAnnotations())\n\tif v.platform == \"linux\" || v.platform == \"solaris\" {\n\t\terrs = multierror.Append(errs, v.CheckHooks())\n\t}\n\n\treturn errs.ErrorOrNil()\n}", "title": "" }, { "docid": "5f000b924aede75420577cf39f6ac841", "score": "0.6404145", "text": "func (ptq *PostThumbnailQuery) AllX(ctx context.Context) []*PostThumbnail {\n\tnodes, err := ptq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "17d889a15aa3d0c86c95a8b7b77f03aa", "score": "0.63026273", "text": "func (hdrq *HTTPDetectorResultQuery) All(ctx context.Context) ([]*HTTPDetectorResult, error) {\n\tctx = setContextOp(ctx, hdrq.ctx, \"All\")\n\tif err := hdrq.prepareQuery(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\tqr := querierAll[[]*HTTPDetectorResult, *HTTPDetectorResultQuery]()\n\treturn withInterceptors[[]*HTTPDetectorResult](ctx, hdrq, qr, hdrq.inters)\n}", "title": "" }, { "docid": "f3893f176b96c361494663f980406e3f", 
"score": "0.62992924", "text": "func (paq *PostAttachmentQuery) AllX(ctx context.Context) []*PostAttachment {\n\tnodes, err := paq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "80b33bac5636f7a304d6b8064c794e22", "score": "0.62937456", "text": "func (s *StreamImpl) All(predicate rxgo.Predicate, opts ...rxgo.Option) Stream {\n\topts = appendContinueOnError(s.ctx, opts...)\n\treturn &StreamImpl{ctx: s.ctx, observable: rxgo.FromChannel(s.observable.All(predicate, opts...).Observe(), opts...)}\n}", "title": "" }, { "docid": "54c88e340d30331eb982ab15e88eb0d5", "score": "0.62839633", "text": "func (cq *CommentQuery) AllX(ctx context.Context) []*Comment {\n\tnodes, err := cq.All(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nodes\n}", "title": "" }, { "docid": "6a94ddc0bb11c2880105d0c9efe5f571", "score": "0.6241877", "text": "func (m *Model) All(where ...interface{}) (Result, error) {\n\tvar ctx = m.GetCtx()\n\treturn m.doGetAll(ctx, false, where...)\n}", "title": "" }, { "docid": "1c38f6d5f49fab59eb518ab73f1ff68d", "score": "0.6240479", "text": "func (qs FrameQuerySet) All(ret *[]Frame) error {\n\treturn qs.db.Find(ret).Error\n}", "title": "" }, { "docid": "8f5ad2774dd3d85d6df7fbade269c612", "score": "0.62029517", "text": "func All() Whereable {\n\treturn nil\n}", "title": "" }, { "docid": "ceb36efcf92a2b595328e26969f3dde5", "score": "0.6185439", "text": "func (pdq *PingDetectorQuery) All(ctx context.Context) ([]*PingDetector, error) {\n\tctx = setContextOp(ctx, pdq.ctx, \"All\")\n\tif err := pdq.prepareQuery(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\tqr := querierAll[[]*PingDetector, *PingDetectorQuery]()\n\treturn withInterceptors[[]*PingDetector](ctx, pdq, qr, pdq.inters)\n}", "title": "" }, { "docid": "77b4074526390dee7b6548b16ec837af", "score": "0.6164926", "text": "func (dqq *DNSBLQueryQuery) All(ctx context.Context) ([]*DNSBLQuery, error) {\n\tif err := dqq.prepareQuery(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\treturn dqq.sqlAll(ctx)\n}", "title": "" }, { "docid": "7f21471c8379a48505f4b6f43a823609", "score": "0.6157664", "text": "func (eb *ErrorBag) All() []error {\n\tvar errors []error\n\tfor _, bag := range eb.bag {\n\t\tfor i := range bag {\n\t\t\terrors = append(errors, bag[i])\n\t\t}\n\t}\n\treturn errors\n}", "title": "" }, { "docid": "501b9b8295f4decbf564cabb2ac34dbd", "score": "0.61124295", "text": "func (s *Scan) All(out interface{}) error {\n\titr := &scanIter{\n\t\tscan: s,\n\t\tunmarshal: unmarshalAppend,\n\t\terr: s.err,\n\t}\n\tfor itr.Next(out) {\n\t}\n\treturn itr.Err()\n}", "title": "" }, { "docid": "4a6ed7691b5e72b00559156dafe827ee", "score": "0.6071623", "text": "func All() {\n\tintegersAndFloatsAndComplex()\n\tstringsAndRunesAndBytes()\n\tloopsAndIf()\n\tdeferExample()\n}", "title": "" }, { "docid": "72e330be731d473759164474cee7069e", "score": "0.6058486", "text": "func (q retroQuery) All(exec boil.Executor) (RetroSlice, error) {\n\tvar o []*Retro\n\n\terr := q.Bind(nil, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"models: failed to assign all query results to Retro slice\")\n\t}\n\n\tif len(retroAfterSelectHooks) != 0 {\n\t\tfor _, obj := range o {\n\t\t\tif err := obj.doAfterSelectHooks(exec); err != nil {\n\t\t\t\treturn o, err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn o, nil\n}", "title": "" }, { "docid": "398f96923a5f352cc680e715214ff6c1", "score": "0.6035955", "text": "func (q jetQuery) All(exec boil.Executor) (JetSlice, error) {\n\tvar o []*Jet\n\n\terr := q.Bind(nil, exec, 
&o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"models: failed to assign all query results to Jet slice\")\n\t}\n\n\treturn o, nil\n}", "title": "" }, { "docid": "746022ff4ae73a4e44c5ef7e6133c0e0", "score": "0.5999688", "text": "func (q visitQuery) All(ctx context.Context, exec boil.ContextExecutor) (VisitSlice, error) {\n\tvar o []*Visit\n\n\terr := q.Bind(ctx, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"models: failed to assign all query results to Visit slice\")\n\t}\n\n\tif len(visitAfterSelectHooks) != 0 {\n\t\tfor _, obj := range o {\n\t\t\tif err := obj.doAfterSelectHooks(ctx, exec); err != nil {\n\t\t\t\treturn o, err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn o, nil\n}", "title": "" }, { "docid": "e972a9cef7ab02acd3c5032f6e945037", "score": "0.5969098", "text": "func (epq *ExitPointQuery) All(ctx context.Context) ([]*ExitPoint, error) {\n\tif err := epq.prepareQuery(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\treturn epq.sqlAll(ctx)\n}", "title": "" }, { "docid": "4115dc6826de615638dccebd5c679765", "score": "0.59423184", "text": "func (eq *EntityQuery) All(ctx context.Context) ([]*Entity, error) {\n\tctx = newQueryContext(ctx, TypeEntity, \"All\")\n\tif err := eq.prepareQuery(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\tqr := querierAll[[]*Entity, *EntityQuery]()\n\treturn withInterceptors[[]*Entity](ctx, eq, qr, eq.inters)\n}", "title": "" }, { "docid": "e5527e232cbe709bfcbb7378e1ec4b48", "score": "0.59338784", "text": "func All(validators ...validator.Set) validator.Set {\n\treturn allValidator{\n\t\tvalidators: validators,\n\t}\n}", "title": "" }, { "docid": "e949e044d6906fd7389b358fdc2becb6", "score": "0.5929865", "text": "func (c *Collection) All(result interface{}) error {\n\tclient := &http.Client{}\n\treq, err := http.NewRequest(\"GET\", c.collectionAPIBaseURL(), nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq.Header.Add(\"Authorization\", fmt.Sprintf(\"Bearer %s\", ApiKey))\n\treq.Header.Add(\"Content-Type\", \"application/json\")\n\n\tres, err := client.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer res.Body.Close()\n\n\tbody, err := ioutil.ReadAll(res.Body)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// check if an explicit error message was returned\n\tvar errorResponse apiErrorResponse\n\t_ = json.Unmarshal(body, &errorResponse)\n\tif errorResponse != (apiErrorResponse{}) {\n\t\treturn errors.New(strings.ToLower(errorResponse.Error))\n\t}\n\n\treturn json.Unmarshal(body, &result)\n}", "title": "" }, { "docid": "a24982b7378dccb7683cda8314ca83b1", "score": "0.5914616", "text": "func (q auditPackageQuery) All(ctx context.Context, exec boil.ContextExecutor) (AuditPackageSlice, error) {\n\tvar o []*AuditPackage\n\n\terr := q.Bind(ctx, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"models: failed to assign all query results to AuditPackage slice\")\n\t}\n\n\tif len(auditPackageAfterSelectHooks) != 0 {\n\t\tfor _, obj := range o {\n\t\t\tif err := obj.doAfterSelectHooks(ctx, exec); err != nil {\n\t\t\t\treturn o, err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn o, nil\n}", "title": "" }, { "docid": "8b041bcd050ede30d42e76e39a43b7af", "score": "0.5895695", "text": "func (q withdrawalFiatQuery) All(ctx context.Context, exec boil.ContextExecutor) (WithdrawalFiatSlice, error) {\n\tvar o []*WithdrawalFiat\n\n\terr := q.Bind(ctx, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"postgres: failed to assign all query results to WithdrawalFiat slice\")\n\t}\n\n\tif len(withdrawalFiatAfterSelectHooks) != 0 {\n\t\tfor _, 
obj := range o {\n\t\t\tif err := obj.doAfterSelectHooks(ctx, exec); err != nil {\n\t\t\t\treturn o, err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn o, nil\n}", "title": "" }, { "docid": "9f2995445cc4385e79f4f233e0039e7a", "score": "0.5877658", "text": "func (qs TransactionQuerySet) All(ret *[]Transaction) error {\n\treturn qs.db.Find(ret).Error\n}", "title": "" }, { "docid": "b6dfdd1315786ff7ea69ba9ac2dd291b", "score": "0.5855044", "text": "func (ciq *CarInspectionQuery) All(ctx context.Context) ([]*CarInspection, error) {\n\tif err := ciq.prepareQuery(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ciq.sqlAll(ctx)\n}", "title": "" }, { "docid": "1581640fdb462d3d6737180734f8f52e", "score": "0.5851245", "text": "func (ismq *ImplicitSkippedMessageQuery) All(ctx context.Context) ([]*ImplicitSkippedMessage, error) {\n\tif err := ismq.prepareQuery(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ismq.sqlAll(ctx)\n}", "title": "" }, { "docid": "943acb03782e7e639af29da70b1385b8", "score": "0.58454055", "text": "func (s *DB) All(to interface{}, options ...func(*index.Options)) error {\n\treturn s.root.All(to, options...)\n}", "title": "" }, { "docid": "a8b9d987986c533ef0e3a928ff710f21", "score": "0.58395296", "text": "func (q dailySummaryQuery) All(ctx context.Context, exec boil.ContextExecutor) (DailySummarySlice, error) {\n\tvar o []*DailySummary\n\n\terr := q.Bind(ctx, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"models: failed to assign all query results to DailySummary slice\")\n\t}\n\n\treturn o, nil\n}", "title": "" }, { "docid": "8e7a695b9a00f7827abaad82026b5558", "score": "0.5836583", "text": "func (lq *LimitpointQuery) All(ctx context.Context) ([]*Limitpoint, error) {\n\tif err := lq.prepareQuery(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\treturn lq.sqlAll(ctx)\n}", "title": "" }, { "docid": "69903256acf81094cf499eef16fa0318", "score": "0.5825511", "text": "func (s Select) All() (*sql.Rows, error) {\n\n\tstmt, args, err := s.render()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn s.builder.all(stmt, args)\n}", "title": "" }, { "docid": "3e4146185666bd3d3cae281accf23fdb", "score": "0.5823636", "text": "func TestAll(t *T) {\n\tfn := func(el interface{}) bool {\n\t\treturn el.(int) > 3\n\t}\n\n\t// All match case\n\tintl := []interface{}{4, 5, 6}\n\tl := NewList(intl...)\n\tok := All(fn, l)\n\tassert.Equal(t, intl, ToSlice(l))\n\tassert.Equal(t, true, ok)\n\n\t// Not all match case\n\tintl = []interface{}{3, 4, 2, 5}\n\tl = NewList(intl...)\n\tok = All(fn, l)\n\tassert.Equal(t, intl, ToSlice(l))\n\tassert.Equal(t, false, ok)\n\n\t// Degenerate case\n\tl = NewList()\n\tok = All(fn, l)\n\tassert.Equal(t, 0, Size(l))\n\tassert.Equal(t, true, ok)\n}", "title": "" }, { "docid": "3d18fba13d375c15e54c7f120bc31802", "score": "0.5801124", "text": "func All(count, batchSize int, eachFn BatchFunc) error {\n\tfor i := 0; i < count; i += batchSize {\n\t\tend := i + batchSize\n\t\tif end > count {\n\t\t\tend = count\n\t\t}\n\t\terr := eachFn(i, end)\n\t\tif err == Abort {\n\t\t\treturn nil\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "7fdcea993d402dd68613a33d5d263f99", "score": "0.579515", "text": "func (records ModelsCollection) All() (*ModelsCollection, error) {\n\treturn nil, errors.New(\"Func is not implemented\")\n}", "title": "" }, { "docid": "5d886e692aff37e946030413750aa45a", "score": "0.57777053", "text": "func (q contentUnitQuery) All(exec boil.Executor) (ContentUnitSlice, error) {\n\tvar o 
[]*ContentUnit\n\n\terr := q.Bind(nil, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"mdbmodels: failed to assign all query results to ContentUnit slice\")\n\t}\n\n\treturn o, nil\n}", "title": "" }, { "docid": "ba8182ba8f3225e0af6cc5dacca89bca", "score": "0.57771826", "text": "func (rtq *RequestTargetQuery) All(ctx context.Context) ([]*RequestTarget, error) {\n\tctx = setContextOp(ctx, rtq.ctx, \"All\")\n\tif err := rtq.prepareQuery(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\tqr := querierAll[[]*RequestTarget, *RequestTargetQuery]()\n\treturn withInterceptors[[]*RequestTarget](ctx, rtq, qr, rtq.inters)\n}", "title": "" }, { "docid": "1deb6f39ec2f30e1901bb159fbd7a411", "score": "0.5759742", "text": "func (mr *MockclientMockRecorder) All(ctx interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"All\", reflect.TypeOf((*Mockclient)(nil).All), ctx)\n}", "title": "" }, { "docid": "8303b5e7e793b77c0927d866a46b04ef", "score": "0.5759294", "text": "func (q apiAppQuery) All(ctx context.Context, exec boil.ContextExecutor) (APIAppSlice, error) {\n\tvar o []*APIApp\n\n\terr := q.Bind(ctx, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"models: failed to assign all query results to APIApp slice\")\n\t}\n\n\tif len(apiAppAfterSelectHooks) != 0 {\n\t\tfor _, obj := range o {\n\t\t\tif err := obj.doAfterSelectHooks(ctx, exec); err != nil {\n\t\t\t\treturn o, err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn o, nil\n}", "title": "" }, { "docid": "0c53375a029e5bfef72aa31bb7cb054a", "score": "0.57575244", "text": "func (pq *PCQuery) All(ctx context.Context) ([]*PC, error) {\n\tctx = setContextOp(ctx, pq.ctx, \"All\")\n\tif err := pq.prepareQuery(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\tqr := querierAll[[]*PC, *PCQuery]()\n\treturn withInterceptors[[]*PC](ctx, pq, qr, pq.inters)\n}", "title": "" }, { "docid": "669dacc8b25f9390719fccad5ac76323", "score": "0.5754844", "text": "func (q trustlineQuery) All(exec boil.Executor) (TrustlineSlice, error) {\n\tvar o []*Trustline\n\n\terr := q.Bind(nil, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"stellarcore: failed to assign all query results to Trustline slice\")\n\t}\n\n\tif len(trustlineAfterSelectHooks) != 0 {\n\t\tfor _, obj := range o {\n\t\t\tif err := obj.doAfterSelectHooks(exec); err != nil {\n\t\t\t\treturn o, err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn o, nil\n}", "title": "" }, { "docid": "b351772b65791ed50b9c31d2f30e28b4", "score": "0.5750075", "text": "func (b *Base) All() []kindsys.Core {\n\tret := make([]kindsys.Core, len(b.all))\n\tcopy(ret, b.all)\n\treturn ret\n}", "title": "" }, { "docid": "afb2e68751212a65296cc7d5a5550026", "score": "0.57499737", "text": "func All(db sql.Executor) ([]types.ATXID, error) {\n\tvar all []types.ATXID\n\tdec := func(stmt *sql.Statement) bool {\n\t\tvar id types.ATXID\n\t\tstmt.ColumnBytes(0, id[:])\n\t\tall = append(all, id)\n\t\treturn true\n\t}\n\tif _, err := db.Exec(\"select id from atxs order by epoch asc;\", nil, dec); err != nil {\n\t\treturn nil, fmt.Errorf(\"all atxs: %w\", err)\n\t}\n\treturn all, nil\n}", "title": "" }, { "docid": "d3551db35be43b44b7a54bd97bc6aaae", "score": "0.57491064", "text": "func (idx *NonUnique) All(opts *QueryOptions) ([][]byte, error) {\n\titems, err := allValues(idx.Bucket)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn unique(items), nil\n}", "title": "" }, { "docid": "adcad564cc01931681f1adadaf203e00", "score": "0.57212937", "text": "func (rq *ReplyQuery) All(ctx 
context.Context) ([]*Reply, error) {\n\tctx = setContextOp(ctx, rq.ctx, \"All\")\n\tif err := rq.prepareQuery(ctx); err != nil {\n\t\treturn nil, err\n\t}\n\tqr := querierAll[[]*Reply, *ReplyQuery]()\n\treturn withInterceptors[[]*Reply](ctx, rq, qr, rq.inters)\n}", "title": "" }, { "docid": "64d13f45b556d85edf5609c325095a71", "score": "0.57156205", "text": "func (q authFullpubkeyQuery) All(ctx context.Context, exec boil.ContextExecutor) (AuthFullpubkeySlice, error) {\n\tvar o []*AuthFullpubkey\n\n\terr := q.Bind(ctx, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"models: failed to assign all query results to AuthFullpubkey slice\")\n\t}\n\n\treturn o, nil\n}", "title": "" }, { "docid": "14f2d966f4227323dd50788f5b285eb6", "score": "0.5707628", "text": "func (efs *Flags) All() bool {\n\treturn efs.all\n}", "title": "" }, { "docid": "fd4c53158ea6f2a4cc6c0dd0e0d3f182", "score": "0.56970406", "text": "func (mr *MockPipeMockRecorder) All(result interface{}) *gomock.Call {\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"All\", reflect.TypeOf((*MockPipe)(nil).All), result)\n}", "title": "" }, { "docid": "a79e41b3e18246f1c38de3535120a503", "score": "0.56950486", "text": "func (q userQuery) All(ctx context.Context, exec boil.ContextExecutor) (UserSlice, error) {\n\tvar o []*User\n\n\terr := q.Bind(ctx, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"dal: failed to assign all query results to User slice\")\n\t}\n\n\treturn o, nil\n}", "title": "" }, { "docid": "d3154ebbb32ee751caad22ffb9183a9a", "score": "0.5692677", "text": "func All(ff ...func()) (err error) {\n\tvar wg sync.WaitGroup\n\twg.Add(len(ff))\n\tfor _, f := range ff {\n\t\tgo func(g func()) {\n\t\t\tdefer wg.Done()\n\t\t\tdefer func() {\n\t\t\t\tif r := recover(); r != nil {\n\t\t\t\t\terr = fmt.Errorf(\"service panic: %s, errors: %v\", rt.GetFuncName(g), r)\n\t\t\t\t}\n\t\t\t}()\n\t\t\tg()\n\t\t}(f)\n\t}\n\twg.Wait()\n\treturn\n}", "title": "" }, { "docid": "696d09fea4cad786bc0078f2630828fc", "score": "0.5691545", "text": "func (r *fault) All() (faults []*entity.Fault, err error) {\n\tkeys, vals, err := r.db.GetRanged(nil, 0, false)\n\tif err != nil {\n\t\treturn\n\t}\n\tfor i, val := range vals {\n\t\tif len(keys[i]) != 8 {\n\t\t\tcontinue\n\t\t}\n\t\tfaults = append(faults, &entity.Fault{\n\t\t\tDate: time.Unix(int64(binary.BigEndian.Uint32(val[0:4])), 0),\n\t\t\tUserID: binary.BigEndian.Uint32(keys[i][4:8]),\n\t\t\tErrType: string(keys[i][6:]),\n\t\t\tUserAgent: string(val),\n\t\t})\n\t}\n\treturn\n}", "title": "" }, { "docid": "7dacd9f2c470e43e4a1f32ea375a0c33", "score": "0.5690656", "text": "func (*emptyTagService) All(ctx context.Context) ([]string, error) {\n\treturn nil, fmt.Errorf(\"sealer empty tag all, err: %s\", distribution.ErrUnsupported)\n}", "title": "" } ]
28921f9e358e4c46fd32e90827b5094a
Save saves the content at the given path to the backend
[ { "docid": "4d828541c10984ef971fd330d6577111", "score": "0.0", "text": "func (m Mock) Save(basePath string) error {\n\treturn m.fs.Walk(basePath, func(path string, info os.FileInfo, err error) error {\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif info.IsDir() {\n\t\t\tlogrus.Debugf(\"Entering directory: %s\", path)\n\t\t\treturn nil\n\t\t}\n\n\t\trel, err := filepath.Rel(basePath, path)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tlogrus.Infof(\"Processing file %s\", rel)\n\t\treturn nil\n\t})\n}", "title": "" } ]
[ { "docid": "a4abf2340d4e615a79de2f3473d04e5a", "score": "0.7536226", "text": "func (c Content) Save(path string) error {\n\tf, err := os.Create(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\n\treturn c.Write(f)\n}", "title": "" }, { "docid": "bc6c69a5c8bf8c1501f12bdd0b1f82d3", "score": "0.7099215", "text": "func (f *FolderController) Save(content string) {\n\terr := ioutil.WriteFile(f.CurrentPath, []byte(content), 0644)\n\tif err != nil {\n\t\tfmt.Print(err)\n\t\treturn\n\t}\n\tf.CurrentContent = content\n}", "title": "" }, { "docid": "4757a537d3f7e909b01e73b4c00cf810", "score": "0.68618727", "text": "func Save(path string, v interface{}) error {\n lock.Lock()\n defer lock.Unlock()\n f, err := os.Create(path)\n if err != nil {\n return err\n }\n defer f.Close()\n r, err := Encode(v)\n if err != nil {\n return err\n }\n _, err = io.Copy(f, r)\n return err\n}", "title": "" }, { "docid": "2dbecdbd6e612f2dd9560c9569062011", "score": "0.67861927", "text": "func (p *Page) Save(filename string, path string) error {\r\n\r\n\treturn io.WriteFile(path, p.Body, 0600)\r\n}", "title": "" }, { "docid": "65eae7541a5b873444865eab337b3044", "score": "0.67703426", "text": "func Save(path string, object interface{}) error {\n\tencodedobject, err := json.MarshalIndent(object, \"\", \"\\t\")\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tpanic(err)\n\t}\n\terr = ioutil.WriteFile(path, encodedobject, 0644)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tpanic(err)\n\t}\n\treturn err\n}", "title": "" }, { "docid": "d5d460c2626251ea4203aa22f4863e25", "score": "0.6726061", "text": "func (f *File) save() error {\n\tdata, err := json.Marshal(f.data)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to marshal backend data to json: %s\", err)\n\t}\n\terr = ioutil.WriteFile(f.filePath, data, filePerm)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to openfile to write to: %s\", err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "fc2c4b926241c7a766c5c445a28a635a", "score": "0.6721423", "text": "func Save(path string, object interface{}) error {\n\tfile, err := os.Create(path)\n\tdefer file.Close()\n\tif err == nil {\n\t\tencoder := gob.NewEncoder(file)\n\t\tencoder.Encode(object)\n\t}\n\treturn err\n}", "title": "" }, { "docid": "fc2c4b926241c7a766c5c445a28a635a", "score": "0.6721423", "text": "func Save(path string, object interface{}) error {\n\tfile, err := os.Create(path)\n\tdefer file.Close()\n\tif err == nil {\n\t\tencoder := gob.NewEncoder(file)\n\t\tencoder.Encode(object)\n\t}\n\treturn err\n}", "title": "" }, { "docid": "1825c4b964c20b0738569bb1b7e97938", "score": "0.65929645", "text": "func (f *ToutiaoChannel) Save(path string) error {\n\n\tfile, err := os.Create(path)\n\tif err == nil {\n\t\tencoder := json.NewEncoder(file)\n\t\terr = encoder.Encode(f)\n\t}\n\tfile.Close()\n\treturn err\n}", "title": "" }, { "docid": "6d7d146a32d13f512a81db40c434f73c", "score": "0.6588601", "text": "func (p *Page) save() error { //this is for the save method to save presistent storage\r\n filename := p.Title + \".txt\" //filename will be made into a txt file\r\n return ioutil.WriteFile(filename, p.Body, 0600) // this iwill be saved in the memory location 0600\r\n\r\n }", "title": "" }, { "docid": "2ef83b18494740e7053c482c9fffcd29", "score": "0.656058", "text": "func (app *encryptedFileDiskService) Save(relativePath string, content []byte) error {\n\tencrypted, err := app.encryption.Encrypt(content)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn app.service.Save(relativePath, 
[]byte(encrypted))\n}", "title": "" }, { "docid": "a29c65386d9d0005f386fc4accedb4da", "score": "0.6540217", "text": "func (p *Page) save() error {\n\tfilename := configuration.WorkDirectory + \"/data/\" + slug.Slug(p.Title) + \".txt\"\n\treturn ioutil.WriteFile(filename, p.Body, 0600)\n}", "title": "" }, { "docid": "ec448bc3d48ea51ed1f370637771a209", "score": "0.65120393", "text": "func Save(path string, v interface{}) error {\n\tlock.Lock()\n\tdefer lock.Unlock()\n\tf, err := os.Create(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\tr, err := Marshal(v)\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = io.Copy(f, r)\n\treturn err\n}", "title": "" }, { "docid": "11c86bdc491951df284e11950a7c333d", "score": "0.64903915", "text": "func (b *restBackend) Save(h backend.Handle, p []byte) (err error) {\n\tif err := h.Valid(); err != nil {\n\t\treturn err\n\t}\n\n\t<-b.connChan\n\tresp, err := b.client.Post(restPath(b.url, h), \"binary/octet-stream\", bytes.NewReader(p))\n\tb.connChan <- struct{}{}\n\n\tif resp != nil {\n\t\tdefer func() {\n\t\t\te := resp.Body.Close()\n\n\t\t\tif err == nil {\n\t\t\t\terr = e\n\t\t\t}\n\t\t}()\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif resp.StatusCode != 200 {\n\t\treturn fmt.Errorf(\"unexpected HTTP response code %v\", resp.StatusCode)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "1433469ce516151f3f35b73200471b9b", "score": "0.6470212", "text": "func Save(filename string, content []byte) error {\n\tvar err error\n\tw, err := os.Create(filename)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer func() {\n\t\t_ = w.Close()\n\t}()\n\t_, err = w.Write(content)\n\n\treturn err\n}", "title": "" }, { "docid": "692d8ad86f1647f0bc7da69deda5afb8", "score": "0.6429115", "text": "func (ds *DiskService) SaveFile(fileSavePath string, originUrl string) {\n\n}", "title": "" }, { "docid": "a4253fe353daca0d8e5599c49edf859d", "score": "0.64010817", "text": "func (fs *fileStore) Save(where string, what []byte) error {\n\tfs.lock(where)\n\tdefer fs.unlock(where)\n\treturn ioutil.WriteFile(fs.pathFor(where), what, 0600)\n}", "title": "" }, { "docid": "54bd33a17c900f40bebc6b9a43c0be45", "score": "0.63894165", "text": "func (fs *FileSaver) Save(content []byte, file string) error {\n\terr := ioutil.WriteFile(file, content, 0644)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "8b1e6ae506c3fdde184072fb9ec67132", "score": "0.63777125", "text": "func (p *Page) save() error {\n filename := p.Title + \".txt\"\n return ioutil.WriteFile(filename, p.Body, 0600)\n}", "title": "" }, { "docid": "94f0deabc881fdfdf248b0aa4d15ec90", "score": "0.6350097", "text": "func (l *Local) Save(path string, contents io.Reader) error {\n\treturn nil\n}", "title": "" }, { "docid": "80bfbbb9da8a77952f6664ee912cf6d0", "score": "0.6267368", "text": "func (r *Repo) Save(path string, data []byte) error {\n\tif err := ioutil.WriteFile(filepath.Join(r.location, path), data, os.ModePerm); err != nil {\n\t\treturn err\n\t}\n\tvalue := r.hashFunc(data)\n\n\tr.lock.Lock()\n\tr.hashValue[path] = value\n\tr.lock.Unlock()\n\n\treturn nil\n}", "title": "" }, { "docid": "65cca399ef1a2744970ef9c89251df21", "score": "0.6251513", "text": "func (p *Page) save() error {\n\t// Generamos un archivo con el nombre del campo inicializado en el struct.\n\t// luego escribimos el archivo en el sistema. 
Nombre archivo => filename, contenido, Body, permisos para r y w\n\tfilename := p.Title + \".txt\"\n\treturn ioutil.WriteFile(filename, p.Body, 0600)// el valor octal del ultimo parametro indica permisos r y w\n}", "title": "" }, { "docid": "fc996fbeadbb80d59ae513f991222af2", "score": "0.62094843", "text": "func saveHandler(w http.ResponseWriter, r *http.Request, title string) {\n\tbody := r.FormValue(\"body\")\n\tp := &Page{Title: title, Body: []byte(body)} // []byte(body) converts the string to a slice of bytes\n\terr := p.save() // This writes the data in the form to the file\n\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\thttp.Redirect(w, r, \"/view/\"+title, http.StatusFound)\n}", "title": "" }, { "docid": "53591eb34611dec714c55ab457f20ef1", "score": "0.620021", "text": "func (p *Project) Save(file string) error {\n text, _ := json.MarshalIndent(p, \"\", \" \")\n return ioutil.WriteFile(file, text, 0664)\n}", "title": "" }, { "docid": "24663781784dbdecc5cd582c93f4ee32", "score": "0.6200111", "text": "func saveHandler(w http.ResponseWriter, r *http.Request, title string) {\n\tbody := r.FormValue(\"body\")\n\tp := &Page{Title: title, Body: []byte(body)}\n\tid := newID()\n\tstart := time.Now()\n\terr := p.save(newContextWithParentID(r.Context(), id))\n\tsendSpan(\"ioutil.WriteFile\", id, start, r.Context(), map[string]interface{}{\"title\": title, \"bodylen\": len(body), \"error\": err})\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\thttp.Redirect(w, r, \"/view/\"+title, http.StatusFound)\n}", "title": "" }, { "docid": "28e8e0e7711c12a137cd1acd9b7909bf", "score": "0.6192164", "text": "func (fs *FileStore) Save(contents, docUUID, ownerID string) error {\n\tdirpath := fmt.Sprintf(\"%s/%s\", fs.rootDir, ownerID)\n\thaveDir, _ := dirExists(dirpath)\n\tif !haveDir {\n\t\terr2 := os.MkdirAll(dirpath, 0700)\n\t\tif err2 != nil {\n\t\t\treturn errors.New(fmt.Sprintf(\"failed to mkdir: %v\", err2))\n\t\t}\n\t}\n\n\tf, err := os.Create(fmt.Sprintf(\"%s/%s\", dirpath, docUUID))\n\tdefer f.Close()\n\tif err != nil {\n\t\treturn errors.New(fmt.Sprintf(\"couldn't create file: %v\", err))\n\t}\n\n\t_, err = f.WriteString(contents)\n\tif err != nil {\n\t\treturn errors.New(fmt.Sprintf(\"couldn't write the file: %v\", err))\n\t}\n\terr = f.Sync()\n\n\tif err != nil {\n\t\treturn errors.New(fmt.Sprintf(\"Unable to sync file: %v\", err))\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "23ab837222f1d40de9a5888cfbb72470", "score": "0.61872", "text": "func Save(path string, scripts []Script) {\n\tb, err := json.Marshal(scripts)\n\tif err != nil {\n\t\tl.Println(err.Error())\n\t\treturn\n\t}\n\tf, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0777)\n\tif err != nil {\n\t\tl.Println(err.Error())\n\t\treturn\n\t}\n\tdefer f.Close()\n\tf.Write(b)\n}", "title": "" }, { "docid": "2c042f82ede459f8c6537b0975b10223", "score": "0.6148725", "text": "func (s *Storage) SaveMediaPath(path string) error {\n\tif err := s.db.Write(MediaPath, MediaResource, path); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c569239e3a09cffc55777feae8202dce", "score": "0.6115463", "text": "func (s *GoogleCloudStorage) Save(path string, r io.Reader) error {\n\tw := s.client.Bucket(s.bucket).Object(path).NewWriter(context.Background())\n\tw.ACL = []storage.ACLRule{{\n\t\tEntity: storage.AllUsers,\n\t\tRole: storage.RoleReader,\n\t}}\n\tw.CacheControl = \"public, 
max-age=86400\"\n\n\tif _, err := io.Copy(w, r); err != nil {\n\t\treturn fmt.Errorf(\"failed to copy file to GCS bucket: %v\", err)\n\t}\n\tif err := w.Close(); err != nil {\n\t\treturn fmt.Errorf(\"failed to close GCS writer: %v\", err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "5593318fda4d6e5bba276fd1a4d4f8f7", "score": "0.6109174", "text": "func(p *Page) save() error{\n\tfilename:=p.Title+\".txt\"\n\treturn ioutil.WriteFile(filename,p.Body,0600)\t \n}", "title": "" }, { "docid": "b329c23c99b83e904127845076d5ebf4", "score": "0.60798883", "text": "func (p *Page) save() error {\n\tfilename := p.Title + \".txt\"\n\treturn ioutil.WriteFile(filename, p.Body, 0600)\n}", "title": "" }, { "docid": "b4fd42b6f33b5c336ba539167b48ebf3", "score": "0.6079841", "text": "func (entry *FileOutputEntry) Save(content []byte) error {\n tempPath := entry.path + \".tmp\"\n\n err := ioutil.WriteFile(tempPath, content, 0644)\n\n if err != nil {\n return err\n }\n\n // The umask is applied to the FileMode passed to WriteFile. Ensure the\n // permissions get set to allow others read access (since it is likely the\n // outputs will be published on a web server).\n err = os.Chmod(tempPath, 0644)\n\n if err != nil {\n os.Remove(tempPath)\n return err\n }\n\n err = os.Rename(tempPath, entry.path)\n\n if err != nil {\n os.Remove(tempPath)\n return err\n }\n\n return nil\n}", "title": "" }, { "docid": "ec6115c3a70a56a7ab24dfab02ccc095", "score": "0.607617", "text": "func (p *Page) save() error {\n\tfilename := p.Title + \".txt\"\n\n\treturn ioutil.WriteFile(filename, p.Body, 0600) // WriteFile returns an error or nil (if no error)\n}", "title": "" }, { "docid": "3e601f08df2fe4716c7f111600b23830", "score": "0.60753375", "text": "func (b *Local) Save(ctx context.Context, h restic.Handle, rd restic.RewindReader) error {\n\tdebug.Log(\"Save %v\", h)\n\tif err := h.Valid(); err != nil {\n\t\treturn err\n\t}\n\n\tfilename := b.Filename(h)\n\n\t// create new file\n\tf, err := fs.OpenFile(filename, os.O_CREATE|os.O_EXCL|os.O_WRONLY, backend.Modes.File)\n\n\tif b.IsNotExist(err) {\n\t\tdebug.Log(\"error %v: creating dir\", err)\n\n\t\t// error is caused by a missing directory, try to create it\n\t\tmkdirErr := os.MkdirAll(filepath.Dir(filename), backend.Modes.Dir)\n\t\tif mkdirErr != nil {\n\t\t\tdebug.Log(\"error creating dir %v: %v\", filepath.Dir(filename), mkdirErr)\n\t\t} else {\n\t\t\t// try again\n\t\t\tf, err = fs.OpenFile(filename, os.O_CREATE|os.O_EXCL|os.O_WRONLY, backend.Modes.File)\n\t\t}\n\t}\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"OpenFile\")\n\t}\n\n\t// save data, then sync\n\t_, err = io.Copy(f, rd)\n\tif err != nil {\n\t\t_ = f.Close()\n\t\treturn errors.Wrap(err, \"Write\")\n\t}\n\n\tif err = f.Sync(); err != nil {\n\t\t_ = f.Close()\n\t\treturn errors.Wrap(err, \"Sync\")\n\t}\n\n\terr = f.Close()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"Close\")\n\t}\n\n\treturn setNewFileMode(filename, backend.Modes.File)\n}", "title": "" }, { "docid": "f122c17f1df989c559596358675143ac", "score": "0.6074497", "text": "func (p *Page) save() error {\n\tfilename := p.Title + \".txt\"\n\terr := ioutil.WriteFile(filename, p.Body, 0666)\n\tif err != nil {\n\t\treturn errors.New(\"error saving\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "efa5d83c73995c664d18104e6de20a45", "score": "0.60469747", "text": "func (j *Journal) Save(filename string) {\n\t_ = ioutil.WriteFile(filename, []byte(j.String()), 0644)\n}", "title": "" }, { "docid": "f5ff9433425934e0c4203008793a3144", "score": 
"0.60400635", "text": "func (be *Backend) Save(ctx context.Context, h restic.Handle, rd restic.RewindReader) error {\n\tif err := h.Valid(); err != nil {\n\t\treturn err\n\t}\n\n\tobjName := be.Filename(h)\n\n\tdebug.Log(\"Save %v at %v\", h, objName)\n\n\tbe.sem.GetToken()\n\n\tdebug.Log(\"InsertObject(%v, %v)\", be.container.Name, objName)\n\n\tvar err error\n\tif rd.Length() < 256*1024*1024 {\n\t\t// wrap the reader so that net/http client cannot close the reader\n\t\tdataReader := ioutil.NopCloser(rd)\n\n\t\t// if it's smaller than 256miB, then just create the file directly from the reader\n\t\terr = be.container.GetBlobReference(objName).CreateBlockBlobFromReader(dataReader, nil)\n\t} else {\n\t\t// otherwise use the more complicated method\n\t\terr = be.saveLarge(ctx, objName, rd)\n\n\t}\n\n\tbe.sem.ReleaseToken()\n\tdebug.Log(\"%v, err %#v\", objName, err)\n\n\treturn errors.Wrap(err, \"CreateBlockBlobFromReader\")\n}", "title": "" }, { "docid": "da72b73122e84795375e0746eef233e9", "score": "0.6028657", "text": "func (s *FileStorage) save() error {\n\tb, err := yaml.Marshal(s.state)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"encode data: %w\", err)\n\t}\n\terr = os.WriteFile(s.file, b, 0o600)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"write data to file: %w\", err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8c1656ea88196f5c8240f6bf0999d825", "score": "0.6011855", "text": "func (c Config) Save(path string) error {\n\tj, err := json.Marshal(c)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = ioutil.WriteFile(path, j, 0644)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Printf(\"Config file saved to %v\", path)\n\treturn nil\n}", "title": "" }, { "docid": "9aec525b2ba2d0fd73ff32531ab5abb9", "score": "0.6006479", "text": "func (p *Page) Save() error {\n\treturn utils.WriteFile(utils.GetTmpPath()+p.Title+\".txt\", p.Body)\n}", "title": "" }, { "docid": "334d13429ea1d86ac42b8a9ed84b67f5", "score": "0.59855616", "text": "func (c Canvas) Save(path string) {\n\tf, _ := os.Create(path)\n\tw := bufio.NewWriter(f)\n\tbuf := c.ToPPM()\n\tbuf.WriteTo(w)\n\tw.Flush()\n}", "title": "" }, { "docid": "0b109b97f68379762915926d792a7e4e", "score": "0.5980553", "text": "func (s *StoreImpl) Save(dir string) error {\n\tassetMap := make(map[string]Asset)\n\tfor k, v := range s.assets {\n\t\tassetMap[k.String()] = v\n\t}\n\tdata, err := json.MarshalIndent(&assetMap, \"\", \" \")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tpath := filepath.Join(dir, stateFileName)\n\tif err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {\n\t\treturn err\n\t}\n\tif err := ioutil.WriteFile(path, data, 0644); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "378b19cbcb9638a5ff9d90dbf89de2b8", "score": "0.5967312", "text": "func (ns Names) Save(path string) error {\n\treturn SaveList(ns.ToList(), path)\n}", "title": "" }, { "docid": "76b7e771970b015105504c01bd27648d", "score": "0.596484", "text": "func (store *Store) Save() error {\n\tstore.imutex.RLock()\n\tdefer store.imutex.RUnlock()\n\tif store.dataPath == \"\" {\n\t\treturn nil\n\t}\n\tb, err := json.Marshal(store)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn ioutil.WriteFile(store.dataPath, b, 0600)\n}", "title": "" }, { "docid": "2ba89d5c54e27c635cd2a3284a084444", "score": "0.5957353", "text": "func save(f string, t *Text) error {\n\tfile, err := os.Create(f)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\tfor _, line := range t.lines 
{\n\t\tfile.WriteString(line.data)\n\t\tfile.WriteString(t.lineEnding)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "5a35241c262e83561ec6a1a70531cdc7", "score": "0.59530294", "text": "func (be *Backend) Save(ctx context.Context, h restic.Handle, rd io.Reader) error {\n\tdebug.Log(\"Save %v\", h)\n\n\tif err := h.Valid(); err != nil {\n\t\treturn err\n\t}\n\n\tobjName := be.Filename(h)\n\t// debug.Log(\"Save key:%v\", objName)\n\t_, err := be.bucket.GetObjectMeta(objName)\n\n\tif err == nil {\n\t\tdebug.Log(\"%v already exists\", h)\n\t\treturn errors.New(\"key already exists\")\n\t}\n\n\treturn errors.Wrap(be.bucket.PutObject(objName, rd), \"bucket.PutObject\")\n}", "title": "" }, { "docid": "a22d5e4fb12633c72d2302ddbed0604c", "score": "0.5950725", "text": "func (b *Buffer) Save() error {\n\treturn b.SaveAs(b.path)\n}", "title": "" }, { "docid": "9a32d8d82333f7fcf9d9a08b0d2bed65", "score": "0.5949285", "text": "func Save(rootPath string, tweet twitter.Tweet) error {\n\t// Normalize rootPath by removing trailing path separators\n\tif strings.HasSuffix(rootPath, string(os.PathSeparator)) {\n\t\trootPath = strings.TrimRight(rootPath, string(os.PathSeparator))\n\t}\n\terr := downloadMedia(rootPath, tweet)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to download media of tweet with id %d: %v\", tweet.Id, err)\n\t}\n\tpath, err := generateFilePath(rootPath, \"\", \"json\", tweet)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to generate file path: %v\", err)\n\t}\n\tf, err := os.Create(path)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to create file with path %s: %v\", path, err)\n\t}\n\tdefer f.Close()\n\tencoder := json.NewEncoder(f)\n\tif err := encoder.Encode(tweet); err != nil {\n\t\treturn fmt.Errorf(\"unable to encode tweet with id %d into file %s: %v\", tweet.Id, path, err)\n\t}\n\tfmt.Printf(\"saved backup to %s\\n\", path)\n\treturn nil\n}", "title": "" }, { "docid": "8064e5ad783061206c3f81b24c352b59", "score": "0.5923845", "text": "func (e *EditArea) Save() {\n\tf, err := os.Create(e.Filename)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tdefer f.Close()\n\t_, err = f.WriteString(e.text.String())\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tf.Sync()\n\te.beenSaved = true\n}", "title": "" }, { "docid": "6f729d2f1997e037516ff420070f6935", "score": "0.5922949", "text": "func SaveDHT(path string, dht *structures.DHT) error {\n\tfile, err := os.Create(path)\n\tif err == nil {\n\t\tencoder := gob.NewEncoder(file)\n\t\tencoder.Encode(dht)\n\t}\n\tfile.Close()\n\treturn err\n}", "title": "" }, { "docid": "e4699d1cead91cdbcb6c9c7117d36a0b", "score": "0.5900093", "text": "func (r RedisCachedLocalImageStore) Save(img Imgmeta, content []byte) error {\n\tif content != nil {\n\t\treturn ioutil.WriteFile(path.Join(r.basepath, img.Name()), content, 0644)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "ef35938bb959ad1d0b8934656dcc1262", "score": "0.5885249", "text": "func (theater Theater) save() error {\n\tjson, err := json.Marshal(theater)\n\tif err == nil {\n\t\tif _, err := os.Stat(theater.dataDirectory()); os.IsNotExist(err) {\n\t\t\t_ = os.MkdirAll(theater.dataDirectory(), os.ModePerm)\n\t\t}\n\n\t\terr = ioutil.WriteFile(theater.filePath(), []byte(json), 0644)\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "c8d433e304664c267a2d555c680a378a", "score": "0.5882614", "text": "func (c *cache) save() error {\n\tif !c.changed {\n\t\treturn nil\n\t}\n\terr := os.MkdirAll(filepath.Dir(c.file), os.ModePerm)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif err := 
c.serialize(c.file); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "b519134d2b6adffcebb6c2e94eed55ed", "score": "0.58816046", "text": "func SaveToFile(b []byte) error {\n\terr := ioutil.WriteFile(filePath, b, 0644)\n\treturn err\n}", "title": "" }, { "docid": "c797ab0c448870dc6855869cc7dad327", "score": "0.587978", "text": "func Save(r io.Reader, path string) error {\n\t// Write out to the desired file path\n\tw, err := os.Create(path)\n\tif err != nil {\n\t\tfmt.Printf(\"Error - %s\", err)\n\t}\n\tdefer w.Close()\n\t_, err = io.Copy(w, r)\n\treturn err\n\n}", "title": "" }, { "docid": "0925a55c6b7f0c587b53d6a620f8e702", "score": "0.587932", "text": "func (kvs *KeyValueStore) Save() error {\n\tkvs.lock.Lock()\n\tdefer kvs.lock.Unlock()\n\n\tbytes, err := json.Marshal(kvs.data)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = os.WriteFile(kvs.filename, bytes, 0644)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tkvs.unsaved = false\n\n\treturn nil\n}", "title": "" }, { "docid": "3ac53eba91881ffb8f45d693bb859711", "score": "0.5875428", "text": "func (p *Page) Save() error {\n\tfilename := p.Title + \".txt\"\n\treturn ioutil.WriteFile(filename, p.Body, 0600)\n}", "title": "" }, { "docid": "3ac53eba91881ffb8f45d693bb859711", "score": "0.5875428", "text": "func (p *Page) Save() error {\n\tfilename := p.Title + \".txt\"\n\treturn ioutil.WriteFile(filename, p.Body, 0600)\n}", "title": "" }, { "docid": "b6ef0bb3d03737a7617623a0620d75d4", "score": "0.587425", "text": "func (s *State) Save(outputPath string) error {\n\textractDir := func(path string) string {\n\t\te := strings.Split(path, \"/\")\n\t\treturn strings.Join(e[:len(e)-1], \"/\")\n\t}\n\n\tdir := extractDir(outputPath)\n\n\tif err := os.MkdirAll(\".\"+string(filepath.Separator)+dir, 0777); err != nil {\n\t\treturn err\n\t}\n\n\tfile, err := os.Create(outputPath)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\tw := bufio.NewWriter(file)\n\n\tdefer func() {\n\t\tw.Flush()\n\t\tfile.Close()\n\t}()\n\n\tvs := bytes.NewBuffer(make([]byte, 0))\n\n\tfor i := 0; i < s.Size(); i++ {\n\t\tword, _ := s.Word(i)\n\t\tvs.WriteString(fmt.Sprintf(\"%v \", word))\n\t\tvs.WriteString(fmt.Sprintf(\"%v\\n\", formatTensor(s.emb.m[i])))\n\t}\n\n\tw.WriteString(fmt.Sprintf(\"%v\", vs.String()))\n\n\treturn nil\n}", "title": "" }, { "docid": "3ec8016625ea1cde3a74c2f7fbbdf119", "score": "0.5872383", "text": "func saveHandler(w http.ResponseWriter, r *http.Request) {\n // title := r.URL.Path[len(\"/save/\"):]\n title, err := getTitle(w, r)\n if err != nil {\n return\n }\n\n\n body_str := r.FormValue(\"body\")\n p := &Page{Title: title, Body:[]byte(body_str)}\n p.save()\n http.Redirect(w, r, \"/view/\"+title, http.StatusFound)\n\n}", "title": "" }, { "docid": "d5e9827efbb6d0de711955149f962311", "score": "0.5869642", "text": "func Save(folder string, filename string, src interface{}) error {\n\tfilepath := path.Join(folder, filename)\n\tos.MkdirAll(folder, 0755)\n\n\tfile, err := os.OpenFile(filepath, os.O_WRONLY|os.O_CREATE, 0644)\n\tif err != nil {\n\t\t// safely ignore if we miss a cache hit\n\t\treturn nil\n\t}\n\tdefer file.Close()\n\n\tif err := json.NewEncoder(file).Encode(src); err != nil {\n\t\treturn fmt.Errorf(\"can't encode '%s' cache: %v\", filepath, err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "cc8d2bf67cc42259cddca9ebe76e9498", "score": "0.58678085", "text": "func SaveToFile(path string, data []byte) (filename string) {\n\tfilename = uniuri.NewLen(32)\n\t_ = os.WriteFile(filepath.Join(path, filename), data, 
0770)\n\treturn\n}", "title": "" }, { "docid": "f9f58f7710fbbfa1b0d6d30d8aae6efc", "score": "0.5846711", "text": "func (f *Frontend) Save() error {\n\tf.mu.Lock()\n\tdefer f.mu.Unlock()\n\n\tss, err := getSaveState()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif err := f.storage.Save(f.GetHashPath(), ss); err != nil {\n\t\treturn err\n\t}\n\tss = nil\n\n\tif sram := getSaveRAM(); sram != nil {\n\t\tif err := f.storage.Save(f.GetSRAMPath(), sram); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tsram = nil\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "4b5565c40122454bdc744c4e0cf1fcd5", "score": "0.5825626", "text": "func (l Location) Save() error {\n return ioutil.WriteFile(locationF(), l.toJson(), 0644)\n}", "title": "" }, { "docid": "ba34ccecfb276cb6677dc599475d3d4b", "score": "0.5824472", "text": "func save(note Note) error {\n\tif note.Label != \"\" {\n\t\tif err := os.MkdirAll(\"notes/\"+note.Label, os.ModePerm); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tfilename := filepath.Join(\"notes\", note.Label, note.Title)\n\tif _, err := os.Stat(filename); err == nil {\n\t\treturn errors.New(\"file already exists\")\n\t} else if !os.IsNotExist(err) {\n\t\treturn err\n\t}\n\t// Check if a template exists\n\tcfg := config.New()\n\ttemplatePath, ok := cfg.GetTemplatePath(\"notes\", note.Label)\n\tif !ok {\n\t\tf, err := os.OpenFile(filename, os.O_RDONLY|os.O_CREATE, 0644)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer f.Close()\n\t} else {\n\t\tdata, err := ioutil.ReadFile(templatePath)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\t// Write data to dst\n\t\terr = ioutil.WriteFile(filename, data, 0644)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e01aeb4935a04bf5557c1edb20d1f533", "score": "0.5818703", "text": "func (self *KadDb) save(path string, cb func(*NodeRecord, Node)) error {\n\tdefer self.lock.Unlock()\n\tself.lock.Lock()\n\n\tvar n int\n\n\tfor _, b := range self.Nodes {\n\t\tfor _, node := range b {\n\t\t\tn++\n\t\t\tnode.After = time.Now()\n\t\t\tnode.Seen = time.Now()\n\t\t\tif cb != nil {\n\t\t\t\tcb(node, node.node)\n\t\t\t}\n\t\t}\n\t}\n\n\tdata, err := json.MarshalIndent(self, \"\", \" \")\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = ioutil.WriteFile(path, data, os.ModePerm)\n\tif err != nil {\n\t\tlog.Warn(fmt.Sprintf(\"unable to save kaddb with %v nodes to %v: %v\", n, path, err))\n\t} else {\n\t\tlog.Info(fmt.Sprintf(\"saved kaddb with %v nodes to %v\", n, path))\n\t}\n\treturn err\n}", "title": "" }, { "docid": "df3d44a4c42541139b6accb72360b65c", "score": "0.58147424", "text": "func (a *ZoomEyeAgent) Save(name string, result *zoomeye.SearchResult) (string, error) {\n\tpath := filepath.Join(a.conf.DataPath, name+\".json\")\n\tif err := writeObject(path, result); err != nil {\n\t\treturn \"\", err\n\t}\n\tpath, _ = filepath.Abs(path)\n\treturn path, nil\n}", "title": "" }, { "docid": "9c82ec576d0c0307535abbcf8aab4bfc", "score": "0.58028984", "text": "func (s *store) save() error {\n\t// No need to lock here, has write-lock from s.StoreTweets\n\n\tf, err := os.Create(s.location)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"Unable to open store for writing\")\n\t}\n\tdefer f.Close()\n\n\tzf, _ := gzip.NewWriterLevel(f, gzip.BestCompression) // #nosec G104: Ignore error as using a compression constant\n\tdefer func() {\n\t\tzf.Flush()\n\t\tzf.Close()\n\t}()\n\n\treturn errors.Wrap(gob.NewEncoder(zf).Encode(s.s), \"Unable to encode store\")\n}", "title": "" }, { "docid": "f47b7b601c7232c0647418a19e6ed5d4", 
"score": "0.57942706", "text": "func (cache *Cache) Save() error {\n\tif cache.path == \"\" {\n\t\treturn nil\n\t}\n\n\tdata, err := json.Marshal(cache)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = ioutil.WriteFile(cache.path, data, 0644)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "d37a2aaf864a17270717d3f88a50971c", "score": "0.57891893", "text": "func (server *ShipyardService) save() error {\n\tdata, err := json.Marshal(server)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = ioutil.WriteFile(server.datafile, data, 0600)\n\treturn err\n}", "title": "" }, { "docid": "dcbab87bc805f106360d56ccb68f86e7", "score": "0.5786724", "text": "func (ss *SkySync) save() error {\n\treturn persist.SaveJSON(persistMetadata, ss.persistData(), persistFileName)\n}", "title": "" }, { "docid": "c33c27a0e0c4858d7800be15286ab952", "score": "0.5782899", "text": "func (bc *bzCommand) save(path string) (string, error) {\n\tfileType, err := getFileType(path)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\targs := []string{fileType, fsUtilCmdSave}\n\n\t// directory save requires root path\n\tif fileType == fsUtilCmdDirectory {\n\t\targs = append(args, fsUtilCmdRoot, path, fsUtilCmdGlobWildCard)\n\t} else {\n\t\targs = append(args, path)\n\t}\n\tlog.Info(args)\n\n\tstdout, _, st, err := bc.runCmd(args)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"Error running command: %s\", err)\n\t}\n\tif st.State != execer.COMPLETE || st.ExitCode != 0 || st.Error != \"\" {\n\t\treturn \"\", fmt.Errorf(\"Error execing save. ProcessStatus: %v\", st)\n\t}\n\n\terr = validateFsUtilSaveOutput(stdout)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\ts, err := splitFsUtilSaveOutput(stdout)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tsize, err := strconv.ParseInt(s[1], 10, 64)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tid := bazel.SnapshotID(s[0], size)\n\treturn id, nil\n}", "title": "" }, { "docid": "e5bee8508c0b458b0535b39c47ae05e2", "score": "0.57725966", "text": "func handlerSave(w http.ResponseWriter, r *http.Request) {\n\tif !handleCORS(w, r) {\n\t\treturn\n\t}\n\n\t// Check method and read POST body.\n\t// Limit is set to maxSize+1 to allow distinguishing between exactly maxSize\n\t// and larger than maxSize requests.\n\trequestBody := getPostBody(w, r, *maxSize+1)\n\tif requestBody == nil {\n\t\treturn\n\t}\n\tif len(requestBody) > *maxSize {\n\t\tstorageError(w, http.StatusBadRequest, \"Program too large.\")\n\t\treturn\n\t}\n\n\t// TODO(ivanpi): Check if bundle is parseable. 
Format/lint?\n\n\tbLink, bData, err := storage.StoreBundleLinkAndData(string(requestBody))\n\tif err != nil {\n\t\tstorageInternalError(w, \"Error storing bundle: \", err)\n\t\treturn\n\t}\n\n\tstorageRespond(w, http.StatusOK, fullResponseFromLinkAndData(bLink, bData))\n}", "title": "" }, { "docid": "e817ecd4309e095e74019faf48550b24", "score": "0.57723457", "text": "func SaveFile(p string, v interface{}) error {\n\tdata, err := json.Marshal(v)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = os.MkdirAll(filepath.Dir(p), 755)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn ioutil.WriteFile(p, data, 0600)\n}", "title": "" }, { "docid": "87e369a994c1ff476f87a0d495fa55b6", "score": "0.5768551", "text": "func SaveDocument(id int, col string, ext string, content string) {\n\n\tif col == \"jwt\" {\n\t\treturn\n\t}\n\n\tpath := CSDir + \"/\" + col + \"/\" + strconv.Itoa(id)[0:2] + \"/\" + strconv.Itoa(id)[2:4]\n\n\tif _, err := os.Stat(path); os.IsNotExist(err) {\n\t\tos.MkdirAll(path, 0777)\n\t}\n\n\tf, err := os.Create(getPathToID(id, col) + ext)\n\tcheck(err)\n\t_, err = f.WriteString(content)\n\tcheck(err)\n\n\treturn\n}", "title": "" }, { "docid": "2417d676e35a0e35d08677ee525140a8", "score": "0.5767505", "text": "func (we *workflowEntries) save() error {\n\n\tdbFile, err := os.OpenFile(we.DBFile, os.O_RDWR|os.O_CREATE, 0644)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := dbFile.Truncate(0); err != nil {\n\t\tlog.Error(err)\n\t}\n\n\t_, err = dbFile.Seek(0, 0)\n\tif err != nil {\n\t\tlog.Error(err)\n\t}\n\n\tdefer func() {\n\t\tif err := dbFile.Close(); err != nil {\n\t\t\tlog.Errorf(\"Error closing filename %v\", err)\n\t\t}\n\t}()\n\n\tdata, err := json.Marshal(we.Entries)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t_, err = dbFile.Write(data)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := dbFile.Sync(); err != nil {\n\t\tlog.Errorf(\"Error syncing db file %v\", err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "f1b1c29681812235f0392e890b3475d1", "score": "0.57463205", "text": "func StoreContent(w http.ResponseWriter, r *http.Request, s Server) {\n\n\tvars := mux.Vars(r)\n\n\tsize, f, err := writeRequestFileToTemp(r.Body)\n\tif err != nil {\n\t\tproblem.Error(w, r, problem.Problem{Detail: err.Error()}, http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tdefer cleanupTempFile(f)\n\n\tt := pack.NewTask(vars[\"name\"], f, size)\n\tresult := s.Source().Post(t)\n\n\tif result.Error != nil {\n\t\tproblem.Error(w, r, problem.Problem{Detail: result.Error.Error()}, http.StatusBadRequest)\n\t\treturn\n\t}\n\n\t// must come *after* w.Header().Add()/Set(), but before w.Write()\n\tw.WriteHeader(http.StatusCreated)\n\n\tjson.NewEncoder(w).Encode(result.ID)\n}", "title": "" }, { "docid": "743dd90dd658e019ef6a2cfc7eaaf679", "score": "0.57364005", "text": "func (u *Upload) Save() {\n\tr := u.Ctx.Request()\n\terr := r.ParseMultipartForm(15485760)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\tf, h, err := r.FormFile(\"upload\")\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\tdefer f.Close()\n\to, err := os.OpenFile(h.Filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\tdefer o.Close()\n\tio.Copy(o, f)\n\tfmt.Println(\"OK\")\n}", "title": "" }, { "docid": "3e2d38f0fbf64f0cbc25c12c4ae70400", "score": "0.5712616", "text": "func (s *fsStore) Save(listRef reference.Reference, manifest reference.Reference, image types.ImageManifest) error {\n\tif err := s.createManifestListDirectory(listRef.String()); err 
!= nil {\n\t\treturn err\n\t}\n\tfilename := manifestToFilename(s.root, listRef.String(), manifest.String())\n\tbytes, err := json.Marshal(image)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn os.WriteFile(filename, bytes, 0o644)\n}", "title": "" }, { "docid": "3b11e53e683c788771c37c6d2be00202", "score": "0.5704207", "text": "func (l *List) Save(filename string) error {\n\tcontent, err := json.Marshal(l)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn ioutil.WriteFile(filename, content, 0644)\n}", "title": "" }, { "docid": "cd720117448ee310cdc0aa0b4cc74e2c", "score": "0.57035464", "text": "func (p Projects) Save(path string) {\n\tutil.SaveToYaml(p, path, \"projects\")\n}", "title": "" }, { "docid": "00dea6c8d10db783f4846bc8fd748d98", "score": "0.56889147", "text": "func (l *List) Save(filename string) error {\n\tjs, err := json.Marshal(l)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn ioutil.WriteFile(filename, js, 0644)\n}", "title": "" }, { "docid": "45de61ed66b8c399bd758eff9bbff4d2", "score": "0.56861717", "text": "func (e *Ebook) Save(outputPath string) {\n\tif outputPath == \"\" {\n\t\toutputPath = \"ebook.epub\"\n\t}\n\te.downloadImages()\n\te.writeChapters()\n\te.writeContentOPF()\n\te.writeTOC()\n\te.downloadCoverImage()\n\te.writeCSS()\n\te.downloadStylesheet()\n\te.generateEpub(outputPath)\n}", "title": "" }, { "docid": "55c5f794f086d5ef7836d68c6ba29813", "score": "0.56821465", "text": "func (b *Buffer) SaveAs(filename string) error {\n\tb.UpdateRules()\n\terr := ioutil.WriteFile(filename, []byte(b.text), 0644)\n\tif err == nil {\n\t\tb.savedText = b.text\n\t\tb.netInsertions = 0\n\t}\n\treturn err\n}", "title": "" }, { "docid": "b6bdb8d5e8e643bd7df6378dc2c2da97", "score": "0.567789", "text": "func (c *TitlesCache) Save() error {\n\tif err := os.MkdirAll(filepath.Dir(c.Path), 0777); err != nil {\n\t\treturn fmt.Errorf(\"save titles cache: %s\", err)\n\t}\n\tf, err := os.Create(c.Path)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"save titles cache: %s\", err)\n\t}\n\tdefer f.Close()\n\tif err := gob.NewEncoder(f).Encode(c.Titles); err != nil {\n\t\treturn fmt.Errorf(\"save titles cache %s: %s\", c.Path, err)\n\t}\n\tif err := f.Close(); err != nil {\n\t\treturn fmt.Errorf(\"save titles cache: %s\", err)\n\t}\n\tc.Updated = false\n\treturn nil\n}", "title": "" }, { "docid": "31eddac74ef6f8486f2a068a79312d68", "score": "0.56760204", "text": "func writeToFile(path string, content []byte) error {\n\t// Create file\n\tf, err := os.Create(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\n\t// Write to file\n\t_, err = f.Write(content)\n\treturn err\n}", "title": "" }, { "docid": "84388b1e28a22f45ffeb31482fb30952", "score": "0.56731457", "text": "func save(state *smtp.State) {\n\n\tfilename := \"mailstore/\" + fileNameForState(state)\n\n\terr := helpers.EncodeFile(filename, state)\n\tif err != nil {\n\t\tlog.Fatal(\"Couldn't save mail to disk: \", err.Error())\n\t}\n\n\tlog.WithFields(log.Fields{\n\t\t\"Ip\": state.Ip.String(),\n\t\t\"SessionId\": state.SessionId.String(),\n\t}).Debug(\"Serialized mail to disk: \", filename)\n\n}", "title": "" }, { "docid": "af6eb1a287c05daf13809deed8737e9b", "score": "0.56695664", "text": "func save() {\n\tclient := utils.Client(tlsKey, tlsCert, caCert, tlsServerName, disableTLS)\n\tvar statusFilter = helm.ReleaseListStatuses([]release.Status_Code{\n\t\trelease.Status_DEPLOYED,\n\t})\n\treleaseResp, err := client.ListReleases(statusFilter)\n\tutils.PanicCheck(err)\n\tvar buffer bytes.Buffer\n\treleases := 
releaseResp.GetReleases()\n\ttargetReleases := targetReleases(releases)\n\tfor i, release := range targetReleases {\n\t\tif i > 0 {\n\t\t\tbuffer.WriteString(\",\")\n\t\t}\n\t\tsEnc, errb := utils.EncodeRelease(release)\n\t\tutils.PanicCheck(errb)\n\t\tbuffer.WriteString(sEnc)\n\t}\n\tutils.PanicCheck(ioutil.WriteFile(textFilename(), buffer.Bytes(),\n\t\tos.FileMode.Perm(0644)))\n\tutils.PanicCheck(archiver.TarGz.Make(archiveFilename(),\n\t\t[]string{textFilename()}))\n\tlog.Println(\"Wrote \" + strconv.Itoa(len(releases)) + \" Helm Releases to file\")\n\tos.Remove(textFilename())\n}", "title": "" }, { "docid": "2e9977fa487cdf50b43522a80b5fa85b", "score": "0.5662076", "text": "func (msg *PubSubMessage) save() error {\n\tctx := context.Background()\n\tclient, err := storage.NewClient(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tbucketName := os.Getenv(\"BUCKET\")\n\tobjName := time.Now().UTC().Format(time.RFC3339) + \".json\"\n\tobj := client.Bucket(bucketName).Object(objName)\n\twc := obj.NewWriter(ctx)\n\tif _, err := wc.Write(msg.Data); err != nil {\n\t\treturn err\n\t}\n\tif err := wc.Close(); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "c0c69ffce0cd71ba4ad2328b95c0da9a", "score": "0.5656858", "text": "func saveToDisk(f *providers.File, path string, overwrite bool) error {\n\n\tvar extraFlags int = os.O_EXCL\n\n\tif overwrite {\n\t\textraFlags = 0\n\t}\n\n\tfile, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE|extraFlags, 0766)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer file.Close()\n\n\tlog.Infof(\"Starting download for %s@%s into %s\", f.Name, f.Version, path)\n\tbar := pb.Full.Start64(f.Length)\n\tbarReader := bar.NewProxyReader(f.Data)\n\t_, err = io.Copy(file, barReader)\n\tbar.Finish()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "3767c7ff584887f3e76b2dbcd1adff5c", "score": "0.5642965", "text": "func (sm SetMetadata) Save(metadataFile string) {\n\n\tmetadataBytes, _ := json.Marshal(sm)\n\tioutil.WriteFile(metadataFile, metadataBytes, 0755)\n}", "title": "" }, { "docid": "77ab5629d77af2def6b3231f3658ac69", "score": "0.5637704", "text": "func (b *bill) save() {\n\tcurrentTime := time.Now()\n\tdata := []byte(b.formatBill())\n\terr := os.WriteFile(\"bills/\"+b.name+\" \"+currentTime.Format(\"01022006150405\")+\".txt\", data, 0644)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Println(\"A conta foi salva no arquivo!\")\n}", "title": "" }, { "docid": "690a8a941506ea296250a3bd4706a29f", "score": "0.56197107", "text": "func (stor Stor) Save(filename string) error {\n\tvar err error\n\tvar data []byte\n\tswitch strings.ToLower(path.Ext(filename)) {\n\tcase \".json\":\n\t\tdata, err = json.Marshal(stor)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\tcase \".yaml\":\n\t\tdata, err = yaml.Marshal(stor)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\tdefault:\n\t\tdata, err = yaml.Marshal(stor)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn ioutil.WriteFile(filename, data, 0644)\n}", "title": "" }, { "docid": "dda1e1aa067d9eb42004bf00123de9b3", "score": "0.56103265", "text": "func SaveToFile(tplPath string, c Config, destPath string) error {\n\ttemplate, err := ioutil.ReadFile(tplPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tstr, err := GetText(string(template), c)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn ioutil.WriteFile(destPath, []byte(str), 0644)\n}", "title": "" }, { "docid": "151eb214a1b2da5c252da13c736faa3b", "score": "0.5602787", "text": "func SaveHandler(w 
http.ResponseWriter, r *http.Request) (int, error) {\n\tvalid, _ := ValidURLFromCtx(r.Context())\n\tbody := r.FormValue(\"body\")\n\tp := NewPage(valid, []byte(body))\n\terr := p.save()\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\thttp.Redirect(w, r, \"/view/\"+p.Path, http.StatusFound)\n\treturn http.StatusFound, nil\n}", "title": "" }, { "docid": "6c20f9f050820565a2bae90abdb2e9ea", "score": "0.5595493", "text": "func (c *Cache) Save(filePath string) error {\n\tcurr := c.curr.Load()\n\tconcurrency := cgroup.AvailableCPUs()\n\treturn curr.SaveToFileConcurrent(filePath, concurrency)\n}", "title": "" }, { "docid": "29ca01dffff7998c99d43c7330a51249", "score": "0.55893165", "text": "func SaveGob(path string, d interface{}) error {\n\tf, err := os.Create(path)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to create file %v: %v\", path, err)\n\t}\n\tdefer f.Close()\n\tw := bufio.NewWriter(f)\n\tdefer w.Flush()\n\tenc := gob.NewEncoder(w)\n\tif err = enc.Encode(d); err != nil {\n\t\treturn fmt.Errorf(\"failed to encode data: %v\", err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "407c424bf83777b8475433189fb2c5db", "score": "0.55870354", "text": "func saveToFile() (err error) {\n\tconfStr, err := json.Marshal(runtimeConf)\n\tif err != nil {\n\t\treturn\n\t}\n\n\t// write the whole body at once\n\truntimeConf.UpdatedAt = time.Now()\n\terr = ioutil.WriteFile(confFilename, confStr, 0644)\n\tif err != nil {\n\t\treturn\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "05e0289a405c673a0e14565400dfa90f", "score": "0.55753905", "text": "func Save() error { return d.Save() }", "title": "" }, { "docid": "e739f75e0fadf62a4770cbc4c1d37529", "score": "0.55626595", "text": "func saveHandler(w http.ResponseWriter, r *http.Request) {\n\n\tpostInx, _ := strconv.Atoi(r.URL.Path[len(\"/save/\"):])\n\tif posts[postInx] == \"\" {\n\t\tpostInx = len(posts) + 1\n\t}\n\n\tbody := r.FormValue(\"body\")\n\n\tposts[postInx] = body\n\thttp.Redirect(w, r, \"/\", http.StatusFound)\n}", "title": "" }, { "docid": "33e203a615a3b6d8603f02355677dcb4", "score": "0.55585897", "text": "func (p *Store) Save(fh *multipart.FileHeader) (string, int64, error) {\n\tname := uuid.New().String() + path.Ext(fh.Filename)\n\tsrc, err := fh.Open()\n\tif err != nil {\n\t\treturn \"\", 0, err\n\t}\n\tdefer src.Close()\n\tdst, err := os.Create(path.Join(p.root, name))\n\tif err != nil {\n\t\treturn \"\", 0, err\n\t}\n\tdefer dst.Close()\n\tsize, err := io.Copy(dst, src)\n\treturn p.home + \"/\" + name, size, err\n}", "title": "" } ]
25b6b2beec36dc69ade2687839354186
LargeIcon Icon of virtual machine or template.
[ { "docid": "b7df8703a056b5130151b1832bf87a77", "score": "0.7829689", "text": "func (p *VmBase) LargeIcon() (*Icon, bool) {\n\tif p.largeIcon != nil {\n\t\treturn p.largeIcon, true\n\t}\n\treturn nil, false\n}", "title": "" } ]
[ { "docid": "49738bd5c0c61935c486d5374ec1ff2b", "score": "0.7817417", "text": "func (p *Vm) LargeIcon() (*Icon, bool) {\n\tif p.largeIcon != nil {\n\t\treturn p.largeIcon, true\n\t}\n\treturn nil, false\n}", "title": "" }, { "docid": "1f3f222bb3db89b16785fabd990776c2", "score": "0.7655188", "text": "func (p *Template) LargeIcon() (*Icon, bool) {\n\tif p.largeIcon != nil {\n\t\treturn p.largeIcon, true\n\t}\n\treturn nil, false\n}", "title": "" }, { "docid": "cac8a812f58c704129ab8a9a6b9b5ac3", "score": "0.75920737", "text": "func (p *InstanceType) LargeIcon() (*Icon, bool) {\n\tif p.largeIcon != nil {\n\t\treturn p.largeIcon, true\n\t}\n\treturn nil, false\n}", "title": "" }, { "docid": "bd304f748fb649a6f7fd57bb27fc9bfb", "score": "0.7394899", "text": "func (p *OperatingSystemInfo) LargeIcon() (*Icon, bool) {\n\tif p.largeIcon != nil {\n\t\treturn p.largeIcon, true\n\t}\n\treturn nil, false\n}", "title": "" }, { "docid": "f1d72f6046280e65d45aca85431b50ae", "score": "0.697013", "text": "func (p *Snapshot) LargeIcon() (*Icon, bool) {\n\tif p.largeIcon != nil {\n\t\treturn p.largeIcon, true\n\t}\n\treturn nil, false\n}", "title": "" }, { "docid": "d0547c5738e22165b28d3d1aa1a07eb3", "score": "0.6942986", "text": "func (p *Vm) SetLargeIcon(attr *Icon) {\n\tp.largeIcon = attr\n}", "title": "" }, { "docid": "50f0cd7843ec8ae62e042fddbd8e94be", "score": "0.69155186", "text": "func (p *InstanceType) SetLargeIcon(attr *Icon) {\n\tp.largeIcon = attr\n}", "title": "" }, { "docid": "4ae5c80a52fa33d69f2fff1f2105eb75", "score": "0.69102716", "text": "func (p *Template) SetLargeIcon(attr *Icon) {\n\tp.largeIcon = attr\n}", "title": "" }, { "docid": "ac8e390680a72b7bd644b01b2cb90ffa", "score": "0.6898179", "text": "func (p *VmBase) SetLargeIcon(attr *Icon) {\n\tp.largeIcon = attr\n}", "title": "" }, { "docid": "768cc69d94dc717a4dfc0fdec628f40c", "score": "0.6652479", "text": "func (p *OperatingSystemInfo) SetLargeIcon(attr *Icon) {\n\tp.largeIcon = attr\n}", "title": "" }, { "docid": "94f6bf1ebfe07d08b2a0341b3bf1ae78", "score": "0.6563418", "text": "func (p *VmBase) MustLargeIcon() *Icon {\n\tif p.largeIcon == nil {\n\t\tpanic(\"the largeIcon must not be nil, please use LargeIcon() function instead\")\n\t}\n\treturn p.largeIcon\n}", "title": "" }, { "docid": "eea67e4881803f1b5979f7c12598f2fb", "score": "0.65597445", "text": "func (p *Vm) MustLargeIcon() *Icon {\n\tif p.largeIcon == nil {\n\t\tpanic(\"the largeIcon must not be nil, please use LargeIcon() function instead\")\n\t}\n\treturn p.largeIcon\n}", "title": "" }, { "docid": "4ba60839c567204fb848d6ed7c2f6514", "score": "0.644729", "text": "func (p *InstanceType) MustLargeIcon() *Icon {\n\tif p.largeIcon == nil {\n\t\tpanic(\"the largeIcon must not be nil, please use LargeIcon() function instead\")\n\t}\n\treturn p.largeIcon\n}", "title": "" }, { "docid": "0155b45bbcf6f4dcbb450a096beca7bd", "score": "0.64445424", "text": "func (p *VmBase) SmallIcon() (*Icon, bool) {\n\tif p.smallIcon != nil {\n\t\treturn p.smallIcon, true\n\t}\n\treturn nil, false\n}", "title": "" }, { "docid": "c5022ccb099cfdf72d35573b12e21eb7", "score": "0.64400965", "text": "func (p *OperatingSystemInfo) MustLargeIcon() *Icon {\n\tif p.largeIcon == nil {\n\t\tpanic(\"the largeIcon must not be nil, please use LargeIcon() function instead\")\n\t}\n\treturn p.largeIcon\n}", "title": "" }, { "docid": "247b0ed8aa1f97ddcf6ceccad5871c7b", "score": "0.637695", "text": "func (p *Vm) SmallIcon() (*Icon, bool) {\n\tif p.smallIcon != nil {\n\t\treturn p.smallIcon, true\n\t}\n\treturn nil, 
false\n}", "title": "" }, { "docid": "7db521af813726ddad77246c22e1b4a3", "score": "0.6302209", "text": "func (p *Template) MustLargeIcon() *Icon {\n\tif p.largeIcon == nil {\n\t\tpanic(\"the largeIcon must not be nil, please use LargeIcon() function instead\")\n\t}\n\treturn p.largeIcon\n}", "title": "" }, { "docid": "e7f13018d8c070b15204cb6fa3859284", "score": "0.628921", "text": "func (p *InstanceType) SmallIcon() (*Icon, bool) {\n\tif p.smallIcon != nil {\n\t\treturn p.smallIcon, true\n\t}\n\treturn nil, false\n}", "title": "" }, { "docid": "56aa4fc29a49c6126b193d5f2a942c27", "score": "0.62829053", "text": "func (p *Snapshot) SetLargeIcon(attr *Icon) {\n\tp.largeIcon = attr\n}", "title": "" }, { "docid": "3c17938fd52721c58b23155e892debef", "score": "0.61262715", "text": "func (p *Template) SmallIcon() (*Icon, bool) {\n\tif p.smallIcon != nil {\n\t\treturn p.smallIcon, true\n\t}\n\treturn nil, false\n}", "title": "" }, { "docid": "8884d3f7cba0cd82c2e975257b9da79a", "score": "0.6067174", "text": "func (p *OperatingSystemInfo) SmallIcon() (*Icon, bool) {\n\tif p.smallIcon != nil {\n\t\treturn p.smallIcon, true\n\t}\n\treturn nil, false\n}", "title": "" }, { "docid": "79d4b9353d8b852697e061e9307d313f", "score": "0.58991396", "text": "func (p *Snapshot) MustLargeIcon() *Icon {\n\tif p.largeIcon == nil {\n\t\tpanic(\"the largeIcon must not be nil, please use LargeIcon() function instead\")\n\t}\n\treturn p.largeIcon\n}", "title": "" }, { "docid": "81af14bbe214f6a19aa23e39f3bf6fdd", "score": "0.5732675", "text": "func (v *embed) Icon() string {\n\treturn v.Icon_\n}", "title": "" }, { "docid": "bb9407269e87b30855cdec353b448ad5", "score": "0.5652402", "text": "func (p *Snapshot) SmallIcon() (*Icon, bool) {\n\tif p.smallIcon != nil {\n\t\treturn p.smallIcon, true\n\t}\n\treturn nil, false\n}", "title": "" }, { "docid": "b313cc64882534eea32e8690a8ac6cd1", "score": "0.55297464", "text": "func (b *Block) Icon() rune {\n\treturn icon\n}", "title": "" }, { "docid": "e056b9a70d5c6eb84a6780d4a4f9e4bc", "score": "0.5507903", "text": "func (p *Product) LargeImage() string {\n\treturn smallImgRe.ReplaceAllLiteralString(p.Image, \"pl.jpg\")\n}", "title": "" }, { "docid": "a09cacbbba804dc2044cac8811c3c420", "score": "0.5448069", "text": "func (f F) Icon() string {\n\treturn f.Profile\n}", "title": "" }, { "docid": "8b65d7acd467286ad54b9f3c0d1728d5", "score": "0.5447483", "text": "func (self *TraitFileInfo) GetIcon() (return__ *C.GIcon) {\n\treturn__ = C.g_file_info_get_icon(self.CPointer)\n\treturn\n}", "title": "" }, { "docid": "605437616f6a2ee974a332c3d0892bc8", "score": "0.54333997", "text": "func Icon(ico string) Option {\n\treturn func(o *Options) {\n\t\tif o.Metadata == nil {\n\t\t\to.Metadata = make(map[string]string)\n\t\t}\n\t\to.Metadata[\"icon\"] = ico\n\t}\n}", "title": "" }, { "docid": "605437616f6a2ee974a332c3d0892bc8", "score": "0.54333997", "text": "func Icon(ico string) Option {\n\treturn func(o *Options) {\n\t\tif o.Metadata == nil {\n\t\t\to.Metadata = make(map[string]string)\n\t\t}\n\t\to.Metadata[\"icon\"] = ico\n\t}\n}", "title": "" }, { "docid": "19d61ab653397ad6eabeb92020d6e53c", "score": "0.5402752", "text": "func (messagetype MessageType) getIcon(server *Server) string {\n\tconfiguredIcon := server.Config.ServerIconUrl\n\tif configuredIcon != \"\" || server.Config.ChannelID == \"\" {\n\t\treturn configuredIcon\n\t}\n\tguild, err := getGuildForChannel(session, server.Config.ChannelID)\n\tif err == nil {\n\t\treturn \"https://cdn.discordapp.com/icons/\" + guild.ID + \"/\" + guild.Icon 
+ \".png\"\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "f00c07c95dfb5c35bdd400dbfb227a78", "score": "0.530846", "text": "func (o *AddOn) Icon() string {\n\tif o != nil && o.icon != nil {\n\t\treturn *o.icon\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "b82b74f3895fbb5b58ce86459efc9d54", "score": "0.53009313", "text": "func (self *TraitFileInfo) GetSymbolicIcon() (return__ *C.GIcon) {\n\treturn__ = C.g_file_info_get_symbolic_icon(self.CPointer)\n\treturn\n}", "title": "" }, { "docid": "eb417a7800dd55fb191a961440ffc3c0", "score": "0.5244706", "text": "func (item *MenuItem) SetTemplateIcon(templateIconBytes []byte, regularIconBytes []byte) {\n\tcstr := (*C.char)(unsafe.Pointer(&templateIconBytes[0]))\n\tC.setMenuItemIcon(cstr, (C.int)(len(templateIconBytes)), C.int(item.id), true)\n}", "title": "" }, { "docid": "b369084728830bd0adf0cc223d502236", "score": "0.52090657", "text": "func IconLTE(v string) predicate.Category {\n\treturn predicate.Category(func(s *sql.Selector) {\n\t\ts.Where(sql.LTE(s.C(FieldIcon), v))\n\t})\n}", "title": "" }, { "docid": "ddc7ce73b987a3746cfd06233e28fbfd", "score": "0.51897824", "text": "func NewIcon(in io.ReadSeeker) (out GuildIcon, err error) {\n\tinner, err := NewImageData(in)\n\n\tif err != nil {\n\t\treturn\n\t}\n\n\tout = &guild.IconImpl{Root: inner}\n\n\tif inner.Height() > IconMaxHeight || inner.Width() > IconMaxWidth {\n\t\treturn out, ErrBadIconSize\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "6f9f1bdc2e177842c0481a69ccc9f52c", "score": "0.5179778", "text": "func (o LookupSimpleMonitorResultOutput) IconId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupSimpleMonitorResult) string { return v.IconId }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "8f729b5cafdc78c72ad5181978cc2687", "score": "0.513953", "text": "func (*IconArg) Descriptor() ([]byte, []int) {\n\treturn file_icon_proto_rawDescGZIP(), []int{0}\n}", "title": "" }, { "docid": "c6b71c7b29cf23e26a046cfdcb4f7870", "score": "0.511085", "text": "func (t *Theme) GetEmoticon() (value string, ok bool) {\n\tif t == nil {\n\t\treturn\n\t}\n\tif !t.Flags.Has(6) {\n\t\treturn value, false\n\t}\n\treturn t.Emoticon, true\n}", "title": "" }, { "docid": "1fdc232b6a9f062c871820337e5e2692", "score": "0.50973576", "text": "func NewIcon(class string, attributes Attributes) (Icon, error) {\n\treturn Icon{\n\t\tClass: class,\n\t\tAttributes: attributes,\n\t}, nil\n}", "title": "" }, { "docid": "2eb02215bd798aac53739c483734ca38", "score": "0.50868404", "text": "func LoadIcon(db gorp.SqlExecutor, appID int64) (string, error) {\n\ticon, err := db.SelectStr(\"SELECT icon FROM application WHERE id = $1\", appID)\n\n\treturn icon, sdk.WithStack(err)\n}", "title": "" }, { "docid": "6f81f3fb309bc332243975b3151ebfa3", "score": "0.50854367", "text": "func SetTemplateIcon(templateIconBytes []byte, regularIconBytes []byte) {\n\tcstr := (*C.char)(unsafe.Pointer(&templateIconBytes[0]))\n\tC.setIcon(cstr, (C.int)(len(templateIconBytes)), true)\n}", "title": "" }, { "docid": "8e0f159ab1ca7d0a5073315530d98813", "score": "0.50830966", "text": "func (c *Client) Icon(pkgID string) (*Icon, error) {\n\tconst errPrefix = \"cannot retrieve icon\"\n\n\tresponse, err := c.raw(\"GET\", fmt.Sprintf(\"/v2/icons/%s/icon\", pkgID), nil, nil, nil)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"%s: failed to communicate with server: %s\", errPrefix, err)\n\t}\n\tdefer response.Body.Close()\n\n\tif response.StatusCode != 200 {\n\t\treturn nil, fmt.Errorf(\"%s: Not Found\", errPrefix)\n\t}\n\n\tre := 
regexp.MustCompile(`attachment; filename=(.+)`)\n\tmatches := re.FindStringSubmatch(response.Header.Get(\"Content-Disposition\"))\n\n\tif matches == nil || matches[1] == \"\" {\n\t\treturn nil, fmt.Errorf(\"%s: cannot determine filename\", errPrefix)\n\t}\n\n\tcontent, err := ioutil.ReadAll(response.Body)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"%s: %s\", errPrefix, err)\n\t}\n\n\ticon := &Icon{\n\t\tFilename: matches[1],\n\t\tContent: content,\n\t}\n\n\treturn icon, nil\n}", "title": "" }, { "docid": "b75ed893eaf233568743bd239439e989", "score": "0.5046577", "text": "func NewIcon(res fyne.Resource) *Icon {\n\ticon := &Icon{}\n\ticon.ExtendBaseWidget(icon)\n\ticon.SetResource(res) // force the image conversion\n\n\treturn icon\n}", "title": "" }, { "docid": "c9daacaa872afb97f6418b4fd177f8a4", "score": "0.50202775", "text": "func Icon(name string) image.Image {\n\tif icons == nil {\n\t\tloadIcons() // one-time initialization\n\t}\n\treturn icons[name]\n}", "title": "" }, { "docid": "62964337f93029035e5c7fb9fd85fdbd", "score": "0.5018488", "text": "func (n *Node) Large() *Node {\n\treturn n.setAttr(\"size\", \"large\")\n}", "title": "" }, { "docid": "95c5b0458414cf2f894d35220bf38c8e", "score": "0.5018109", "text": "func (self *Themeset) FindIcon(names []string, size int) (*Icon, bool) {\n\treturn self.FindIconViaTheme(self.DefaultTheme, names, size)\n}", "title": "" }, { "docid": "fe3a2716fd2abd0544033ecdc8cde1b8", "score": "0.50128305", "text": "func GetIconPathOnDisk() string {\n\trelIcnsPath := path.Join(os.Args[0], \"..\", \"..\", \"Resources\", \"octonotify.icns\")\n\ticnsPath, err := filepath.Abs(relIcnsPath)\n\tif err != nil {\n\t\tlogrus.WithError(err).Fatal(\"cannot build icon path\")\n\t}\n\treturn icnsPath\n}", "title": "" }, { "docid": "6bde2c4138ffe444510ca07182c7e1c9", "score": "0.50010103", "text": "func NewIcon(data []byte) (*Icon, error) {\n\t_, err := iconvg.DecodeMetadata(data)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &Icon{src: data, Color: color.NRGBA{A: 0xff}}, nil\n}", "title": "" }, { "docid": "b18f5de10fb059937d869892a08dd3da", "score": "0.49804682", "text": "func (p *InstanceType) SetSmallIcon(attr *Icon) {\n\tp.smallIcon = attr\n}", "title": "" }, { "docid": "296b1006bf37902f2116f359705fa508", "score": "0.4966762", "text": "func (o MetadataOutput) IconId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *Metadata) pulumi.StringPtrOutput { return v.IconId }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "f9faceae3c7582042870d26c9b23cc46", "score": "0.49202627", "text": "func (p *VmBase) MustSmallIcon() *Icon {\n\tif p.smallIcon == nil {\n\t\tpanic(\"the smallIcon must not be nil, please use SmallIcon() function instead\")\n\t}\n\treturn p.smallIcon\n}", "title": "" }, { "docid": "09a4820a40eab951fd2552d7db98e8ce", "score": "0.4912714", "text": "func (p *VmBase) SetSmallIcon(attr *Icon) {\n\tp.smallIcon = attr\n}", "title": "" }, { "docid": "6f25785269203331a81cd193376f5ff5", "score": "0.4897172", "text": "func (p *InstanceType) MustSmallIcon() *Icon {\n\tif p.smallIcon == nil {\n\t\tpanic(\"the smallIcon must not be nil, please use SmallIcon() function instead\")\n\t}\n\treturn p.smallIcon\n}", "title": "" }, { "docid": "3cf3752fd6fe4232b6fada3f097a92b7", "score": "0.48875627", "text": "func (p *Vm) MustSmallIcon() *Icon {\n\tif p.smallIcon == nil {\n\t\tpanic(\"the smallIcon must not be nil, please use SmallIcon() function instead\")\n\t}\n\treturn p.smallIcon\n}", "title": "" }, { "docid": "35d3cf227fceafe81af45cd534d10bcc", 
"score": "0.48854855", "text": "func (m *TimeOffReason) GetIconType()(*TimeOffReasonIconType) {\n return m.iconType\n}", "title": "" }, { "docid": "8f2821942d6fc3e631fd8f3b9492d5cf", "score": "0.4880998", "text": "func GetIconPath(buf []byte) string {\n\tif len(buf) == 0 {\n\t\treturn \"\"\n\t}\n\tpath := (\"photos/\" + strconv.Itoa(fileCnt))\n\tfile, err := os.Create(path)\n\tif err != nil {\n\t\treturn \"\"\n\t}\n\tfileCnt++\n\tfile.Write(buf)\n\tfile.Close()\n\treturn path\n}", "title": "" }, { "docid": "3c479dc7f1b8f8227dc44bedfc123f6a", "score": "0.48614994", "text": "func (self *TraitFileInfo) SetIcon(icon *C.GIcon) {\n\tC.g_file_info_set_icon(self.CPointer, icon)\n\treturn\n}", "title": "" }, { "docid": "ca6b2069dd78c0bee456f28932ce72f5", "score": "0.48442632", "text": "func (t *Theme) GetIcon(file os.FileInfo) (icon string) {\n\tvar err error\n\n\tif file.IsDir() {\n\t\ticon, err = t.folders.Load(file.Name())\n\t\tif err != nil {\n\t\t\ticon, _ = t.folders.Load(defaultIcon)\n\t\t}\n\t\treturn icon\n\t}\n\n\ticon, err = t.files.Load(strings.ToLower(file.Name()))\n\tif err == nil {\n\t\treturn icon\n\t}\n\n\tif strings.Contains(file.Name(), \".\") {\n\t\tsl := strings.Split(file.Name(), \".\")\n\t\textension := sl[len(sl)-1]\n\t\ticon, err = t.extensions.Load(strings.ToLower(extension))\n\t\tif err == nil {\n\t\t\treturn icon\n\t\t}\n\t}\n\n\ticon, _ = t.files.Load(defaultIcon)\n\treturn icon\n}", "title": "" }, { "docid": "4e64c4b8641c86a19c93f90c60cda93b", "score": "0.4843366", "text": "func (p *Vm) SetSmallIcon(attr *Icon) {\n\tp.smallIcon = attr\n}", "title": "" }, { "docid": "c590e56311538010f69e591552f215e3", "score": "0.48187932", "text": "func (c *Controller) AppIcon(ctx *handler.Context, request *handler.Request, response *handler.Response) {\n\tfilename := request.StringParam(\"filename\", \"favicon.png\")\n\tencodedBlob, found := static.Binaries[filename]\n\tif !found {\n\t\tlogger.Info(\"[Controller:AppIcon] This icon doesn't exists: %s\", filename)\n\t\tresponse.HTML().NotFound()\n\t\treturn\n\t}\n\n\tblob, err := base64.StdEncoding.DecodeString(encodedBlob)\n\tif err != nil {\n\t\tlogger.Error(\"[Controller:AppIcon] %v\", err)\n\t\tresponse.HTML().NotFound()\n\t\treturn\n\t}\n\n\tresponse.Cache(\"image/png\", static.BinariesChecksums[filename], blob, 48*time.Hour)\n}", "title": "" }, { "docid": "36cd37c5bf91c615fdcde3cc2126482e", "score": "0.4812336", "text": "func (m *TimeOffReason) SetIconType(value *TimeOffReasonIconType)() {\n m.iconType = value\n}", "title": "" }, { "docid": "aebf79a88edc016f7689950824ae61c0", "score": "0.4800061", "text": "func (o UpgradeHistoryEntryOutput) VmImage() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v UpgradeHistoryEntry) *string { return v.VmImage }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "f353f10dc7e282ed36fcd81bd19ed80c", "score": "0.4799408", "text": "func ChangeMountIcon(oldMount string, newMount string) error {\n\tif oldMount != \"\" {\n\t\t// DeleteKey doesn't work if there are subkeys\n\t\tregistry.DeleteKey(registry.CURRENT_USER, `SOFTWARE\\Classes\\Applications\\Explorer.exe\\Drives\\`+oldMount[:1]+`\\DefaultIcon`)\n\t\tregistry.DeleteKey(registry.CURRENT_USER, `SOFTWARE\\Classes\\Applications\\Explorer.exe\\Drives\\`+oldMount[:1]+`\\DefaultLabel`)\n\t\tregistry.DeleteKey(registry.CURRENT_USER, `SOFTWARE\\Classes\\Applications\\Explorer.exe\\Drives\\`+oldMount[:1])\n\t\tnotifyShell(oldMount)\n\t}\n\tif newMount == \"\" {\n\t\treturn nil\n\t}\n\tk, _, err := registry.CreateKey(registry.CURRENT_USER, 
`SOFTWARE\\Classes\\Applications\\Explorer.exe\\Drives\\`+newMount[:1]+`\\DefaultIcon`, registry.SET_VALUE|registry.CREATE_SUB_KEY|registry.WRITE)\n\tdefer k.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\tkeybaseExe, err := utils.BinPath()\n\tif err != nil {\n\t\treturn err\n\t}\n\t// Use the second icon bound into keybase.exe - hence the 1\n\terr = k.SetStringValue(\"\", keybaseExe+\",1\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Also give a nice label\n\tk2, _, err := registry.CreateKey(registry.CURRENT_USER, `SOFTWARE\\Classes\\Applications\\Explorer.exe\\Drives\\`+newMount[:1]+`\\DefaultLabel`, registry.SET_VALUE|registry.CREATE_SUB_KEY|registry.WRITE)\n\tdefer k2.Close()\n\terr = k2.SetStringValue(\"\", \"Keybase\")\n\tnotifyShell(newMount)\n\treturn err\n}", "title": "" }, { "docid": "7c6ed526c294d8296da7997c509c5d9b", "score": "0.47902232", "text": "func PhidTypeToIcon(phidType constants.PhidType) Icon {\n\tswitch phidType {\n\tcase constants.PhidTypeCommit:\n\t\treturn IconCommits\n\tcase constants.PhidTypeTask:\n\t\treturn IconTasks\n\tcase constants.PhidTypeDifferentialRevision:\n\t\treturn IconRevisions\n\tdefault:\n\t\treturn IconDefault\n\t}\n}", "title": "" }, { "docid": "0ff5b08d47250a1c5aa2f710d9a823bc", "score": "0.47897005", "text": "func (o LookupLoadBalancerResultOutput) IconId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupLoadBalancerResult) string { return v.IconId }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "c11b3bb53103478a15cc2307d87b733b", "score": "0.4786579", "text": "func (d *DesktopBuilder) compileIcon(assetDir string, iconFile string) error {\n\treturn nil\n}", "title": "" }, { "docid": "b94302f2b3aff3707ef6cdd21793e2eb", "score": "0.4774743", "text": "func (o *InlineObject1124) GetIcon() AnyOfmicrosoftGraphWorkbookIcon {\n\tif o == nil || o.Icon == nil {\n\t\tvar ret AnyOfmicrosoftGraphWorkbookIcon\n\t\treturn ret\n\t}\n\treturn *o.Icon\n}", "title": "" }, { "docid": "40a4f8360e955621ff576f3dcc28947b", "score": "0.4773331", "text": "func (self *Display) SetDisplayIcon(bitmap *Bitmap) {\n C.al_set_display_icon(self.handle, bitmap.handle)\n}", "title": "" }, { "docid": "171b3a03af8eb6c3011179b246c28aac", "score": "0.47691905", "text": "func (s *Session) GuildIcon(guildID int64, options ...RequestOption) (img image.Image, err error) {\n\tg, err := s.Guild(guildID, options...)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tif g.Icon == \"\" {\n\t\terr = ErrGuildNoIcon\n\t\treturn\n\t}\n\n\tbody, err := s.RequestWithBucketID(\"GET\", EndpointGuildIcon(guildID, g.Icon), nil, nil, EndpointGuildIcon(guildID, \"\"), options...)\n\tif err != nil {\n\t\treturn\n\t}\n\n\timg, _, err = image.Decode(bytes.NewReader(body))\n\treturn\n}", "title": "" }, { "docid": "608f9ac54cb29d6cfd9838f2c5a38f88", "score": "0.47688496", "text": "func SetIcon(iconBytes []byte) {\n\tf, err := ioutil.TempFile(\"\", \"systray_temp_icon\")\n\tif err != nil {\n\t\tlog.Errorf(\"Unable to create temp icon: %v\", err)\n\t\treturn\n\t}\n\tdefer f.Close()\n\t_, err = f.Write(iconBytes)\n\tif err != nil {\n\t\tlog.Errorf(\"Unable to write icon to temp file %v: %v\", f.Name(), f)\n\t\treturn\n\t}\n\t// Need to close file before we load it to make sure contents is flushed.\n\tf.Close()\n\tname, err := strUTF16(f.Name())\n\tif err != nil {\n\t\tlog.Errorf(\"Unable to convert name to string pointer: %v\", err)\n\t\treturn\n\t}\n\t_setIcon.Call(name.Raw())\n}", "title": "" }, { "docid": "3c663e7dac5342f67cda1bd31bc1e5d7", "score": "0.47552386", "text": "func (o 
ApplicationSpecDescriptorIconsOutput) Size() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationSpecDescriptorIcons) *string { return v.Size }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "2b319573e6897bf73bc23b67120c65a4", "score": "0.47365403", "text": "func (i *Icon) MinSize() fyne.Size {\n\ti.ExtendBaseWidget(i)\n\treturn i.BaseWidget.MinSize()\n}", "title": "" }, { "docid": "6543cc70dc9b83ed1a67a7622bc6b827", "score": "0.47303274", "text": "func (o LookupEnhancedDBResultOutput) IconId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupEnhancedDBResult) string { return v.IconId }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "752f2416e578cc9746d39f23d12e3061", "score": "0.4725501", "text": "func (o MachineImageIamMemberOutput) MachineImage() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *MachineImageIamMember) pulumi.StringOutput { return v.MachineImage }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "e7b12da89187c32549876eeb5633704f", "score": "0.4725464", "text": "func (p *Template) SetSmallIcon(attr *Icon) {\n\tp.smallIcon = attr\n}", "title": "" }, { "docid": "b754a3008ff7ae90578e9d8aa6665203", "score": "0.47163668", "text": "func (p *OperatingSystemInfo) MustSmallIcon() *Icon {\n\tif p.smallIcon == nil {\n\t\tpanic(\"the smallIcon must not be nil, please use SmallIcon() function instead\")\n\t}\n\treturn p.smallIcon\n}", "title": "" }, { "docid": "b27c201713fccc2fc0c6fdf52c210677", "score": "0.47047645", "text": "func (t *Theme) SetEmoticon(value string) {\n\tt.Flags.Set(6)\n\tt.Emoticon = value\n}", "title": "" }, { "docid": "e18321cb5e8172b5d456b56ab168bec6", "score": "0.47036853", "text": "func SetIcon(iconBytes []byte) {\n\tcstr := (*C.char)(unsafe.Pointer(&iconBytes[0]))\n\tC.setIcon(cstr, (C.int)(len(iconBytes)), false)\n}", "title": "" }, { "docid": "64fa77e9bf4605464d9bb0011df81ef2", "score": "0.46954545", "text": "func NewIconLabel(msg string) *Label {\n\n\tl := new(Label)\n\tl.initialize(msg, StyleDefault.FontIcon)\n\treturn l\n}", "title": "" }, { "docid": "73b1b60ce94a01b4720c3a9a413b6b53", "score": "0.46903598", "text": "func getIconHandle(icoData []byte, dirEntry _ICONDIRENTRY) w32.HICON {\n\tret, _ := createIconFromResourceEx(\n\t\t&icoData[dirEntry.dwImageOffset],\n\t\tw32.DWORD(0),\n\t\ttrue,\n\t\tw32.DWORD(0x30000),\n\t\tint32(dirEntry.bWidth),\n\t\tint32(dirEntry.bHeight),\n\t\t_LR_DEFAULT_COLOR)\n\treturn w32.HICON(ret)\n}", "title": "" }, { "docid": "60c95a47ce06a55608a3f84ea8b0f377", "score": "0.46877882", "text": "func (recv *AppLaunchContext) SetIcon(icon *gio.Icon) {\n\tc_icon := (*C.GIcon)(icon.ToC())\n\n\tC.gdk_app_launch_context_set_icon((*C.GdkAppLaunchContext)(recv.native), c_icon)\n\n\treturn\n}", "title": "" }, { "docid": "ec59a78d0b8902eaa3c778f8b63003c2", "score": "0.46842393", "text": "func (_this *PasswordCredential) IconURL() string {\n\tvar ret string\n\tvalue := _this.Value_JS.Get(\"iconURL\")\n\tret = (value).String()\n\treturn ret\n}", "title": "" }, { "docid": "38284623f052442f80b51aa75b8445fa", "score": "0.46790338", "text": "func (smc *SysMenuCreate) SetIcon(s string) *SysMenuCreate {\n\tsmc.mutation.SetIcon(s)\n\treturn smc\n}", "title": "" }, { "docid": "ff05870b14de797cb2cc84cef4424703", "score": "0.46646404", "text": "func (m *MessageActionSetChatTheme) GetEmoticon() (value string) {\n\tif m == nil {\n\t\treturn\n\t}\n\treturn m.Emoticon\n}", "title": "" }, { "docid": "45a3cb15a8c059eba7a488de2540cb8c", "score": "0.4664189", "text": "func (o UpgradeHistoryEntryResponseOutput) VmImage() 
pulumi.StringOutput {\n\treturn o.ApplyT(func(v UpgradeHistoryEntryResponse) string { return v.VmImage }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "94ac71eb70b5044f5f44df4e8635546c", "score": "0.4662149", "text": "func (t *_NotifyIcon) Stop() {\n\tlog.Println(\"Removig notification icon\")\n\tshellNotifyIcon(_NIM_DELETE, &t.nid)\n\tw32.DestroyIcon(t.nid.HIcon)\n\tw32.PostQuitMessage(0)\n}", "title": "" }, { "docid": "24ccb61cb31660aab8f9aa751114d575", "score": "0.46508795", "text": "func (o *InlineObject1124) GetIconOk() (AnyOfmicrosoftGraphWorkbookIcon, bool) {\n\tif o == nil || o.Icon == nil {\n\t\tvar ret AnyOfmicrosoftGraphWorkbookIcon\n\t\treturn ret, false\n\t}\n\treturn *o.Icon, true\n}", "title": "" }, { "docid": "4d27d94b84537a74a063b565d20d2af2", "score": "0.4640679", "text": "func LinuxLarge(num int) *gce.Instance {\n\treturn linuxSwarmingBot(num, gce.MACHINE_TYPE_HIGHCPU_64)\n}", "title": "" }, { "docid": "17461ca3b44d0b17ae1071987be2a68a", "score": "0.45938838", "text": "func newIdentIcon(\n\ttext string,\n\tnamespace string,\n\tsize int,\n\tdensity int,\n\tisRandom bool,\n\trand *rand.Rand,\n\thashFunction func([]byte) []byte,\n\tfillColorFunction func([]byte) color.Color,\n\tbackgroundColorFunction func([]byte, color.Color) color.Color,\n) (*IdentIcon, error) {\n\n\tif text == \"\" {\n\t\t// Text is the minimum requirement to generate an IdentIcon.\n\t\treturn nil, errors.New(\"Text can't be empty\")\n\t}\n\n\tif size < MinSize {\n\t\t// Smaller values will generate a meaningless Generator.\n\t\treturn nil, errors.New(\n\t\t\t\"Size cannot be less than \" + strconv.Itoa(MinSize),\n\t\t)\n\t}\n\n\tif density < 1 {\n\t\treturn nil, errors.New(\n\t\t\t\"Density cannot be less than 1\",\n\t\t)\n\t}\n\n\tidenticon := IdentIcon{\n\t\tText: text,\n\t\tNamespace: namespace,\n\t\tSize: size,\n\t\tDensity: density,\n\t\tisRandom: isRandom,\n\t\trand: rand,\n\t\thashFunction: hashFunction,\n\t\tfillColorFunction: fillColorFunction,\n\t\tbackgroundColorFunction: backgroundColorFunction,\n\t}\n\n\t// Reflection Line\n\tidenticon.drawableWidth = identicon.Size / 2\n\n\t// Since the canvas is a symmetrical reflection make sure to:\n\t// - Handle even and odd Canvas sizes\n\tif identicon.Size%2 == 1 {\n\t\t// Is odd, the vertical middle point exist.\n\t\tidenticon.drawableWidth++\n\t}\n\n\treturn &identicon, nil\n}", "title": "" }, { "docid": "f09e74f70065223cbcdeab6ddc7ee4a3", "score": "0.45906824", "text": "func (item *MenuItem) SetIcon(iconBytes []byte) {\n\tcstr := (*C.char)(unsafe.Pointer(&iconBytes[0]))\n\tC.setMenuItemIcon(cstr, (C.int)(len(iconBytes)), C.int(item.id), false)\n}", "title": "" }, { "docid": "957503522f6ef5a72d684745a6715527", "score": "0.45895228", "text": "func EmbeddedIconPath() string {\n\tswitch runtime.GOOS {\n\tcase \"windows\":\n\t\treturn path.Join(resourcesDir, EmbeddedIconIco)\n\n\tcase \"darwin\":\n\t\treturn path.Join(resourcesDir, EmbeddedIconIcns)\n\n\tdefault:\n\t\treturn path.Join(resourcesDir, EmbeddedIconPng)\n\t}\n}", "title": "" }, { "docid": "a6ae96cf56484f0d296e0bbc1a816089", "score": "0.45889246", "text": "func (a Agent) DownloadIcon(sid int, iconID uint32) ([]byte, error) {\n\tif iconID == 0 {\n\t\treturn []byte(\"\"), nil\n\t}\n\tf, err := a.FileInfo(sid, 0, \"\", fmt.Sprintf(\"/icon_%d\", iconID))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tft, err := a.InitDownload(sid, f[0], \"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ticon, err := a.DownloadFile(ft)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn icon, 
nil\n}", "title": "" }, { "docid": "58ce6f8234c9bed1cfc149cfedebcc57", "score": "0.4585733", "text": "func (o *InlineObject1124) HasIcon() bool {\n\tif o != nil && o.Icon != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "ad3893d0acce02d3bf4d7682edad124d", "score": "0.4584951", "text": "func (self *TraitFileInfo) SetSymbolicIcon(icon *C.GIcon) {\n\tC.g_file_info_set_symbolic_icon(self.CPointer, icon)\n\treturn\n}", "title": "" }, { "docid": "38fc71274b42c65491afc1c28fdf5cd0", "score": "0.45803237", "text": "func (request *MattermostMessage) SetIcon(\n\ticon string,\n) {\n\trequest.IconURL = icon\n}", "title": "" }, { "docid": "de795c10f006003a7914a9196a3e3e2d", "score": "0.45651937", "text": "func (ai AppImage) Icon() (io.ReadCloser, string, error) {\n\tif ai.Desktop == nil {\n\t\treturn nil, \"\", errors.New(\"desktop file wasn't parsed\")\n\t}\n\ticon := ai.Desktop.Section(\"Desktop Entry\").Key(\"Icon\").Value()\n\tif icon == \"\" {\n\t\treturn nil, \"\", errors.New(\"desktop file doesn't specify an icon\")\n\t}\n\tif strings.HasSuffix(icon, \".png\") || strings.HasSuffix(icon, \".svg\") {\n\t\trdr, err := ai.reader.FileReader(icon)\n\t\tif err == nil {\n\t\t\treturn rdr, icon, nil\n\t\t}\n\t}\n\trootFils := ai.reader.ListFiles(\"/\")\n\tfor _, fil := range rootFils {\n\t\tif strings.HasPrefix(fil, icon) {\n\t\t\tif fil == icon+\".png\" {\n\t\t\t\trdr, err := ai.reader.FileReader(fil)\n\t\t\t\tif err != nil {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\treturn rdr, fil, nil\n\t\t\t} else if fil == icon+\".svg\" {\n\t\t\t\trdr, err := ai.reader.FileReader(fil)\n\t\t\t\tif err != nil {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\treturn rdr, fil, nil\n\t\t\t}\n\t\t}\n\t}\n\treturn nil, \"\", errors.New(\"Cannot find the AppImage's icon: \" + icon)\n}", "title": "" }, { "docid": "6abfda9a74a84b667cb72cfd781e7410", "score": "0.45630258", "text": "func (p *Template) MustSmallIcon() *Icon {\n\tif p.smallIcon == nil {\n\t\tpanic(\"the smallIcon must not be nil, please use SmallIcon() function instead\")\n\t}\n\treturn p.smallIcon\n}", "title": "" }, { "docid": "7b95046f7df4811985c493f4433bcbbd", "score": "0.4558718", "text": "func (ai AppImage) Thumbnail() (io.ReadCloser, error) {\n\treturn ai.reader.FileReader(\".DirIcon\")\n}", "title": "" }, { "docid": "42c645abc8c1c380deddad4532d1cd9a", "score": "0.45569146", "text": "func (n *Node) XXLarge() *Node {\n\treturn n.setAttr(\"size\", \"xx-large\")\n}", "title": "" } ]
7d55db73a7f36c73eb45af0dfd9f7d15
The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
[ { "docid": "dfb445c19d26f0d93b7486b11b6cc52c", "score": "0.0", "text": "func (o AmazonS3CompatibleLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AmazonS3CompatibleLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" } ]
[ { "docid": "02b0ac52f48867b1a8b2a0bee42f34fa", "score": "0.6930561", "text": "func (o HDInsightOnDemandLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v HDInsightOnDemandLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "a5b1e4124c1335b3ad7100842f59c9d4", "score": "0.6833226", "text": "func (o HDInsightLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v HDInsightLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "a7ebabf70074b56379fe0f15bdfe26c5", "score": "0.6820457", "text": "func (o ShopifyLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v ShopifyLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "5318c1664a1f470223d374fee5bc715d", "score": "0.6811413", "text": "func (o GreenplumLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v GreenplumLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "7b4b5a69e2443a97f232ed0031808694", "score": "0.68083423", "text": "func (o CosmosDbLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v CosmosDbLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "d1984c071aec7aa21e9402ec6d4cce21", "score": "0.6806948", "text": "func (o OracleCloudStorageLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v OracleCloudStorageLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "cbbe1f37c8afb881818933eb890e7bbb", "score": "0.68053126", "text": "func (o MagentoLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v MagentoLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "fc9260464fc63e508e45271f02cb7a2c", "score": "0.67912567", "text": "func (o MySqlLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v MySqlLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "1af170b295882f65942c0a5a4a268344", "score": "0.6790339", "text": "func (o SalesforceMarketingCloudLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SalesforceMarketingCloudLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "277623d9daa73a7883320c91efb7a6bc", "score": "0.67880833", "text": "func (o AzureSqlDWLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureSqlDWLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "d7e865bc8862121b172441080c998f01", "score": "0.6785291", "text": "func (o HiveLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v HiveLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "1b8c328cd4e92fdf84da1268d12d56ab", "score": "0.6780075", "text": "func (o AzureDatabricksDeltaLakeLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureDatabricksDeltaLakeLinkedService) interface{} { return v.EncryptedCredential 
}).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "d391c19559ca646f391c1d51f3b15ee9", "score": "0.6776216", "text": "func (o EloquaLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v EloquaLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "e91ffbe4e50392e4fa28bf0a7c5aad39", "score": "0.6756412", "text": "func (o MariaDBLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v MariaDBLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "e9d3ac484389874171f2b80b5e086e58", "score": "0.6755561", "text": "func (o AzureDataLakeStoreLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureDataLakeStoreLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "f54f5f1e9aa3ab7195274613bae4a31c", "score": "0.6751941", "text": "func (o OracleServiceCloudLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v OracleServiceCloudLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "c70ff4ffb2cc56b71223fb6917d2beb3", "score": "0.67452574", "text": "func (o AzureStorageLinkedServiceOutput) EncryptedCredential() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v AzureStorageLinkedService) *string { return v.EncryptedCredential }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "a9e52a3c4e2227543243c310a08ea1bb", "score": "0.67382705", "text": "func (o DrillLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v DrillLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "016e9302fde48ff0f979ed2a13f492f1", "score": "0.6735892", "text": "func (o HDInsightOnDemandLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v HDInsightOnDemandLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "4ac7b925c75091405d308781b74bb6ee", "score": "0.6735469", "text": "func (o AzureTableStorageLinkedServiceOutput) EncryptedCredential() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v AzureTableStorageLinkedService) *string { return v.EncryptedCredential }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "64e9e15dedd10992ec8e35e390f16950", "score": "0.6735091", "text": "func (o Db2LinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v Db2LinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "14fc190ba6a389385775af5d32da76f5", "score": "0.67227226", "text": "func (o AzureDataLakeAnalyticsLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureDataLakeAnalyticsLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "b29f4304e80e5c191978e013830bbf28", "score": "0.6721946", "text": "func (o AzureSqlDatabaseLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureSqlDatabaseLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "619638674f5750e5f0ee9568444e8883", "score": "0.6715296", "text": "func (o AzureSqlMILinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn 
o.ApplyT(func(v AzureSqlMILinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "6f626f914350a4a7bc57800a2721588c", "score": "0.67042124", "text": "func (o ODataLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v ODataLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "a8d8a8d926b4dd3271742a519a99eecc", "score": "0.67039585", "text": "func (o SqlServerLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SqlServerLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "63d0b40df66e5921dd55c1bc2b924708", "score": "0.67008257", "text": "func (o OracleLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v OracleLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "41c007463680e22c69c5a964ebf4c7fc", "score": "0.66990715", "text": "func (o OdbcLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v OdbcLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "9ce8927a9d42df99ce0bd1879f27e4c7", "score": "0.6690918", "text": "func (o MongoDbLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v MongoDbLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "b269aa6ec0dd7d92cdeac9832385c81f", "score": "0.6686018", "text": "func (o AzureDataLakeStoreLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureDataLakeStoreLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "705452194c813f7141511351b104f0d0", "score": "0.668571", "text": "func (o NetezzaLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v NetezzaLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "9b3ae7e7305fa5ad1e4a6d175b8e559f", "score": "0.66827637", "text": "func (o MySqlLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v MySqlLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "33ef2802584a0cd880d42f768207d409", "score": "0.66815686", "text": "func (o OracleCloudStorageLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v OracleCloudStorageLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "2fd7e3418bd7a7d6892641c2362a8fc4", "score": "0.66805893", "text": "func (o GoogleBigQueryLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v GoogleBigQueryLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "17cbc2bad4cf0cad876333cdf90fd3e7", "score": "0.6680343", "text": "func (o CassandraLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v CassandraLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "afe9de4ec56822c9aff386458e3f8d68", "score": "0.66799325", "text": "func (o AzureBlobStorageLinkedServiceOutput) EncryptedCredential() 
pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v AzureBlobStorageLinkedService) *string { return v.EncryptedCredential }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "af9e25e115245f016e8367b8d85a4a14", "score": "0.66790044", "text": "func (o JiraLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v JiraLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "65a81eddf8716c84c08181189e12c002", "score": "0.66774404", "text": "func (o SapHanaLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SapHanaLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "2403caaff0809d85243c4ff4eaa3d9c3", "score": "0.66750664", "text": "func (o VerticaLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v VerticaLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "aaa89ebab48b63a4cc544f1c6723fcc0", "score": "0.6670366", "text": "func (o InformixLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v InformixLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "b9a477e5cf9f23a0b7c90b41013c05f8", "score": "0.6670056", "text": "func (o SparkLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SparkLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "26b2d65bb486caf5c8282a31c3a3d734", "score": "0.66684705", "text": "func (o AzureDatabricksDeltaLakeLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureDatabricksDeltaLakeLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "e2ec08a27552f000db3c726c3d5deaa7", "score": "0.6667367", "text": "func (o AzureSqlDWLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureSqlDWLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "6d6e32eb7ab83d6bb2ca5c37a2fa13db", "score": "0.6665106", "text": "func (o AzureDatabricksLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureDatabricksLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "846674bcdd2a0890b3c5b8bd354e9616", "score": "0.6663442", "text": "func (o CouchbaseLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v CouchbaseLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "7e88fa8645713bedf25c8f169a86f8bc", "score": "0.6663005", "text": "func (o HDInsightLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v HDInsightLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "9de8c5e24ad4bfa239480465afbc2011", "score": "0.6660743", "text": "func (o PrestoLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v PrestoLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "b920fe534bc1aae44ad5f456ff36ff22", "score": "0.6657417", "text": "func (o 
AzureMariaDBLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureMariaDBLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "1b1ac061004cf0601df657a0fd7ccfc0", "score": "0.665404", "text": "func (o OracleServiceCloudLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v OracleServiceCloudLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "3c4db023fcf88c56b0bdd8c2b35e708e", "score": "0.6652995", "text": "func (o SapCloudForCustomerLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SapCloudForCustomerLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "23087229136832108d1e79f6518707de", "score": "0.6648645", "text": "func (o DynamicsLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v DynamicsLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "c5979ce5d98ed481c1163ca285925f6f", "score": "0.66473955", "text": "func (o ZohoLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v ZohoLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "5883d07dd57ec3a9504ebc077508d639", "score": "0.664668", "text": "func (o DynamicsAXLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v DynamicsAXLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "b1a38a51c5ebedac6dc9738134a83d13", "score": "0.6643945", "text": "func (o AmazonRedshiftLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AmazonRedshiftLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "90d901b3f74a6097ec41e44c221b1dbb", "score": "0.66397834", "text": "func (o AzureDataLakeAnalyticsLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureDataLakeAnalyticsLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "9f0ddab5968adee8d80220114192415c", "score": "0.663954", "text": "func (o SalesforceMarketingCloudLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SalesforceMarketingCloudLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "0eb2a002f5392d9e7abefae15f1a6fa0", "score": "0.6638503", "text": "func (o AzureTableStorageLinkedServiceResponseOutput) EncryptedCredential() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v AzureTableStorageLinkedServiceResponse) *string { return v.EncryptedCredential }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "0dd9974938d52d56e740e0962fe106bc", "score": "0.6638129", "text": "func (o MarketoLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v MarketoLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "4174645b204e392f5010381dd2f91a9a", "score": "0.6635395", "text": "func (o HBaseLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v HBaseLinkedService) interface{} { return v.EncryptedCredential 
}).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "acfde16b289d4aa68bcd870dda6be070", "score": "0.66340494", "text": "func (o CosmosDbLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v CosmosDbLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "094c9cd76203bc698cada7dba6aa9340", "score": "0.6632901", "text": "func (o MagentoLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v MagentoLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "2c4ca5c2640b37b7a3818622f353400c", "score": "0.663222", "text": "func (o AzureStorageLinkedServiceResponseOutput) EncryptedCredential() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v AzureStorageLinkedServiceResponse) *string { return v.EncryptedCredential }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "1d6fccb029a51951e0abfcaed9c5af46", "score": "0.66310257", "text": "func (o CommonDataServiceForAppsLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v CommonDataServiceForAppsLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "5d9d78c995e5a3f6ae816ea532df2891", "score": "0.6626317", "text": "func (o DynamicsCrmLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v DynamicsCrmLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "6e6df571d1c60f773c1043cbd0e1c15d", "score": "0.66248447", "text": "func (o SalesforceLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SalesforceLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "debbfc94a843140fe37922cf84726d84", "score": "0.66229", "text": "func (o ImpalaLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v ImpalaLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "bf932c254a62c31269159fc941c197e7", "score": "0.66203254", "text": "func (o ResponsysLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v ResponsysLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "ba70cc5b182ba97c7a6e85cf3a0e60be", "score": "0.66152984", "text": "func (o SalesforceServiceCloudLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SalesforceServiceCloudLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "2fbc713e3d6442cf8ff1c62fa82e79f8", "score": "0.66033053", "text": "func (o MicrosoftAccessLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v MicrosoftAccessLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "6b97098957e62cb07a8eca7f46af51e4", "score": "0.66005665", "text": "func (o SharePointOnlineListLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SharePointOnlineListLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "6ba10efd45ca56befe612a031d926622", "score": "0.6599483", "text": "func (o GreenplumLinkedServiceResponseOutput) EncryptedCredential() 
pulumi.AnyOutput {\n\treturn o.ApplyT(func(v GreenplumLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "7b388debd4bc5abfa112555e210bd920", "score": "0.65983623", "text": "func (o PhoenixLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v PhoenixLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "825b24d58add393cd1a501f06dc6b3eb", "score": "0.6594046", "text": "func (o AzureBlobStorageLinkedServiceResponseOutput) EncryptedCredential() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v AzureBlobStorageLinkedServiceResponse) *string { return v.EncryptedCredential }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "de960d4b3b9436689e5815c86bad30f7", "score": "0.65889025", "text": "func (o GoogleBigQueryLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v GoogleBigQueryLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "81a54e105cc3efbcf7d3a95fb144fd50", "score": "0.65882665", "text": "func (o EloquaLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v EloquaLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "0ed7dced45ddafa8ac545bd41cb8e7ad", "score": "0.65881497", "text": "func (o AzureSqlDatabaseLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureSqlDatabaseLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "0d698acac00450a05de5fa6a4e651cb1", "score": "0.6584252", "text": "func (o ShopifyLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v ShopifyLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "bdde80a7d26735b3dfd81e1a0533f842", "score": "0.6583237", "text": "func (o MariaDBLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v MariaDBLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "e240d63b05b367dcc236fc351c866345", "score": "0.6582959", "text": "func (o TeradataLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v TeradataLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "30dc9f09269128ea1504b6a1eb799982", "score": "0.6577959", "text": "func (o HiveLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v HiveLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "3c19c840f3742c1f1d459f35a4fa54d9", "score": "0.6574844", "text": "func (o AzureSearchLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureSearchLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "2c23a68df9d90d01953a8a0602611d59", "score": "0.65719765", "text": "func (o SapEccLinkedServiceOutput) EncryptedCredential() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SapEccLinkedService) *string { return v.EncryptedCredential }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": 
"2e1eaea352b0ea9c03378cb3a9ef9598", "score": "0.6571173", "text": "func (o MongoDbLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v MongoDbLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "9e05a2936c9c048418d99509814dd461", "score": "0.6567791", "text": "func (o AzureMySqlLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureMySqlLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "a2ec19ce70128e126ea34a9ebd1805c2", "score": "0.6565422", "text": "func (o AzureDatabricksLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureDatabricksLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "99d718a60f14ceed89c83ba8778772be", "score": "0.6562712", "text": "func (o XeroLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v XeroLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "3c3ed6a75612e03ff67128675b9bfa8c", "score": "0.65608805", "text": "func (o GoogleCloudStorageLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v GoogleCloudStorageLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "c272bea8a2e2f0a2a345610a0ab5fcfe", "score": "0.6556979", "text": "func (o PostgreSqlLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v PostgreSqlLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "4b8c9aaa7c637141e4f60b6f65641806", "score": "0.65563476", "text": "func (o QuickBooksLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v QuickBooksLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "75e226cd67b8174693e6c027c4fea4d1", "score": "0.65533", "text": "func (o SnowflakeLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SnowflakeLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "415253536529cc250b925854cb1b3f4a", "score": "0.6553296", "text": "func (o Office365LinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v Office365LinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "1044fd0d6522301615ed419fc05f350d", "score": "0.6550736", "text": "func (o Db2LinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v Db2LinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "fe3ef3da5a574d07810d1907edaecfb3", "score": "0.65502167", "text": "func (o AzureSearchLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureSearchLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "15a32b4e476b96c21c1352b91910f2d5", "score": "0.6546425", "text": "func (o SquareLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SquareLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", 
"title": "" }, { "docid": "e7181b4efa25710d60ec505390da06bd", "score": "0.6545643", "text": "func (o AzurePostgreSqlLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzurePostgreSqlLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "3ceb7b4e01ba3f213b71b83127ac2b70", "score": "0.6542819", "text": "func (o SalesforceServiceCloudLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v SalesforceServiceCloudLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "ddb2ad310fb8d37a24e3e6a951db01ee", "score": "0.6540545", "text": "func (o AzureMariaDBLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureMariaDBLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "5e05d13d74a8ea23767da3a237a9d04d", "score": "0.6540017", "text": "func (o HttpLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v HttpLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "7be991318372fdde2d59b4c41c0471ec", "score": "0.6538972", "text": "func (o AzureSqlMILinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureSqlMILinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "f3bf8fd2efa94df0ba10dd9b2e4d4c86", "score": "0.6532426", "text": "func (o AzureMySqlLinkedServiceResponseOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v AzureMySqlLinkedServiceResponse) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" }, { "docid": "64db31d16ff1e0fc0932873496b993ec", "score": "0.65306854", "text": "func (o HubspotLinkedServiceOutput) EncryptedCredential() pulumi.AnyOutput {\n\treturn o.ApplyT(func(v HubspotLinkedService) interface{} { return v.EncryptedCredential }).(pulumi.AnyOutput)\n}", "title": "" } ]
7d53f35eecaa63d1fded6ad92f9c097a
WithHTTPClient adds the HTTPClient to the get senders params
[ { "docid": "94a07cdddd83d57c9899367fcf54a0ba", "score": "0.76215374", "text": "func (o *GetSendersParams) WithHTTPClient(client *http.Client) *GetSendersParams {\n\to.SetHTTPClient(client)\n\treturn o\n}", "title": "" } ]
[ { "docid": "91e11ca4d63091550e55d28eca43e91b", "score": "0.80215585", "text": "func (o *GetSendersParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "7138d62676e0f7c88a9c56c3fb5b86e3", "score": "0.70748955", "text": "func WithHTTPClient(doer HttpRequestDoer) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.Client = doer\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "f8f2b2e4dee7a084ebf57faa1b96a04d", "score": "0.7062172", "text": "func (o *GetLTENetworkIDSubscribersParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "341b37e52b36cf71381b3b8d73f28838", "score": "0.7053967", "text": "func (o *GetPeersParams) SetHTTPClient(client *http.Client) 
{\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "46204c6e89281296268bb96bc4b975a5", "score": "0.6978956", "text": "func (o *PostLTENetworkIDSubscribersParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "5ad35a225965e5a386543976267e6482", "score": "0.6946476", "text": "func WithHTTPClient(httpclient *http.Client) ClientOption {\n\treturn func(client *Client) {\n\t\tclient.httpClient = httpclient\n\t}\n}", "title": "" }, { "docid": "eb24c861114fcc26b2fb5b34ffbc6ac6", "score": "0.68720454", "text": "func (o *GetHoldingsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "acecfeb07d42cf7d1b684189eb674801", "score": "0.68471396", "text": "func WithHTTPClient(client httpClient) Opt {\n\treturn func(o *Client) {\n\t\to.http = client\n\t}\n}", "title": "" }, { "docid": "2741bcf359839d616dd0168bfc82bd34", "score": "0.6837015", "text": "func (o *SendTestSMSParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "13eb599c63b5aefd2926a08722c2a553", "score": "0.6826683", "text": "func (o *GetPassesParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "6bbe2a9ea0d3914f053cb8cf505b1a01", "score": "0.6823558", "text": "func NewGetSendersParamsWithHTTPClient(client *http.Client) *GetSendersParams {\n\tvar ()\n\treturn &GetSendersParams{\n\t\tHTTPClient: client,\n\t}\n}", "title": "" }, { "docid": "ffe4c0aa465c5350572dd3a6888af7f4", "score": "0.6820194", "text": "func (o *GetClientsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "afba9a362decc94ef3f57ec6c4d4350b", "score": "0.678178", "text": "func WithHTTPClient(client *http.Client) Option {\n\treturn func(c *Client) {\n\t\tc.httpClient = client\n\t}\n}", "title": "" }, { "docid": "9937239bedbae26831811c459c05e105", "score": "0.6781221", "text": "func (o *PollersDataGetParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "834f92be073c6bf9d2a19b66759659ac", "score": "0.6776082", "text": "func (o *AddPartsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "437cddd2b0fa8ece8d70d8d4fa07e5e7", "score": "0.6770958", "text": "func (o *GetUsersThisParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "6becae18b18bdf697e3bee06570358f6", "score": "0.6759931", "text": "func WithHTTPClient(httpcl *http.Client) Option {\n\treturn func(c *httpClient) {\n\t\tc.client = httpcl\n\t}\n}", "title": "" }, { "docid": "c93f7b94a56b0335f0a526cf09a47426", "score": "0.6757466", "text": "func (o *ListTicketsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "0bfbd646efe2a278c7b5e679fe9528c0", "score": "0.67512816", "text": "func WithHTTPClient(client HTTPClient) ClientOpt {\n\treturn func(o *clientOptions) {\n\t\to.http = client\n\t}\n}", "title": "" }, { "docid": "0bfbd646efe2a278c7b5e679fe9528c0", "score": "0.67512816", "text": "func WithHTTPClient(client HTTPClient) ClientOpt {\n\treturn func(o *clientOptions) {\n\t\to.http = client\n\t}\n}", "title": "" }, { "docid": "0c8b0ae829dc0c9764d9f366251f1cf1", "score": "0.67435014", "text": "func (o *GetaspecificRtcNetworkParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "8ad0e704fb6d27049ebf2eae4358f7cd", "score": "0.6725134", "text": "func (o 
*ListCryptokeysParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "a26353e6d92bac552ec79888cc05ef8e", "score": "0.6724912", "text": "func WithHTTPClient(httpClient *http.Client) Option {\n\treturn func(er *EarningsReport) error {\n\t\ter.httpClient = httpClient\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "2f64c605404b1d03c306f997304c1e12", "score": "0.6722257", "text": "func (o *GetMandatesParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "2b4edf099a402618f84476d4e1ec9c64", "score": "0.6718496", "text": "func (o *ChatGetChannelsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "45c54fe1b717af24ef6b57bc1f2e4a1c", "score": "0.6709991", "text": "func (o *KeyManagerKeysGetParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "5b965c0526a84805d23c125f50c2a8d5", "score": "0.6706001", "text": "func (o *GetMigrateTransmittersParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "14a3eaddd172a77b897fbec622360d54", "score": "0.6680452", "text": "func WithHTTPClient(c *http.Client) ClientOption {\n\treturn func(config *clientConfig) {\n\t\tconfig.client = c\n\t}\n}", "title": "" }, { "docid": "173a0a4fdbc5ce8c522dbe0c3ea6bebb", "score": "0.6667362", "text": "func WithHTTPClient(client *http.Client) ClientOption {\n\treturn func(opts *ClientOptions) {\n\t\topts.client = client\n\t}\n}", "title": "" }, { "docid": "f1c255ca886c423ba71637910018b0e9", "score": "0.6659278", "text": "func WithHTTPClient(client *http.Client) Option {\n\treturn withHTTPClient{client}\n}", "title": "" }, { "docid": "c11cefa72ad8653b61b821127c7b10ec", "score": "0.6650348", "text": "func WithHTTPClient(c *http.Client) ClientOption {\n\treturn func(client *Client) error {\n\t\tif _, ok := c.Transport.(*transport); !ok {\n\t\t\tt := c.Transport\n\t\t\tif t != nil {\n\t\t\t\tc.Transport = &transport{Base: t}\n\t\t\t} else {\n\t\t\t\tc.Transport = &transport{Base: http.DefaultTransport}\n\t\t\t}\n\t\t}\n\t\tclient.httpClient = c\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "993836100f21d9f0d872e91eee62c868", "score": "0.6648309", "text": "func (o *ExtendedAddressSuggestionsUsingGETParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "d1f59dabfacf71fb997c3ef653a0e59d", "score": "0.66382325", "text": "func (o *GetStakingRedelegationsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "e909b8f0a705fd5f5f8129a0c15a0f36", "score": "0.66343963", "text": "func (o *GetACartParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "4e0ee9ebe8699fc49dadf7ba019c9a29", "score": "0.6632452", "text": "func (o *QueryItemsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "fd0bf1994ec20886ea42d4f5ed29f51e", "score": "0.6627071", "text": "func (o *GetSupplierParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "3837cd541eca1d5defbd4fc9a41b996a", "score": "0.66254044", "text": "func (o *QueryByWiqlParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "5259983d042d06666742c13c94cce467", "score": "0.66157234", "text": "func (o *GetUsersParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", 
"title": "" }, { "docid": "3edfbb17a27ccd06eedafcf9d8d2cbd1", "score": "0.6606884", "text": "func (o *GetSubcriptionForMetroUsingGETParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "3157f7e37300c7ba440645812b30b769", "score": "0.66045815", "text": "func (o *SendJobCommandParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "74797a0b453984a516ef48bf0ab07c15", "score": "0.66043836", "text": "func (o *ActivateSimParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "4256c3fac7e1481456645d949f5377b5", "score": "0.6595", "text": "func (c *Client) WithHTTPClient(client *http.Client) *Client {\n c.HTTPClient = client\n return c\n}", "title": "" }, { "docid": "8def099ae13baf2a395a4247f3a769b0", "score": "0.6594787", "text": "func (o *BikePointSearchParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "8e41c20fe49086ff319b8195dea4e340", "score": "0.6588728", "text": "func (o *ListAttachmentsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "a0d5b4d27ede7cfbe4b3c5c2c2f897c0", "score": "0.6583336", "text": "func (o *DownloadContentUsingGETParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "e98ea1854b7d569d026f1facd82a3a0b", "score": "0.65770036", "text": "func (o *GetAllSubcriptionsUsingGETParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "8547ec45a4cb4537876ce5fbf13fbb69", "score": "0.65738", "text": "func (o *GetLTENetworkIDSubscribersParams) WithHTTPClient(client *http.Client) *GetLTENetworkIDSubscribersParams {\n\to.SetHTTPClient(client)\n\treturn o\n}", "title": "" }, { "docid": "362ed3d356a3275894d38ac5aabc5d80", "score": "0.65725034", "text": "func WithHTTPClient(c *http.Client) Option {\n\treturn func(o *options) {\n\t\to.httpClient = c\n\t}\n}", "title": "" }, { "docid": "0b8590cebfdd785c77c958c043bfea57", "score": "0.6564095", "text": "func (o *GetWebhookParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "2a3d6d7446ef8ea25421b858a13a2269", "score": "0.65619135", "text": "func (o *SearchGeoLocationsUsingGETParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "bf4adb9b11a77ccab3482358b561a7a8", "score": "0.65597796", "text": "func (o *GetGrillLightsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "fb6fdc252dc440b2428b4654252f7ce2", "score": "0.6553071", "text": "func (o *GetDevicesParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "7cd9ed6342c0b46c6d50658fc98b213c", "score": "0.65510625", "text": "func (c *Client) WithHTTPClient(newHTTPClient httpclient.HTTPClient) {\n\tc.httpClient = newHTTPClient\n}", "title": "" }, { "docid": "82e4de695d2a6607fe9adf4347f2bf8e", "score": "0.65503573", "text": "func (o *GetPrivateGetUserTradesByInstrumentAndTimeParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "a1bc178300abea825766568b062cf5d2", "score": "0.6548804", "text": "func (o *AddCallerIDParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "3d09eb84b81ecb62f098ab9eba60a3f7", "score": "0.654629", "text": "func (o *ImageJoinParams) SetHTTPClient(client *http.Client) 
{\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "851bae2ae6e85fd630d7dcffca18f9fb", "score": "0.65449226", "text": "func (o *PostOrdersOrderFidOffersParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "0759a40f5aad46de6644ca96c44a6791", "score": "0.6540142", "text": "func WithHTTPClient(client *http.Client) Option {\n\treturn func(options *Options) {\n\t\toptions.Client = client\n\t}\n}", "title": "" }, { "docid": "8153cc6c3dd026cf813cb41ba52f6fb1", "score": "0.6539966", "text": "func (o *GetAllProviderAccountsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "f8200f64087b9337e3b5eb29ebb6a553", "score": "0.65364647", "text": "func (o *AnnouncementGetParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "5f44718f619142ef8c54308d879c6c37", "score": "0.6535484", "text": "func (o *GetActivitiesTCXParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "7dfaef2d736d620ff9fee3c2ba5c7bd5", "score": "0.65347904", "text": "func (o *GetPrivateAddToAddressBookParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "debe7ec2c4bb8c4255f2fc31334cbad0", "score": "0.6534621", "text": "func (o *ExtrasImageAttachmentsListParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "1747e6fbec6a2b203152923147438344", "score": "0.65333897", "text": "func (o *QueryRewardsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "96df2afff9b949e978db515a49e64865", "score": "0.6521076", "text": "func (o *GetTotalActiveSessionParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "025008c369afd304452e9f3d8e737280", "score": "0.6519469", "text": "func (o *GetLogsByUserParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "bc84ff57b8390cbbc6be51126b0a0d4e", "score": "0.6518721", "text": "func (o *GetPtzChannelsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "49027a7e257ccc48e1d88c33d0498b8b", "score": "0.65182453", "text": "func (o *GetAllUsersParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "91455f2b2ca70da08d84d73ad8607d85", "score": "0.6515386", "text": "func (o *QuerylockedParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "9a0b483b295a6baa4b27264edd6878da", "score": "0.65070456", "text": "func (o *GetAdapterExtEthInterfacesMoidParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "54198195ad972381c177d3376aac826e", "score": "0.65017426", "text": "func WithHTTPClient(httpClient *http.Client) ClientOption {\n\treturn func(c *uaHTTPClient) {\n\t\tc.httpClient = httpClient\n\t}\n}", "title": "" }, { "docid": "8a3be7f384ede1f63373d501bd265290", "score": "0.6497873", "text": "func (o *AccountsGetAccountsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "758e611dde4ca7d217090230381c7f43", "score": "0.6496906", "text": "func (o *QueryScheduledScansParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "de61138cf17449afa72754a38a88cf61", "score": "0.6494496", "text": "func (o 
*DeleteLTENetworkIDSubscribersSubscriberIDParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "935dadddcbb08d900275a55f0d314aee", "score": "0.64940566", "text": "func WithHTTPClient(httpClient HTTPClient) Option {\n\treturn func(t *tinifyClient) error {\n\t\tif httpClient == nil {\n\t\t\treturn errFailedSetHTTPClient\n\t\t}\n\n\t\tt.httpClient = httpClient\n\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "b13e28b16e2050384de383e227ada8d0", "score": "0.6485765", "text": "func (o *CreateRuleWaiverParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "e9ec8c14f553a062b44cc272248ae3bb", "score": "0.64803725", "text": "func (o *FilesGetParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "8445260c5f7bfeae3f4cfed01c3d9957", "score": "0.64783955", "text": "func (o *GetBannedIpitemsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "dceb4da698e2c91e083edd609a5b2b1b", "score": "0.64782584", "text": "func (o *GetaspecificCallListParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "016ca4b895736235b21788408e499dd0", "score": "0.6477333", "text": "func (o *CreateAndConfirmPreregParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "bd8476b1b3e5e2a1cad53dff0136dd2b", "score": "0.64746904", "text": "func (o *MonitorCheckGetTransactionDetailsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "26293331979e12be56fd2c0958ae9b1c", "score": "0.6472421", "text": "func (o *GetAccountIdentsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "03fe949b8823ba130c287fdaa4772dc5", "score": "0.64718366", "text": "func (o *AddComponentParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "6386990f69373c06c82c651aa69f96b4", "score": "0.6469928", "text": "func (o *ListsPostParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "14f56873f3641a79791df1299cbff3f8", "score": "0.6469216", "text": "func (o *GetStatisticParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "0e9007a74ce707eec7fd6d26aae53b82", "score": "0.64684206", "text": "func (o *SyncUsingPOSTParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "1551efda15e11ace9fe2aa777531f329", "score": "0.6468366", "text": "func (o *ListRecommendationsParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" }, { "docid": "e7db854aba359e3940b45e3c4a70c5e6", "score": "0.6468183", "text": "func (o *GetPricesParams) SetHTTPClient(client *http.Client) {\n\to.HTTPClient = client\n}", "title": "" } ]
30ed319206ea8acb79d313a3c17866ed
GetSourcesOk returns a tuple with the Sources field value and a boolean to check if the value has been set.
[ { "docid": "5a244abfb927fb1d5a52a677ca12a288", "score": "0.8398799", "text": "func (o *LogsDateRemapper) GetSourcesOk() (*[]string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Sources, true\n}", "title": "" } ]
[ { "docid": "579f89486f6de73bd92c3e307597616e", "score": "0.8369922", "text": "func (o *StackEvent) GetSourcesOk() (*[]string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Sources, true\n}", "title": "" }, { "docid": "8ebba59d1d0e3ea2aa73e954174f76ff", "score": "0.69490397", "text": "func (o *RebuildOperationResponse) GetSpecificSourcesOk() (*[]string, bool) {\n\tif o == nil || o.SpecificSources == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SpecificSources, true\n}", "title": "" }, { "docid": "ccba453353f242e0a4fb487e87a7db13", "score": "0.68081814", "text": "func (o *CreditFreddieMacAssetVOA24) GetVALIDATION_SOURCESOk() (*ValidationSources, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.VALIDATION_SOURCES, true\n}", "title": "" }, { "docid": "90c395e1877455718a47a8757140b46a", "score": "0.6572799", "text": "func (c *Config) LenSources() int {\n\treturn len(c.Sources)\n}", "title": "" }, { "docid": "b08a800311f2584c8ca31acffaf7935e", "score": "0.64437693", "text": "func (q *QueryResult) GetSources() *Sources {\n if q.Sources != nil {\n return q.Sources\n }\n return nil\n}", "title": "" }, { "docid": "1e112e6858c32db003593c9ab0aa1893", "score": "0.6378987", "text": "func (o *StoragePureProtectionGroup) GetSourceOk() (*string, bool) {\n\tif o == nil || o.Source == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Source, true\n}", "title": "" }, { "docid": "1964ef3f0a66bf2061116b244176b53d", "score": "0.63039213", "text": "func (o *CustomProcessMetadataKey) GetSourceOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Source, true\n}", "title": "" }, { "docid": "a54297245440e073db42caa2eebef499", "score": "0.6300908", "text": "func (c *Client) GetSources(p parameters) (SourceResults, error) {\n\tvar o SourceResults\n\tap := allowedParameters{ //List of allowed parameters and their allowed types\n\t\t\"country\": \"string\",\n\t\t\"category\": \"string\",\n\t\t\"lanague\": \"string\"}\n\tu, err := p.buildURL(c.APIUrl+apiSourcePath, &ap)\n\tif err != nil {\n\t\treturn o, err\n\t}\n\td, err := c.makeRequest(u)\n\tif err != nil {\n\t\treturn o, err\n\t}\n\terr = json.Unmarshal(d, &o)\n\tif err != nil {\n\t\treturn o, nil\n\t}\n\treturn o, nil\n}", "title": "" }, { "docid": "905000d0e0a82240e53bc71a45ac7880", "score": "0.62179196", "text": "func (o *GetRandomRecipes200ResponseRecipesInner) GetSourceUrlOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.SourceUrl, true\n}", "title": "" }, { "docid": "1c9763c531d6a6dfa49a793404f5276d", "score": "0.6204241", "text": "func (o ConnectionMonitorTestGroupResponseOutput) Sources() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v ConnectionMonitorTestGroupResponse) []string { return v.Sources }).(pulumi.StringArrayOutput)\n}", "title": "" }, { "docid": "5e3f411613990c1d155502c60bd67440", "score": "0.6163317", "text": "func (o *InlineObject1108) GetSourceDataOk() (AnyOfobject, bool) {\n\tif o == nil || o.SourceData == nil {\n\t\tvar ret AnyOfobject\n\t\treturn ret, false\n\t}\n\treturn *o.SourceData, true\n}", "title": "" }, { "docid": "161ef40fc8ed97b2f1e0be31eac41b8b", "score": "0.61208373", "text": "func (o *Endpoint) GetSourceIdOk() (*string, bool) {\n\tif o == nil || o.SourceId == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SourceId, true\n}", "title": "" }, { "docid": "555632ff1250f842fcf1d85460d2af26", "score": "0.6118472", "text": "func (m *HasPayloadLinkResultItem) GetSources()([]DeviceAndAppManagementAssignmentSource) {\n val, err := 
m.GetBackingStore().Get(\"sources\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.([]DeviceAndAppManagementAssignmentSource)\n }\n return nil\n}", "title": "" }, { "docid": "78de0b40ddfc12ca60563eb194d2a4dc", "score": "0.60474735", "text": "func (o *SourceTypesCollection) GetLinksOk() (*CollectionLinks, bool) {\n\tif o == nil || o.Links == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Links, true\n}", "title": "" }, { "docid": "682eb7a369bc7f3edf8a93eed170aed7", "score": "0.6044001", "text": "func (f *FredClient) GetSources(params map[string]interface{}) (*FredType, error) {\n\n\tfc, err := f.operate(params, sourcesParam)\n\n\tif err != nil {\n\t\tf.logError(sourcesParam, err)\n\t\treturn nil, err\n\t}\n\n\treturn fc, nil\n\n}", "title": "" }, { "docid": "0546d0a9190f604f9ae9347ca3ecafec", "score": "0.6022802", "text": "func (o ConnectionMonitorTestGroupOutput) Sources() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v ConnectionMonitorTestGroup) []string { return v.Sources }).(pulumi.StringArrayOutput)\n}", "title": "" }, { "docid": "1bae43074b49bd249b0ea88dd86d4545", "score": "0.60100156", "text": "func (o *LogsDateRemapper) GetSources() []string {\n\tif o == nil {\n\t\tvar ret []string\n\t\treturn ret\n\t}\n\n\treturn o.Sources\n}", "title": "" }, { "docid": "90bc094dfda76e623b8c1a672379dc60", "score": "0.5977954", "text": "func (c *Config) GetSources() []*Source {\n\treturn c.Sources\n}", "title": "" }, { "docid": "ff797cc6b30a00d7a1785e1c745f8e60", "score": "0.59374774", "text": "func (o *StackEvent) GetUrlsOk() (*[]string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Urls, true\n}", "title": "" }, { "docid": "4807c307bd71a7f086ec8b5280578b39", "score": "0.5928494", "text": "func (o *ForecastDefinition) GetDataSourceOk() (*string, bool) {\n\tif o == nil || o.DataSource == nil {\n\t\treturn nil, false\n\t}\n\treturn o.DataSource, true\n}", "title": "" }, { "docid": "11bc7f3b17bb8e51f56d3b8d8ade6e6e", "score": "0.5917413", "text": "func (o *StackEvent) GetSources() []string {\n\tif o == nil {\n\t\tvar ret []string\n\t\treturn ret\n\t}\n\n\treturn o.Sources\n}", "title": "" }, { "docid": "31adefcf867e24f6c4a48101a4cde107", "score": "0.59050417", "text": "func (o *RebuildOperationResponse) HasSpecificSources() bool {\n\tif o != nil && o.SpecificSources != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "1198415c9e561ebb41dd16a6029846ad", "score": "0.5886506", "text": "func (o *ImportOperationResponse) GetSourceDirOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.SourceDir, true\n}", "title": "" }, { "docid": "883499db3b9bd7db5caddfe6872b2762", "score": "0.587393", "text": "func (o *EnabledProviderAccount) GetSourceIdOk() (*string, bool) {\n\tif o == nil || IsNil(o.SourceId) {\n\t\treturn nil, false\n\t}\n\treturn o.SourceId, true\n}", "title": "" }, { "docid": "5b8a5662598f091ffb7cf26072b75d4e", "score": "0.585968", "text": "func (o *InlineResponse20065ProjectFiles) GetFileSourceOk() (*string, bool) {\n\tif o == nil || o.FileSource == nil {\n\t\treturn nil, false\n\t}\n\treturn o.FileSource, true\n}", "title": "" }, { "docid": "fbf335bf43837f8fecc8c04695b05062", "score": "0.58559185", "text": "func (o *ResourceService) GetSourceServiceOk() (*string, bool) {\n\tif o == nil || o.SourceService == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SourceService, true\n}", "title": "" }, { "docid": "0d75f2f84c14368be6a2da90e5866e17", "score": "0.5850916", "text": "func (c *Client) Sources(p 
params) (SourcesResponse, error) {\n\treq, err := c.newRequest(\"GET\", \"/v2/sources\", p, nil)\n\n\tvar res SourcesResponse\n\tif err != nil {\n\t\treturn res, err\n\t}\n\t_, err = c.do(req, &res)\n\tif err != nil {\n\t\treturn res, err\n\t}\n\n\tif res.Status == \"error\" {\n\t\treturn res, &apiError{res.Code, res.Message}\n\t}\n\n\treturn res, nil\n}", "title": "" }, { "docid": "62b303e671cff8b1e5e139679dec6aef", "score": "0.58388823", "text": "func (o *LogMetricConfig) GetLogSourceFiltersOk() (*[]LogSourceFilter, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.LogSourceFilters, true\n}", "title": "" }, { "docid": "6671ddc1b3137c9cf0b538ccd35f808a", "score": "0.57983476", "text": "func (c *Client) GetSources(options *QueryOptions) (*GetSampleSourceResponse, error) {\n\tres := &GetSampleSourceResponse{}\n\tpath := fmt.Sprintf(\"/sources%s\", query2String(options))\n\terr := c.requestAndParseResponse(\"GET\", path, nil, res)\n\treturn res, err\n}", "title": "" }, { "docid": "42a3c94001a7d64e11321a803c501eab", "score": "0.5746887", "text": "func (o *UsageAttributionBody) GetTagConfigSourceOk() (*string, bool) {\n\tif o == nil || o.TagConfigSource == nil {\n\t\treturn nil, false\n\t}\n\treturn o.TagConfigSource, true\n}", "title": "" }, { "docid": "97210a9697ad09493989f06bc09e22ac", "score": "0.56969285", "text": "func (o *ConvertAmounts200Response) GetSourceAmountOk() (*float32, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.SourceAmount, true\n}", "title": "" }, { "docid": "2dd21a39bacc628d74707c18b1158537", "score": "0.56545764", "text": "func (f *FileSystemSource) GetSourcesInfo() []pipeline.SourceInfo {\n\n\tsources := make([]pipeline.SourceInfo, len(f.filesInfo))\n\tvar i = 0\n\tfor _, file := range f.filesInfo {\n\t\tsources[i] = pipeline.SourceInfo{SourceName: file.SourceURI, TargetAlias: file.TargetAlias, Size: uint64(file.FileStats.Size())}\n\t\ti++\n\t}\n\n\treturn sources\n}", "title": "" }, { "docid": "7f9d86f5e2fae544f6d1b68019dfa8ad", "score": "0.56288284", "text": "func (o *GetRandomRecipes200ResponseRecipesInner) GetSourceNameOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.SourceName, true\n}", "title": "" }, { "docid": "fa24a9ff92e8a9f4690bd17b610423eb", "score": "0.5625395", "text": "func (o *ApplianceNetworkStatusAllOf) GetSourceHostnameOk() (*string, bool) {\n\tif o == nil || o.SourceHostname == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SourceHostname, true\n}", "title": "" }, { "docid": "de517f226a893133ddeaaf8cffb5de42", "score": "0.5624494", "text": "func (o *SourceTypesCollection) GetDataOk() (*[]SourceType, bool) {\n\tif o == nil || o.Data == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Data, true\n}", "title": "" }, { "docid": "30a014a5b51e5f870edf366e4e09132c", "score": "0.562142", "text": "func (o *DnsEvent) GetSrcPortOk() (*int32, bool) {\n\tif o == nil || o.SrcPort == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SrcPort, true\n}", "title": "" }, { "docid": "54cd1a9505cf7cc6beef966e86405806", "score": "0.56072634", "text": "func (o *DnsEvent) GetSrcIDOk() (*string, bool) {\n\tif o == nil || o.SrcID == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SrcID, true\n}", "title": "" }, { "docid": "f6fa79e27d5b1e865669ba0ab6fdf5cb", "score": "0.55602694", "text": "func (s *SourceService) FindSources(ctx context.Context, opt platform.FindOptions) ([]*platform.Source, int, error) {\n\tu, err := newURL(s.Addr, sourcePath)\n\tif err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\treq, err := 
http.NewRequest(\"GET\", u.String(), nil)\n\tif err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\tSetToken(s.Token, req)\n\n\thc := newClient(u.Scheme, s.InsecureSkipVerify)\n\tresp, err := hc.Do(req)\n\tif err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\tif err := CheckError(resp); err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\tvar bs []*platform.Source\n\tif err := json.NewDecoder(resp.Body).Decode(&bs); err != nil {\n\t\treturn nil, 0, err\n\t}\n\tdefer resp.Body.Close()\n\n\treturn bs, len(bs), nil\n}", "title": "" }, { "docid": "921e9a93ef5f04137be3490833f32385", "score": "0.5553907", "text": "func (N *NewsFeedAPI) GetSources(Query models.NewsQueryParameter) (models.Sources, error) {\n\tvar sourcesFeed models.Sources\n\tquerystring, err := N.BuildQueryString(\"sources\", Query)\n\tfmt.Println(querystring)\n\tresp, err := http.Get(N.Config.APIURL + querystring)\n\tif err != nil {\n\t\tlog.Println(\"Error query the newsapi service sources\")\n\t\treturn sourcesFeed, err\n\t}\n\tjson.NewDecoder(resp.Body).Decode(&sourcesFeed)\n\treturn sourcesFeed, err\n}", "title": "" }, { "docid": "bba0e6e30b345f151958cfa8bddbd23e", "score": "0.55526483", "text": "func GetSources(g1 *viper.Viper) ([]Sources, error) {\n\tvar subtreeKey = \"sources\"\n\tvar cfg []Sources\n\t//for key, value := range SourcesTemplate {\n\t//\tg1.SetDefault(key, value)\n\t//}\n\n\t//g1.AutomaticEnv()\n\t// config already read. substree passed\n\terr := g1.UnmarshalKey(subtreeKey, &cfg)\n\tif err != nil {\n\t\tlog.Fatal(\"error when parsing \", subtreeKey, \" config: \", err)\n\t\t//No sources, so nothing to run\n\t}\n\tfor i, s := range cfg {\n\t\tcfg[i] = populateDefaults(s)\n\t}\n\treturn cfg, err\n}", "title": "" }, { "docid": "be6003bbce97876f921e5f7377108d85", "score": "0.5499034", "text": "func (o *NormalizedProjectRevisionThirdPartyProvider) GetSubjectSourceOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SubjectSource.Get(), o.SubjectSource.IsSet()\n}", "title": "" }, { "docid": "e6c8a513e156982bc30c72c0bcc8a184", "score": "0.5482338", "text": "func (o *FormulaAndFunctionApmDependencyStatsQueryDefinition) GetDataSourceOk() (*FormulaAndFunctionApmDependencyStatsDataSource, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.DataSource, true\n}", "title": "" }, { "docid": "97f39853f2bfed96eabf53cfbe6bfbe2", "score": "0.54795676", "text": "func (o DomainOutput) Sources() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v *Domain) pulumi.StringArrayOutput { return v.Sources }).(pulumi.StringArrayOutput)\n}", "title": "" }, { "docid": "ca56ff9e703c0cfbd0f78393ee167bcd", "score": "0.5474325", "text": "func (o *ConvertAmounts200Response) GetSourceUnitOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.SourceUnit, true\n}", "title": "" }, { "docid": "d31f60c33e1790565612369babd902b0", "score": "0.54192054", "text": "func (h *SourceHandler) handleGetSources(w http.ResponseWriter, r *http.Request) {\n\tctx := r.Context()\n\n\treq, err := decodeGetSourcesRequest(ctx, r)\n\tif err != nil {\n\t\tEncodeError(ctx, err, w)\n\t\treturn\n\t}\n\n\tsrcs, _, err := h.SourceService.FindSources(ctx, req.findOptions)\n\tif err != nil {\n\t\tEncodeError(ctx, err, w)\n\t\treturn\n\t}\n\n\tres := newSourcesResponse(srcs)\n\n\tif err := encodeResponse(ctx, w, http.StatusOK, res); err != nil {\n\t\tlogEncodingError(h.Logger, r, err)\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "943de49e4fe7f1a0e5a1090afafb87e0", "score": "0.54038393", "text": "func (o *DnsEvent) 
GetSrcIPOk() (*string, bool) {\n\tif o == nil || o.SrcIP == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SrcIP, true\n}", "title": "" }, { "docid": "2a24ce43dae2ed863133ad19a86803b0", "score": "0.5382648", "text": "func (o *DnsEvent) GetSrcHostOk() (*string, bool) {\n\tif o == nil || o.SrcHost == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SrcHost, true\n}", "title": "" }, { "docid": "65e9572091a60f43f1342e87dcc7d33a", "score": "0.5380239", "text": "func (o *PipelineFilter) GetConnectorsOk() (*[]string, bool) {\n\tif o == nil || o.Connectors == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Connectors, true\n}", "title": "" }, { "docid": "12fef53da00b1583032816ce43f23134", "score": "0.5369796", "text": "func (GitSource) IsSource() {}", "title": "" }, { "docid": "b1b32dc1613ecefa8ce6e7de44a768dc", "score": "0.5346758", "text": "func (o *VersionedRemoteProcessGroup) GetTargetUrisOk() (*string, bool) {\n\tif o == nil || o.TargetUris == nil {\n\t\treturn nil, false\n\t}\n\treturn o.TargetUris, true\n}", "title": "" }, { "docid": "44e82a6abb42f9303e35bbf4a61ef849", "score": "0.5343165", "text": "func SourcesShowFunc() {\n\topt := &SourcesShowOption{\n\t\tFrom: \"\",\n\t\tKey: \"MUST_EDIT_IT\",\n\t\tTo: \"\",\n\t}\n\tv, resp, err := client.Sources.Show(opt)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t\tos.Exit(1)\n\t}\n\tfmt.Println(resp.StatusCode)\n\tspew.Dump(v)\n}", "title": "" }, { "docid": "f72771a048f2b91cbd3950cbf7b1f2b9", "score": "0.53420097", "text": "func (m *MediaMutation) SourceURICleared() bool {\n\t_, ok := m.clearedFields[media.FieldSourceURI]\n\treturn ok\n}", "title": "" }, { "docid": "72b578c3c752098a3f9b523d2120016e", "score": "0.53391045", "text": "func (o *DnsEvent) GetSrcUserOk() (*string, bool) {\n\tif o == nil || o.SrcUser == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SrcUser, true\n}", "title": "" }, { "docid": "90ff25b4ae4b5bca24aa9818ee8c70b8", "score": "0.53338027", "text": "func (o *GetSourceDevice200ResponseConfigInterface) GetAddressesOk() (*GetSourceDevice200ResponseConfigInterfaceAddresses, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Addresses, true\n}", "title": "" }, { "docid": "60e69fbb6ec066103cede90e6e17acc6", "score": "0.53065234", "text": "func (o InstanceAccessControlAttributesAttributeValueOutput) Sources() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v InstanceAccessControlAttributesAttributeValue) []string { return v.Sources }).(pulumi.StringArrayOutput)\n}", "title": "" }, { "docid": "464b18957a37016c036ca6b6dd6f8828", "score": "0.529801", "text": "func (o *FiltersSnapshot) GetStatesOk() (*[]string, bool) {\n\tif o == nil || o.States == nil {\n\t\treturn nil, false\n\t}\n\treturn o.States, true\n}", "title": "" }, { "docid": "c4db244bb45ebf11ec27839b973f9c9c", "score": "0.52978665", "text": "func (m *HasPayloadLinkResultItem) SetSources(value []DeviceAndAppManagementAssignmentSource)() {\n err := m.GetBackingStore().Set(\"sources\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "1c9e956177b906734afd59b6c12ccde4", "score": "0.5284007", "text": "func (stream *MultiStream) ShouldLogSourceInfo() bool {\n\tfor _, s := range stream.streams {\n\t\tif s.ShouldLogSourceInfo() {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "854d28730be71f82733e1e441cb5dbe1", "score": "0.5278301", "text": "func (o *RebuildOperationResponse) GetSpecificSources() []string {\n\tif o == nil || o.SpecificSources == nil {\n\t\tvar ret []string\n\t\treturn ret\n\t}\n\treturn 
*o.SpecificSources\n}", "title": "" }, { "docid": "9f1e74349c5f982d343b78be23e888b6", "score": "0.52369916", "text": "func IsErrSourceInUse(err error) bool {\n\t_, ok := err.(ErrSourceInUse)\n\treturn ok\n}", "title": "" }, { "docid": "813605141fd2117ffa750e6944ab3681", "score": "0.5232977", "text": "func (o *GetRandomRecipes200ResponseRecipesInner) GetSpoonacularSourceUrlOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.SpoonacularSourceUrl, true\n}", "title": "" }, { "docid": "83e1e7e4d4d635f2a4cb853d10196fd1", "score": "0.52295125", "text": "func (o *StoragePureProtectionGroup) GetTargetsOk() ([]string, bool) {\n\tif o == nil || o.Targets == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Targets, true\n}", "title": "" }, { "docid": "fc46e4131982240ad222d55faba94882", "score": "0.52244234", "text": "func (o AlertRuleFusionOutput) Sources() AlertRuleFusionSourceArrayOutput {\n\treturn o.ApplyT(func(v *AlertRuleFusion) AlertRuleFusionSourceArrayOutput { return v.Sources }).(AlertRuleFusionSourceArrayOutput)\n}", "title": "" }, { "docid": "e4400f242f2e9bb3283eeaef04d080bc", "score": "0.5221111", "text": "func (o *StoragePureProtectionGroup) HasSource() bool {\n\tif o != nil && o.Source != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "b593a8413c781a3435d0b21ec76a2cbb", "score": "0.52046776", "text": "func (o *StorageNetAppSnapMirrorRelationship) GetSourcePathOk() (*string, bool) {\n\tif o == nil || o.SourcePath == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SourcePath, true\n}", "title": "" }, { "docid": "c348c92c68ef842391ec50404892edc6", "score": "0.5204113", "text": "func SourcesRawFunc() {\n\topt := &SourcesRawOption{Key: \"MUST_EDIT_IT\"}\n\tv, resp, err := client.Sources.Raw(opt)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t\tos.Exit(1)\n\t}\n\tfmt.Println(resp.StatusCode)\n\tspew.Dump(v)\n}", "title": "" }, { "docid": "4ed0ce85cdeeac425d9d78c6aa2f5374", "score": "0.52007425", "text": "func (o *SmsTextualMessage) GetDestinationsOk() (*[]SmsDestination, bool) {\n\tif o == nil || o.Destinations == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Destinations, true\n}", "title": "" }, { "docid": "34a64ba7f36e39f509e83a4836cdd403", "score": "0.51998454", "text": "func (f *MACAddressFields) HasSOUR() bool {\n\treturn has1stBit(f.Flags)\n}", "title": "" }, { "docid": "9eed9634d2a02ba402ebb1f08ea77640", "score": "0.51975864", "text": "func (c *Client) FindSources(ctx context.Context, opt platform.FindOptions) ([]*platform.Source, int, error) {\n\tss := []*platform.Source{}\n\terr := c.db.View(func(tx *bolt.Tx) error {\n\t\tsrcs, err := c.findSources(ctx, tx, opt)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tss = srcs\n\t\treturn nil\n\t})\n\n\tif err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\treturn ss, len(ss), nil\n}", "title": "" }, { "docid": "a260c9aff938f0275f69216ecd4dc925", "score": "0.51892704", "text": "func (o InputOutput) Sources() InputSourceArrayOutput {\n\treturn o.ApplyT(func(v *Input) InputSourceArrayOutput { return v.Sources }).(InputSourceArrayOutput)\n}", "title": "" }, { "docid": "58f63af75d5f66436d0ff903906bd4a7", "score": "0.5172841", "text": "func HasMountSources(devfileContainer *dw.ContainerComponent) bool {\n\tvar mountSources bool\n\tif devfileContainer.MountSources == nil {\n\t\tmountSources = true\n\t} else {\n\t\tmountSources = *devfileContainer.MountSources\n\t}\n\treturn mountSources\n}", "title": "" }, { "docid": "f82d4ef7bf15117e98cd0d2227fae777", "score": "0.5172551", "text": "func 
(ssp *SampleSet) HasSource(sn SourceName) bool {\n\t_, ok := ssp.Samples[sn]\n\treturn ok\n}", "title": "" }, { "docid": "8653ef5ae798e96c104807ac105f1948", "score": "0.5165966", "text": "func (mo *LicenseManager) SourceAvailable() (bool, error) {\n\tp, err := mo.currentProperty(\"sourceAvailable\")\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tif p != nil {\n\t\treturn p.(bool), nil\n\t}\n\treturn false, nil\n\n}", "title": "" }, { "docid": "fb384e0b5c20b4530f7345126ecb5a45", "score": "0.51579213", "text": "func (o *Site) GetSitesOk() ([]MicrosoftGraphSite, bool) {\n\tif o == nil || o.Sites == nil {\n\t\tvar ret []MicrosoftGraphSite\n\t\treturn ret, false\n\t}\n\treturn *o.Sites, true\n}", "title": "" }, { "docid": "fa8ce023d1e2a37d2416abee69974d24", "score": "0.5156615", "text": "func (o *FiltersVirtualGateway) GetLinkStatesOk() (*[]string, bool) {\n\tif o == nil || o.LinkStates == nil {\n\t\treturn nil, false\n\t}\n\treturn o.LinkStates, true\n}", "title": "" }, { "docid": "50a7aae9feefc5b79f9d7ef3ba418245", "score": "0.5147159", "text": "func buildSources(terraformSources *[]interface{}) *[]wavefront.Source {\n\twavefrontSources := make([]wavefront.Source, len(*terraformSources))\n\n\tfor i, t := range *terraformSources {\n\t\tt := t.(map[string]interface{})\n\n\t\twavefrontSources[i] = wavefront.Source{\n\t\t\tName: t[\"name\"].(string),\n\t\t\tQuery: t[\"query\"].(string),\n\t\t}\n\t\tif t[\"disabled\"] != nil {\n\t\t\twavefrontSources[i].Disabled = t[\"disabled\"].(bool)\n\t\t}\n\t\tif t[\"scatter_plot_source\"] != nil {\n\t\t\twavefrontSources[i].ScatterPlotSource = t[\"scatter_plot_source\"].(string)\n\t\t}\n\t\tif t[\"query_builder_enabled\"] != nil {\n\t\t\twavefrontSources[i].QuerybuilderEnabled = t[\"query_builder_enabled\"].(bool)\n\t\t}\n\t\tif t[\"source_description\"] != nil {\n\t\t\twavefrontSources[i].SourceDescription = t[\"source_description\"].(string)\n\t\t}\n\t}\n\n\treturn &wavefrontSources\n}", "title": "" }, { "docid": "214747f2aab2eebe56d20c73c7914c6b", "score": "0.5132241", "text": "func (o *FiltersVirtualGateway) GetStatesOk() (*[]string, bool) {\n\tif o == nil || o.States == nil {\n\t\treturn nil, false\n\t}\n\treturn o.States, true\n}", "title": "" }, { "docid": "279b7bb53ea62e6cd9253f184a00dd64", "score": "0.5113907", "text": "func (repo SourceDynamoDBRepository) GetAllSources() ([]handlers.Source, error) {\n\treturn nil, nil\n}", "title": "" }, { "docid": "07253bbcac942141c154fa8b563a3742", "score": "0.5103157", "text": "func (o *InlineResponse20065ProjectFiles) HasFileSource() bool {\n\tif o != nil && o.FileSource != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "0e21cd4d850a17be2d87ed49c6995d73", "score": "0.5098669", "text": "func (target *BuildTarget) AllSources() []BuildInput {\n\tif target.NamedSources == nil {\n\t\treturn target.Sources\n\t}\n\treturn target.allBuildInputs(target.Sources, target.NamedSources)\n}", "title": "" }, { "docid": "ce640b7b8c593368d7b60bba0a2d2c31", "score": "0.5079229", "text": "func (o *DnsEvent) GetSrcMacOk() (*string, bool) {\n\tif o == nil || o.SrcMac == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SrcMac, true\n}", "title": "" }, { "docid": "c6d1c35575e05e48667e23369599cd31", "score": "0.5071533", "text": "func (o *FiltersFlexibleGpu) GetStatesOk() (*[]string, bool) {\n\tif o == nil || o.States == nil {\n\t\treturn nil, false\n\t}\n\treturn o.States, true\n}", "title": "" }, { "docid": "ab9404765306c60038861b53320aca93", "score": "0.5060225", "text": "func (o 
*CreditFreddieMacAssetVOA24) GetVALIDATION_SOURCES() ValidationSources {\n\tif o == nil {\n\t\tvar ret ValidationSources\n\t\treturn ret\n\t}\n\n\treturn o.VALIDATION_SOURCES\n}", "title": "" }, { "docid": "cfe2aee872ab45ff30ac7ff2aa035683", "score": "0.50597125", "text": "func (o *StackEvent) GetResourcesOk() (*[]StackEventResource, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Resources, true\n}", "title": "" }, { "docid": "c041273cfed0cde50852cc6084917412", "score": "0.50525904", "text": "func (c *restClient) ListSources(ctx context.Context, req *vmmigrationpb.ListSourcesRequest, opts ...gax.CallOption) *SourceIterator {\n\tit := &SourceIterator{}\n\treq = proto.Clone(req).(*vmmigrationpb.ListSourcesRequest)\n\tunm := protojson.UnmarshalOptions{AllowPartial: true, DiscardUnknown: true}\n\tit.InternalFetch = func(pageSize int, pageToken string) ([]*vmmigrationpb.Source, string, error) {\n\t\tresp := &vmmigrationpb.ListSourcesResponse{}\n\t\tif pageToken != \"\" {\n\t\t\treq.PageToken = pageToken\n\t\t}\n\t\tif pageSize > math.MaxInt32 {\n\t\t\treq.PageSize = math.MaxInt32\n\t\t} else if pageSize != 0 {\n\t\t\treq.PageSize = int32(pageSize)\n\t\t}\n\t\tbaseUrl, err := url.Parse(c.endpoint)\n\t\tif err != nil {\n\t\t\treturn nil, \"\", err\n\t\t}\n\t\tbaseUrl.Path += fmt.Sprintf(\"/v1/%v/sources\", req.GetParent())\n\n\t\tparams := url.Values{}\n\t\tparams.Add(\"$alt\", \"json;enum-encoding=int\")\n\t\tif req.GetFilter() != \"\" {\n\t\t\tparams.Add(\"filter\", fmt.Sprintf(\"%v\", req.GetFilter()))\n\t\t}\n\t\tif req.GetOrderBy() != \"\" {\n\t\t\tparams.Add(\"orderBy\", fmt.Sprintf(\"%v\", req.GetOrderBy()))\n\t\t}\n\t\tif req.GetPageSize() != 0 {\n\t\t\tparams.Add(\"pageSize\", fmt.Sprintf(\"%v\", req.GetPageSize()))\n\t\t}\n\t\tparams.Add(\"pageToken\", fmt.Sprintf(\"%v\", req.GetPageToken()))\n\n\t\tbaseUrl.RawQuery = params.Encode()\n\n\t\t// Build HTTP headers from client and context metadata.\n\t\thds := append(c.xGoogHeaders, \"Content-Type\", \"application/json\")\n\t\theaders := gax.BuildHeaders(ctx, hds...)\n\t\te := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {\n\t\t\tif settings.Path != \"\" {\n\t\t\t\tbaseUrl.Path = settings.Path\n\t\t\t}\n\t\t\thttpReq, err := http.NewRequest(\"GET\", baseUrl.String(), nil)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\thttpReq.Header = headers\n\n\t\t\thttpRsp, err := c.httpClient.Do(httpReq)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdefer httpRsp.Body.Close()\n\n\t\t\tif err = googleapi.CheckResponse(httpRsp); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tbuf, err := io.ReadAll(httpRsp.Body)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tif err := unm.Unmarshal(buf, resp); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\treturn nil\n\t\t}, opts...)\n\t\tif e != nil {\n\t\t\treturn nil, \"\", e\n\t\t}\n\t\tit.Response = resp\n\t\treturn resp.GetSources(), resp.GetNextPageToken(), nil\n\t}\n\n\tfetch := func(pageSize int, pageToken string) (string, error) {\n\t\titems, nextPageToken, err := it.InternalFetch(pageSize, pageToken)\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\tit.items = append(it.items, items...)\n\t\treturn nextPageToken, nil\n\t}\n\n\tit.pageInfo, it.nextFunc = iterator.NewPageInfo(fetch, it.bufLen, it.takeBuf)\n\tit.pageInfo.MaxSize = int(req.GetPageSize())\n\tit.pageInfo.Token = req.GetPageToken()\n\n\treturn it\n}", "title": "" }, { "docid": "823a537a486381c49452a78a9a7e9b14", "score": "0.5048199", "text": "func (r 
*Instance) SourceDestCheck() pulumi.BoolOutput {\n\treturn (pulumi.BoolOutput)(r.s.State[\"sourceDestCheck\"])\n}", "title": "" }, { "docid": "cd0b904cdc5e526e4afd016847983197", "score": "0.5034478", "text": "func LoadSources(format string, src []byte, more ...[]byte) error {\n\treturn dc.LoadSources(format, src, more...)\n}", "title": "" }, { "docid": "03e1daba3f1c3a4605b8cc839cb4e645", "score": "0.502439", "text": "func (v Newsfeed) GetSuggestedSources(params NewsfeedGetSuggestedSourcesParams) (*NewsfeedGetSuggestedSourcesResponse, error) {\n\tr, err := v.API.Request(\"newsfeed.getSuggestedSources\", params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar resp NewsfeedGetSuggestedSourcesResponse\n\terr = json.Unmarshal(r, &resp)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resp, nil\n}", "title": "" }, { "docid": "feff9219c4f72d03baa88787b1746c3a", "score": "0.50234807", "text": "func (o *EquipmentPsu) GetPsuInputSrcOk() (*string, bool) {\n\tif o == nil || o.PsuInputSrc == nil {\n\t\treturn nil, false\n\t}\n\treturn o.PsuInputSrc, true\n}", "title": "" }, { "docid": "fea09800b4ce7bab8954198f31fe53f4", "score": "0.50164133", "text": "func (o *InlineObject1108) HasSourceData() bool {\n\tif o != nil && o.SourceData != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "19e6f7d8328c3d1d5d40772994441d19", "score": "0.5012943", "text": "func (resp *WAFevents) GetSourceCount(filter string) int {\n\tcount := 0\n\tfor _, v := range resp.Result {\n\t\tif v.Source == filter {\n\t\t\tcount++\n\t\t}\n\t}\n\treturn count\n}", "title": "" }, { "docid": "01253e9b44d2b2789492bc8fe0d036c7", "score": "0.49991408", "text": "func (ld LockedProjectPropertiesDelta) SourceChanged() bool {\n\treturn ld.SourceBefore != ld.SourceAfter\n}", "title": "" }, { "docid": "754205e848c53724e065649408a7b07c", "score": "0.4979866", "text": "func (o *WorkspaceLinks) GetSnippetsOk() (*Link, bool) {\n\tif o == nil || o.Snippets == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Snippets, true\n}", "title": "" }, { "docid": "d0f2be7017e2b8499e5b1b98f9e3fc3d", "score": "0.49745154", "text": "func (o *Endpoint) HasSourceId() bool {\n\tif o != nil && o.SourceId != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "bb966c46abf7284dab09cdd2c5b14d6e", "score": "0.4958463", "text": "func CountSources() int64 {\n\tcount, _ := db.GetEngine(db.DefaultContext).Count(new(Source))\n\treturn count\n}", "title": "" }, { "docid": "2e43a85a4d0fc48a20f69c87a7bc002a", "score": "0.49416935", "text": "func (o *StackEvent) SetSources(v []string) {\n\to.Sources = v\n}", "title": "" }, { "docid": "fa0d328129783884df39c1d80f99d26d", "score": "0.4933279", "text": "func HasUrlAliasViaSource(iSource string) bool {\n\tif has, err := Engine.Where(\"source = ?\", iSource).Get(new(UrlAlias)); err != nil {\n\t\treturn false\n\t} else {\n\t\tif has {\n\t\t\treturn true\n\t\t}\n\t\treturn false\n\t}\n}", "title": "" }, { "docid": "7203dc51a27b39da1cf014096998670f", "score": "0.49248916", "text": "func (c *Client) ExpectLoadedSourcesResponse(t *testing.T) *dap.LoadedSourcesResponse {\n\tt.Helper()\n\tm := c.ExpectMessage(t)\n\tr, ok := m.(*dap.LoadedSourcesResponse)\n\tif !ok {\n\t\tt.Fatalf(\"got %#v, want *dap.LoadedSourcesResponse\", m)\n\t}\n\treturn r\n}", "title": "" }, { "docid": "ed82e666181321c58f5fb0777adce849", "score": "0.49109516", "text": "func (c *Client) ListSources(ctx context.Context, req *vmmigrationpb.ListSourcesRequest, opts ...gax.CallOption) *SourceIterator {\n\treturn 
c.internalClient.ListSources(ctx, req, opts...)\n}", "title": "" }, { "docid": "954e08f21b6857367427d010736653e8", "score": "0.4910134", "text": "func getMapboxSourcesFromURL(sourcesURL string) ([]string, error) {\n\tallSources, err := getSourcesFromSourcesURL(sourcesURL)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tmapboxSources := make([]string, 0)\n\tfor _, source := range allSources {\n\t\tsourceParts := strings.Split(source, \".\")\n\t\tif len(sourceParts) != 2 {\n\t\t\treturn nil, fmt.Errorf(\"Encountered invalid source in the source URL: %v\", source)\n\t\t}\n\t\tusername := sourceParts[0]\n\t\tif username == \"mapbox\" {\n\t\t\tmapboxSources = append(mapboxSources, source)\n\t\t}\n\t}\n\n\treturn mapboxSources, nil\n}", "title": "" } ]
cd3dcbcd89e145b76958c04778519d96
FindOrCreateNode returns corresponding Node. If such Node doesn't exist, create and return new Node.
[ { "docid": "53a1cfc0adad8a6886b403a1402f85f3", "score": "0.8066528", "text": "func (m *Model) FindOrCreateNode(origin entities.Entity) *Node {\n\tif _, ok := m.Nodes[origin.B().Type()]; !ok {\n\t\tm.Nodes[origin.B().Type()] = make(map[uint]*Node)\n\t}\n\tif n, ok := m.Nodes[origin.B().Type()][origin.B().Idx()]; ok {\n\t\treturn n\n\t}\n\tn := NewNode(origin)\n\tm.Nodes[origin.B().Type()][origin.B().Idx()] = n\n\treturn n\n}", "title": "" } ]
[ { "docid": "55bc02e1c8271d01ebbbbb5194cfd372", "score": "0.8179496", "text": "func (g *Graph) FindOrCreateNode(name string) (node *Node, found bool) {\n\tif node, ok := g.allNodes[name]; ok {\n\t\treturn node, true\n\t}\n\n\tnode = &Node{\n\t\tAttributes: NewAttributes(),\n\t\tname: name,\n\t}\n\tg.allNodes[name] = node\n\tg.myNodes[name] = node\n\treturn node, false\n}", "title": "" }, { "docid": "691e29c7456f457bc5aa8e4d61695ba1", "score": "0.762784", "text": "func (t *IETFNetwork_Networks_Network) GetOrCreateNode(NodeId string) (*IETFNetwork_Networks_Network_Node){\n\n\tkey := NodeId\n\n\tif v, ok := t.Node[key]; ok {\n\t\treturn v\n\t}\n\t// Panic if we receive an error, since we should have retrieved an existing\n\t// list member. This allows chaining of GetOrCreate methods.\n\tv, err := t.NewNode(NodeId)\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"GetOrCreateNode got unexpected error: %v\", err))\n\t}\n\treturn v\n}", "title": "" }, { "docid": "ce12054b73f4896377d72c98b6c3e74b", "score": "0.7001891", "text": "func (t *IETFNetwork_Networks_Network_Node) GetOrCreateSupportingNode(NetworkRef string, NodeRef string) (*IETFNetwork_Networks_Network_Node_SupportingNode){\n\n\tkey := IETFNetwork_Networks_Network_Node_SupportingNode_Key{\n\t\tNetworkRef: NetworkRef,\n\t\tNodeRef: NodeRef,\n\t}\n\n\tif v, ok := t.SupportingNode[key]; ok {\n\t\treturn v\n\t}\n\t// Panic if we receive an error, since we should have retrieved an existing\n\t// list member. This allows chaining of GetOrCreate methods.\n\tv, err := t.NewSupportingNode(NetworkRef, NodeRef)\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"GetOrCreateSupportingNode got unexpected error: %v\", err))\n\t}\n\treturn v\n}", "title": "" }, { "docid": "21226875cead6e5cef7b4c65965d9bd8", "score": "0.6383428", "text": "func (k Kademlia) FindNode(ctx context.Context, ID NodeID) (overlay.Node, error) {\n\treturn overlay.Node{}, nil\n}", "title": "" }, { "docid": "edb2de2c315fd115944e8fc32bc14b3e", "score": "0.63387364", "text": "func (dht *DHT) FindNode(ctx Context, key string) (*node.Node, bool, error) {\n\tkeyBytes := base58.Decode(key)\n\tif len(keyBytes) != routing.MaxContactsInBucket {\n\t\treturn nil, false, errors.New(\"invalid key\")\n\t}\n\tht := dht.htFromCtx(ctx)\n\n\tif ht.Origin.ID.Equal(keyBytes) {\n\t\treturn ht.Origin, true, nil\n\t}\n\n\tvar targetNode *node.Node\n\tvar exists = false\n\trouteSet := ht.GetClosestContacts(1, keyBytes, nil)\n\n\tif routeSet.Len() > 0 && routeSet.FirstNode().ID.Equal(keyBytes) {\n\t\ttargetNode = routeSet.FirstNode()\n\t\texists = true\n\t} else if dht.proxy.ProxyNodesCount() > 0 {\n\t\taddress, _ := node.NewAddress(dht.proxy.GetNextProxyAddress())\n\t\ttargetNode = &node.Node{ID: keyBytes, Address: address}\n\t\treturn targetNode, true, nil\n\t} else {\n\t\tlog.Println(\"Node not found in routing table. 
Iterating through insolar...\")\n\t\t_, closest, err := dht.iterate(ctx, routing.IterateFindNode, keyBytes, nil)\n\t\tif err != nil {\n\t\t\treturn nil, false, err\n\t\t}\n\t\tfor i := range closest {\n\t\t\tif closest[i].ID.Equal(keyBytes) {\n\t\t\t\ttargetNode = closest[i]\n\t\t\t\texists = true\n\t\t\t}\n\t\t}\n\t}\n\n\treturn targetNode, exists, nil\n}", "title": "" }, { "docid": "827457c27f0e56dafde6b193174cc510", "score": "0.6294929", "text": "func (g *Graph) Node(id string) Node {\n\tif n, ok := g.findNode(id); ok {\n\t\treturn n\n\t}\n\tn := Node{\n\t\tid: id,\n\t\tseq: g.nextSeq(), // create a new, use root sequence\n\t\tAttributesMap: AttributesMap{attributes: map[string]interface{}{\n\t\t\t\"label\": id}},\n\t\tgraph: g,\n\t}\n\tif g.nodeInitializer != nil {\n\t\tg.nodeInitializer(n)\n\t}\n\t// store local\n\tg.nodes[id] = n\n\treturn n\n}", "title": "" }, { "docid": "2f3fc5e0948d98e51708eb0b91fbf899", "score": "0.613634", "text": "func (ndb *nodeDB) GetNode(hash []byte) (*Node, error) {\n\tndb.mtx.Lock()\n\tdefer ndb.mtx.Unlock()\n\n\tif len(hash) == 0 {\n\t\treturn nil, ErrNodeMissingHash\n\t}\n\n\t// Check the cache.\n\tif elem, ok := ndb.nodeCache[string(hash)]; ok {\n\t\tif ndb.opts.Stat != nil {\n\t\t\tndb.opts.Stat.IncCacheHitCnt()\n\t\t}\n\t\t// Already exists. Move to back of nodeCacheQueue.\n\t\tndb.nodeCacheQueue.MoveToBack(elem)\n\t\treturn elem.Value.(*Node), nil\n\t}\n\n\tif ndb.opts.Stat != nil {\n\t\tndb.opts.Stat.IncCacheMissCnt()\n\t}\n\n\t// Doesn't exist, load.\n\tbuf, err := ndb.db.Get(ndb.nodeKey(hash))\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"can't get node %X: %v\", hash, err)\n\t}\n\tif buf == nil {\n\t\treturn nil, fmt.Errorf(\"Value missing for hash %x corresponding to nodeKey %x\", hash, ndb.nodeKey(hash))\n\t}\n\n\tnode, err := MakeNode(buf)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Error reading Node. 
bytes: %x, error: %v\", buf, err)\n\t}\n\n\tnode.hash = hash\n\tnode.persisted = true\n\tndb.cacheNode(node)\n\n\treturn node, nil\n}", "title": "" }, { "docid": "09e7528a3dbc72829c14f8a0648a1d06", "score": "0.61331654", "text": "func (n *Node) Lookup(ctx context.Context, name string) (fs.Node, error) {\n\tchild, err := n.node.Lookup(name)\n\tif err != nil {\n\t\tdebug.Printf(\"error in lookup: %s\", err)\n\t\treturn nil, fuse.ENOENT\n\t}\n\n\tdebug.Printf(\"child %s: %v\", child.Name, child.Type())\n\treturn NewNode(child), nil\n}", "title": "" }, { "docid": "4d61c5eda01e43c5aba94e79d9127cf9", "score": "0.6099996", "text": "func RepoFindNode(id uint64) Node {\n\tfor _, t := range nodes {\n\t\tif t.ID == id {\n\t\t\treturn t\n\t\t}\n\t}\n\n\t// Return empty Node if not found\n\treturn Node{}\n}", "title": "" }, { "docid": "3ab987d48eb3ec945c8b90dbb9792d2d", "score": "0.60826945", "text": "func (graph *Graph) Node(id string) *Node {\n\tif id == \"\" {\n\t\tpanic(\"invalid node id\")\n\t}\n\n\tnode, found := graph.NodeByID[id]\n\tif !found {\n\t\tnode = NewNode(id)\n\t\tgraph.AddNode(node)\n\t}\n\treturn node\n}", "title": "" }, { "docid": "c936b5c8174e0caee510b05916fa1237", "score": "0.60569566", "text": "func (n *DaosNode) Lookup(name string) (*DaosNode, error) {\n\tdebug.Printf(\"looking up %s in %s\", name, n.oid)\n\tif err := n.openObjectLatest(); err != nil {\n\t\treturn nil, err\n\t}\n\tdefer n.closeObject()\n\n\trawOID, err := n.oh.Get(daos.EpochMax, name, \"OID\")\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"Failed to fetch OID attr for %s\", name)\n\t}\n\tvar oid daos.ObjectID\n\tif err := json.Unmarshal(rawOID, &oid); err != nil {\n\t\treturn nil, errors.Wrapf(err, \"Failed to unmarshal %q\", rawOID)\n\t}\n\tval, err := n.oh.Get(daos.EpochMax, name, \"ModeType\")\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"Failed to fetch ModeType attr for %s\", name)\n\t}\n\treturn &DaosNode{\n\t\toid: &oid,\n\t\tparent: n.oid,\n\t\tfs: n.fs,\n\t\tmodeType: os.FileMode(binary.LittleEndian.Uint32(val)),\n\t\tName: name,\n\t}, nil\n}", "title": "" }, { "docid": "65c369efe1fa486eef1caffe6401a58e", "score": "0.6029726", "text": "func (dht *DHT) FindNode(id *peer.ID /* and a callback? 
*/) error {\n\tpanic(\"Not implemented.\")\n}", "title": "" }, { "docid": "d485dd1f5fc1cd0ecaf97d10a54b92af", "score": "0.6005997", "text": "func locateNode(id int, nodes []Node) Node {\n\treturn nodes[id]\n}", "title": "" }, { "docid": "4ad2ed8cca50b3cf45c7428725a4c489", "score": "0.5976646", "text": "func (con *Connection) CreateNode(value interface{}) (int64, neo4j.Result, error) {\n\tmapping, err := mapping.GetMapping(value)\n\tif err != nil {\n\t\treturn 0, nil, err\n\t}\n\tcyp := createNodeCypher(mapping)\n\n\tvalues := mapping.Values\n\tfor k, v := range mapping.Model {\n\t\tvalues[k] = v\n\t}\n\n\tvar result neo4j.Result\n\tif con.transaction == nil {\n\t\tresult, err = con.session.Run(cyp, values)\n\t} else {\n\t\tresult, err = con.transaction.Run(cyp, values)\n\t}\n\tif err != nil {\n\t\treturn 0, nil, err\n\t}\n\tif result.Err() != nil {\n\t\treturn 0, nil, result.Err()\n\t}\n\trecord, err := result.Single()\n\tif err != nil {\n\t\treturn 0, nil, err\n\t}\n\n\tnodeI, ok := record.Get(\"n\")\n\tif ok {\n\t\tnode, ok := nodeI.(neo4j.Node)\n\t\tif !ok {\n\t\t\treturn 0, nil, errors.New(\"CreateNode: can't convert to a neo4j Node\")\n\t\t}\n\t\treturn node.Id, result, nil\n\t}\n\treturn 0, nil, errors.New(\"CreateNode: can't get record\")\n}", "title": "" }, { "docid": "d5b065eb56fb2a328a2d63df782ce089", "score": "0.5970302", "text": "func (c *Cluster) FindNode(endPoint string) (*core.NodeInfo, bool) {\n\tc.nodesLocker.RLock()\n\tdefer c.nodesLocker.RUnlock()\n\tnode, exists := c.Nodes[endPoint]\n\treturn node, exists\n}", "title": "" }, { "docid": "f88165a348c57c9135b801ae0934f5fc", "score": "0.59235924", "text": "func FindNode(tree Tree, stack *Stack, requiredNode string) (string, error) {\n\n\tcurrentNode := stack.Pop()\n\t// found what we are looking for\n\tif currentNode == requiredNode {\n\t\treturn requiredNode, nil\n\t}\n\n\t// get the current node child nodes and add them to stack\n\tnodeList, ok := tree[currentNode]\n\tif ok {\n\t\tstack.Push(nodeList...)\n\t\tfor stack.Len() != 0 {\n\t\t\tfoundNode, err := FindNode(tree, stack, requiredNode)\n\t\t\tif err == nil {\n\t\t\t\treturn foundNode, nil\n\t\t\t}\n\t\t}\n\t}\n\n\treturn \"\", errors.New(\"Node not found\")\n}", "title": "" }, { "docid": "5b0cf05c2eafd32493813f75ec6b8036", "score": "0.5876656", "text": "func (c *ConsistentHashing) GetNode(key string) Node {\n\thashKey := crc32.ChecksumIEEE([]byte(key))\n\t//search\n\tn := len(c.ring)\n\tc.RLock()\n\tdefer c.RUnlock()\n\ti := sort.Search(n,\n\t\tfunc(i int) bool {\n\t\t\treturn c.ring[i] >= hashKey\n\t\t})\n\tif i < n {\n\t\treturn c.Nodes[c.ring[i]]\n\t}\n\treturn c.Nodes[c.ring[0]]\n}", "title": "" }, { "docid": "67e8cb66f6aeb8245f4d1a1cfe72f517", "score": "0.58428234", "text": "func (t *BinarySearchTree) Find(v int) (*Node, bool) {\n\tt.lock.RLock()\n\tdefer t.lock.RUnlock()\n\n\tif t.Root == nil {\n\t\treturn nil, false\n\t}\n\n\treturn _find(t.Root, v)\n}", "title": "" }, { "docid": "1d0eb37b44ccd044e41751c1188ca762", "score": "0.5801812", "text": "func (sm *RepoSnapshotManager) LookUpNode(relativePath string) (requestedNode *Node, err error) {\n\tif relativePath == \"\" {\n\t\treturn nil, errorutils.CheckErrorf(getLookUpNodeError(relativePath) + \"- unexpected empty path provided to look up\")\n\t}\n\trelativePath = strings.TrimSuffix(relativePath, \"/\")\n\tif relativePath == \".\" {\n\t\trequestedNode = sm.root\n\t\treturn\n\t}\n\n\t// Progress through the children maps till reaching the node that represents the requested path.\n\tdirs := 
strings.Split(relativePath, \"/\")\n\trequestedNode, err = sm.root.findMatchingNode(dirs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif requestedNode == nil {\n\t\treturn nil, errorutils.CheckErrorf(getLookUpNodeError(relativePath))\n\t}\n\treturn\n}", "title": "" }, { "docid": "8dee31b9ffcea9c7942f3e963f5ec739", "score": "0.5799195", "text": "func FindNode(value int, presentNode *Node) *Node {\n\n\t// If value is equal to present node value return present node.\n\tif presentNode.value == value {\n\t\treturn presentNode\n\n\t\t// If value is greater than present node value and present node has\n\t\t// a right child recurse into right child.\n\t} else if presentNode.value <= value && presentNode.rightChild != nil {\n\t\treturn FindNode(value, presentNode.rightChild)\n\n\t\t// Same for right side.\n\t} else if presentNode.leftChild != nil {\n\t\treturn FindNode(value, presentNode.leftChild)\n\t}\n\n\t// If all cases above fail node was not found and nil is returned\n\treturn nil\n}", "title": "" }, { "docid": "0bf0edcb3f8f70c75cf5bdab04fdb070", "score": "0.57916903", "text": "func (t *Tree) FindNode(ID string) *Node {\n\texistsNode := t.ExistsNode(ID)\n\tif !existsNode {\n\t\treturn nil\n\t}\n\tvar wantedNode *Node\n\tfor _, n := range t.nodes {\n\t\tif n.ID == strings.ToLower(ID) {\n\t\t\treturn n\n\t\t}\n\t\twantedNode = t.findNodeByID(n, ID)\n\t\tif wantedNode != nil {\n\t\t\treturn wantedNode\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "5c0aad3b9bdb994e404b4162bd859fee", "score": "0.57906884", "text": "func NewGetNodeNotFound() *GetNodeNotFound {\n\treturn &GetNodeNotFound{}\n}", "title": "" }, { "docid": "bca5ea003ca67111ad5c5a8f25b2a44f", "score": "0.5782793", "text": "func (rt *routingTable) Node(nodeID NodeID, addr net.UDPAddr) (n *node, wasInserted bool) {\n\tn = rt.FindByAddress(addr)\n\tif n != nil {\n\t\treturn n, false\n\t}\n\n\tn = newNode(addr, nodeID)\n\n\trt.Insert(n)\n\treturn n, true\n}", "title": "" }, { "docid": "f2a76c606bd20b97d539c2392097fbd4", "score": "0.5754986", "text": "func (d *HAMTDirectory) Find(ctx context.Context, name string) (ipld.Node, error) {\n\tlnk, err := d.shard.Find(ctx, name)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn lnk.GetNode(ctx, d.dserv)\n}", "title": "" }, { "docid": "7e08ff2708e974a389c8fa0e360739e9", "score": "0.5753601", "text": "func NewGetNodeByNameNotFound() *GetNodeByNameNotFound {\n\n\treturn &GetNodeByNameNotFound{}\n}", "title": "" }, { "docid": "053b6a5640389b251a5722a4528f9010", "score": "0.5730705", "text": "func (brt BaseResourceType) FindOrCreate(tx Tx, unique bool) (*UsedBaseResourceType, error) {\n\tubrt, found, err := brt.Find(tx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif found && ubrt.UniqueVersionHistory == unique {\n\t\treturn ubrt, nil\n\t}\n\n\treturn brt.create(tx, unique)\n}", "title": "" }, { "docid": "55c240103dd74c5209888a2f02f162b8", "score": "0.5680867", "text": "func (d *Dir) Lookup(name string) (node Node, err error) {\n\tpath := path.Join(d.path, name)\n\t// fs.Debugf(path, \"Dir.Lookup\")\n\titem, err := d.lookupNode(name)\n\tif err != nil {\n\t\tif err != ENOENT {\n\t\t\tfs.Errorf(path, \"Dir.Lookup error: %v\", err)\n\t\t}\n\t\treturn nil, err\n\t}\n\t// fs.Debugf(path, \"Dir.Lookup OK\")\n\treturn item.Node, nil\n}", "title": "" }, { "docid": "0b48cf8a12425b12b8b47cb6dcf0637c", "score": "0.5656781", "text": "func (s *Store) CreateNode(host string) (*NodeInfo, error) {\n\tif err := s.exec(internal.Command_CreateNodeCommand, 
internal.E_CreateNodeCommand_Command,\n\t\t&internal.CreateNodeCommand{\n\t\t\tHost: proto.String(host),\n\t\t\tRand: proto.Uint64(uint64(rand.Int63())),\n\t\t},\n\t); err != nil {\n\t\treturn nil, err\n\t}\n\treturn s.NodeByHost(host)\n}", "title": "" }, { "docid": "533c67006623d31f47f0d092777753ae", "score": "0.56367767", "text": "func (g *Graph) GetNode(name string) *Node {\n\tnode := g.nodes[name]\n\tif node == nil || node.name == \"\" {\n\t\treturn g.AddNode(name)\n\t}\n\n\treturn node\n}", "title": "" }, { "docid": "7f9d0882d38aeaef48ac029c03f735bd", "score": "0.563312", "text": "func (e *ExtendedResourceScheduler) FindNode(name string) (*v1.Node, error) {\n\tnode, err := e.Clientset.CoreV1().Nodes().Get(name, metav1.GetOptions{})\n\tif err != nil {\n\t\tglog.Errorf(\"find node failed: %v\", err)\n\t\treturn nil, err\n\t}\n\treturn node, nil\n}", "title": "" }, { "docid": "95b515147d6468c36f640374d7c90208", "score": "0.56300974", "text": "func (s *Scheme) GetNode(name string) (Node, bool) {\n\tif n, found := s.GetHost(name); found {\n\t\treturn n, found\n\t}\n\n\tif n, found := s.GetSwitch(name); found {\n\t\treturn n, found\n\t}\n\n\treturn nil, false\n}", "title": "" }, { "docid": "0285a120c0aa3863e8b73eab91238800", "score": "0.56255984", "text": "func CreateNode(ip string, user string, typ string, name string) (*Node, error) {\n\tkey, err := sshkey()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tport := sshport()\n\tsshClient := ssh.New(ip).WithUser(user).WithKey(key).WithPort(port)\n\n\treturn &Node{\n\t\tIP: ip,\n\t\tUser: user,\n\t\tType: typ,\n\t\tName: name,\n\n\t\tsshClient: sshClient,\n\t}, nil\n}", "title": "" }, { "docid": "1e37c76185ab1aa3b5c01d3d2c9bbfb9", "score": "0.56083816", "text": "func (n *Nodes) Get(nodeID string) (*structs.Node, bool) {\n\tn.mu.RLock()\n\tnode, ok := n.nodes[nodeID]\n\tn.mu.RUnlock()\n\treturn node, ok\n}", "title": "" }, { "docid": "3d8800ebbbe49f3dd22468ac69c7afb3", "score": "0.55890554", "text": "func (tr *Trie) FindNode(word string) *Trie {\n\tletters, node, i := []rune(word), tr, 0\n\tn := len(letters)\n\n\tfor i < n {\n\t\tif exists, value := node.hasChild(letters[i]); exists {\n\t\t\tnode = value\n\t\t} else {\n\t\t\treturn nil\n\t\t}\n\n\t\ti++\n\t}\n\n\treturn node\n}", "title": "" }, { "docid": "0fc73fbf77d19dab44cbe4cbfc5dfd7c", "score": "0.55872226", "text": "func (set *NodeSet) Node(id string) *Node {\n\tset.lock.RLock()\n\tdefer set.lock.RUnlock()\n\n\treturn set.set[id]\n}", "title": "" }, { "docid": "9916cd45df310c5302a78e6d31bd1025", "score": "0.5582463", "text": "func (me *Tree) GetNode(key Comparable) *Node {\n\tif me.Root == nil {\n\t\treturn nil\n\t}\n\treturn me.Root.Find(key)\n}", "title": "" }, { "docid": "53bfb6f4da4ccf2a98d8987e2a1f62cb", "score": "0.5581442", "text": "func newNode() *Node {\n\treturn &Node{\n\t\texists: false,\n\t\tvalue: nil,\n\t\tchild: make(map[rune]*Node),\n\t}\n}", "title": "" }, { "docid": "17d0ce820ece316af2a84e5e825fee3c", "score": "0.5579621", "text": "func (n *Node) Create(ctx context.Context, in *cmd.Node) (*cmd.Node, error) {\n\tn.Lock()\n\tdefer n.Unlock()\n\tn.DummyNode = *in\n\tif n.nodeWatcher == nil {\n\t\t_, cancel := context.WithCancel(context.Background())\n\t\tn.nodeWatcher = &mockNodeWathcer{ch: make(chan *kvstore.WatchEvent, 100), cancel: cancel}\n\t\tn.nodeWatcher.ch <- &kvstore.WatchEvent{Type: kvstore.Created, Object: &n.DummyNode}\n\t}\n\treturn &n.DummyNode, nil\n}", "title": "" }, { "docid": "6a54267e1d8a3b0808e946d93bc117bd", "score": "0.5577275", "text": "func (b 
*BSTNode) FindNode(value int) *BSTNode {\n\tif *b == (BSTNode{}) {\n\t\tfmt.Println(\"The BST is emtpty\")\n\t\treturn nil\n\t}\n\n\t// we have to add check to see if the pointers to right and left are not null\n\t// since this is a method call, and not a function call\n\tif b.Data == value {\n\t\treturn b\n\t} else if value > b.Data && b.Right != nil {\n\t\treturn b.Right.FindNode(value)\n\t} else if value < b.Data && b.Left != nil {\n\t\treturn b.Left.FindNode(value)\n\t} else {\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "c59ff6ab851d33359a15f42dfd6fa6ce", "score": "0.55557907", "text": "func (d *Dag) CreateNode(ctx context.Context, obj interface{}) (format.Node, error) {\n\tsw := &safewrap.SafeWrap{}\n\tn := sw.WrapObject(obj)\n\tif sw.Err != nil {\n\t\treturn nil, fmt.Errorf(\"error wrapping object: %v\", sw.Err)\n\t}\n\treturn n, d.Store.Add(ctx, n)\n}", "title": "" }, { "docid": "dfe4ca2e3599b1d6406bf4c65ba42e0a", "score": "0.5554816", "text": "func (l *List) Find(data string) (*Node, error) {\n\treturn nil, nil\n}", "title": "" }, { "docid": "3dd6cb71839d32e9acc5d72ae5a28ed9", "score": "0.55492413", "text": "func (f *Fs) findNode(rootNode *mega.Node, nodePath string) (*mega.Node, error) {\n\tparts := f.splitNodePath(nodePath)\n\tif parts == nil {\n\t\treturn rootNode, nil\n\t}\n\tnodes, err := f.srv.FS.PathLookup(rootNode, parts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nodes[len(nodes)-1], nil\n}", "title": "" }, { "docid": "fda0bb22197c2a6388c8e64825bc3dce", "score": "0.5545627", "text": "func (graph *GonumGraph) NewNode(successors []Node) (node Node) {\n\tnodeList := graph.NodeList()\n\tids := make([]int, len(nodeList))\n\tfor i, node := range nodeList {\n\t\tids[i] = node.ID()\n\t}\n\n\tnodes := sort.IntSlice(ids)\n\tsort.Sort(&nodes)\n\tfor i, node := range nodes {\n\t\tif i != node {\n\t\t\tgraph.AddNode(GonumNode(i), successors)\n\t\t\treturn GonumNode(i)\n\t\t}\n\t}\n\n\tnewID := len(nodes)\n\tgraph.AddNode(GonumNode(newID), successors)\n\treturn GonumNode(newID)\n}", "title": "" }, { "docid": "fc41404048a21a32d4734f459f29068c", "score": "0.5544338", "text": "func (b *Bus) Node(name string) *Node {\n\tif b == nil {\n\t\treturn nil\n\t}\n\treturn b.nodeLookup[name]\n}", "title": "" }, { "docid": "0e8cdbd8186f9ff6326b9538f5747392", "score": "0.5531553", "text": "func (l *RandomAccessLookup) LookupOrCreate(key flux.GroupKey, fn func() interface{}) interface{} {\n\tvalue, ok := l.Lookup(key)\n\tif !ok {\n\t\tvalue = fn()\n\t\tl.Set(key, value)\n\t}\n\treturn value\n}", "title": "" }, { "docid": "54a0e6541d3d68d6942f9258dad81cdd", "score": "0.55151606", "text": "func (p *RandomAccessParser) GetNode(osmID int64) (gosmparse.Node, error) {\n\n\t// check if we have this element in the cache\n\tif found, ok := p.Cache.Nodes[osmID]; ok {\n\t\treturn found, nil\n\t}\n\n\tp.loadBlob(\"node\", osmID)\n\n\t// check if we have this element in the cache\n\tif found, ok := p.Cache.Nodes[osmID]; ok {\n\t\treturn found, nil\n\t}\n\n\treturn gosmparse.Node{}, fmt.Errorf(\"node not found: %d\", osmID)\n}", "title": "" }, { "docid": "b13e6b4f602d11d4d00a9f344f984b41", "score": "0.54980564", "text": "func (b TBoard) Node() TNode {\n\tif DoesNodeExist(b.Pos) {\n\t\treturn Nodes[b.Pos]\n\t}\n\tn:=b.CreateNode()\n\tNodes[b.Pos]=n\n\treturn n\n}", "title": "" }, { "docid": "86fee9810e7c6fadabe36d8d7aed32d6", "score": "0.5493165", "text": "func (t *Tree) FindNode(data int) (*node, bool) {\n\tif t.root != nil {\n\t\treturn t.root.findNode(data)\n\t}\n\treturn nil, false\n}", 
"title": "" }, { "docid": "5c58cf9a63b5cbef2a98feb3516e56b2", "score": "0.5477815", "text": "func NewUpdateNodeNotFound() *UpdateNodeNotFound {\n\treturn &UpdateNodeNotFound{}\n}", "title": "" }, { "docid": "7a19310d92e60efeb82432b8b90bc799", "score": "0.54664165", "text": "func (l *Lookup) LookupOrCreate(key flux.GroupKey, fn func() interface{}) interface{} {\n\tgroup, ok := l.Lookup(key)\n\tif !ok {\n\t\tgroup = fn()\n\t\tl.Set(key, group)\n\t}\n\treturn group\n}", "title": "" }, { "docid": "418d04f6538a89937b60a7cb79e0bfbb", "score": "0.545541", "text": "func (t *OpenconfigLldp_Lldp_Interfaces_Interface_Neighbors) GetOrCreateNeighbor(Id string) *OpenconfigLldp_Lldp_Interfaces_Interface_Neighbors_Neighbor {\n\n\tkey := Id\n\n\tif v, ok := t.Neighbor[key]; ok {\n\t\treturn v\n\t}\n\t// Panic if we receive an error, since we should have retrieved an existing\n\t// list member. This allows chaining of GetOrCreate methods.\n\tv, err := t.NewNeighbor(Id)\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"GetOrCreateNeighbor got unexpected error: %v\", err))\n\t}\n\treturn v\n}", "title": "" }, { "docid": "4b8729bef34a7fe90356e603710d3a23", "score": "0.5438211", "text": "func (s *Basic) GetOrCreate(key string, value []byte) (actual []byte, created bool) {\n\ts.Lock()\n\tdefer s.Unlock()\n\n\tvar found bool\n\tactual, found = s.data[key]\n\n\tif !found {\n\t\ts.data[key] = value\n\t\treturn value, true\n\t}\n\n\treturn actual, false\n}", "title": "" }, { "docid": "b22994f968f3095bda6501491e4f2908", "score": "0.54352796", "text": "func (g Graph) Node(id int64) graph.Node {\n\tif !IsValid(g) {\n\t\treturn nil\n\t}\n\tif id < 0 || numberOf(g) <= id {\n\t\treturn nil\n\t}\n\treturn simple.Node(id)\n}", "title": "" }, { "docid": "05a3cc72c29acf38588d325d430ea9ea", "score": "0.54344076", "text": "func (g G) Node(i int) *V {\n\tif !g.Has(V(i)) {\n\t\treturn nil\n\t}\n\tfor _, v := range g.Nodes() {\n\t\tif i == v.Value() {\n\t\t\treturn &v\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8d8f389ce78fef9c288cc0389a707c20", "score": "0.54264003", "text": "func (c Client) GetNode(id string) (*Node, error) {\n\tapi, err := c.getAPIClient()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treq := &ec2.DescribeInstancesInput{\n\t\tInstanceIds: []*string{aws.String(id)},\n\t}\n\tresp, err := api.DescribeInstances(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif len(resp.Reservations) != 1 {\n\t\treturn nil, fmt.Errorf(\"Attempted to get a single node, but API returned %d reservations\", len(resp.Reservations))\n\t}\n\tif len(resp.Reservations[0].Instances) != 1 {\n\t\treturn nil, fmt.Errorf(\"Attempted to get a single node, but API returned %d instances\", len(resp.Reservations[0].Instances))\n\t}\n\tinstance := resp.Reservations[0].Instances[0]\n\n\tvar publicIP string\n\tif instance.PublicIpAddress != nil {\n\t\tpublicIP = *instance.PublicIpAddress\n\t}\n\treturn &Node{\n\t\tPrivateDNSName: *instance.PrivateDnsName,\n\t\tPrivateIP: *instance.PrivateIpAddress,\n\t\tPublicIP: publicIP,\n\t\tSSHUser: defaultSSHUserForAMI(AMI(*instance.ImageId)),\n\t\tImageID: *instance.ImageId,\n\t}, nil\n}", "title": "" }, { "docid": "a624c1ccb5332df83a0dbcbb997ae2e2", "score": "0.5410439", "text": "func NodeCreate(org, nodeIdTok, node, token, userPw, arch string, nodeName string, nodeType string, checkNode bool) {\n\t// get message printer\n\tmsgPrinter := i18n.GetMessagePrinter()\n\n\t// They should specify either nodeIdTok (for backward compat) or node and token, but not both\n\tvar nodeId, nodeToken string\n\tif 
node != \"\" || token != \"\" {\n\t\tif node == \"\" || token == \"\" {\n\t\t\tcliutils.Fatal(cliutils.CLI_INPUT_ERROR, msgPrinter.Sprintf(\"if node or token are specified then they both must be specified\"))\n\t\t}\n\t\t// at this point we know both node and token were specified\n\t\tif nodeIdTok != \"\" {\n\t\t\tcliutils.Fatal(cliutils.CLI_INPUT_ERROR, msgPrinter.Sprintf(\"do not specify both the -n flag and the node and token positional arguments. They mean the same thing.\"))\n\t\t}\n\t\tnodeId = node\n\t\tnodeToken = token\n\t} else {\n\t\t// here we know neither node nor token were specified\n\t\tif nodeIdTok == \"\" {\n\t\t\tcliutils.Fatal(cliutils.CLI_INPUT_ERROR, msgPrinter.Sprintf(\"either the node and token positional arguments, or the -n flag must be specified.\"))\n\t\t}\n\t\tnodeId, nodeToken = cliutils.SplitIdToken(nodeIdTok)\n\t}\n\n\tif nodeName == \"\" {\n\t\tnodeName = nodeId\n\t}\n\n\tcliutils.SetWhetherUsingApiKey(userPw)\n\texchUrlBase := cliutils.GetExchangeUrl()\n\n\t// validate the node type\n\tif nodeType != \"\" && nodeType != persistence.DEVICE_TYPE_DEVICE && nodeType != persistence.DEVICE_TYPE_CLUSTER {\n\t\tcliutils.Fatal(cliutils.CLI_INPUT_ERROR, msgPrinter.Sprintf(\"Wrong node type specified: %v. It must be 'device' or 'cluster'.\", nodeType))\n\t}\n\n\t//Check if the node exists or not\n\tvar nodes ExchangeNodes\n\tnodeExists := false\n\tif checkNode {\n\t\thttpCode := cliutils.ExchangeGet(\"Exchange\", exchUrlBase, \"orgs/\"+org+\"/nodes/\"+nodeId, cliutils.OrgAndCreds(org, userPw), []int{200, 404, 401}, &nodes)\n\t\tif httpCode == 401 {\n\t\t\t// Invalid creds means the user doesn't exist, or pw is wrong\n\t\t\tuser, _ := cliutils.SplitIdToken(userPw)\n\t\t\tcliutils.Fatal(cliutils.CLI_INPUT_ERROR, msgPrinter.Sprintf(\"user '%s' does not exist with the specified password.\", user))\n\t\t} else if httpCode == 200 {\n\t\t\tnodeExists = true\n\t\t}\n\t}\n\n\tvar httpCode int\n\tvar resp struct {\n\t\tCode string `json:\"code\"`\n\t\tMsg string `json:\"msg\"`\n\t}\n\tif nodeExists {\n\t\tmsgPrinter.Printf(\"Node %v exists. 
Only the node token will be updated.\", nodeId)\n\t\tmsgPrinter.Println()\n\t\tif arch != \"\" || nodeType != \"\" {\n\t\t\tmsgPrinter.Printf(\"The node arch and node type will be ignored if they are specified.\")\n\t\t\tmsgPrinter.Println()\n\t\t}\n\t\tpatchNodeReq := NodeExchangePatchToken{Token: nodeToken}\n\t\thttpCode = cliutils.ExchangePutPost(\"Exchange\", http.MethodPatch, cliutils.GetExchangeUrl(), \"orgs/\"+org+\"/nodes/\"+nodeId, cliutils.OrgAndCreds(org, userPw), []int{201, 401, 403}, patchNodeReq, &resp)\n\t} else {\n\t\t// create the node with given node type\n\t\tif nodeType == \"\" {\n\t\t\tnodeType = persistence.DEVICE_TYPE_DEVICE\n\t\t}\n\t\tputNodeReq := exchange.PutDeviceRequest{Token: nodeToken, Name: nodeName, NodeType: nodeType, SoftwareVersions: make(map[string]string), PublicKey: []byte(\"\"), Arch: arch}\n\t\thttpCode = cliutils.ExchangePutPost(\"Exchange\", http.MethodPut, exchUrlBase, \"orgs/\"+org+\"/nodes/\"+nodeId+\"?\"+cliutils.NOHEARTBEAT_PARAM, cliutils.OrgAndCreds(org, userPw), []int{201, 401, 403}, putNodeReq, &resp)\n\t}\n\n\tif httpCode == 401 {\n\t\t// Invalid creds means the user doesn't exist, or pw is wrong\n\t\tuser, _ := cliutils.SplitIdToken(userPw)\n\t\tcliutils.Fatal(cliutils.CLI_INPUT_ERROR, msgPrinter.Sprintf(\"user '%s' does not exist with the specified password.\", user))\n\t} else if httpCode == 403 {\n\t\t// Access denied means either the node exists and is owned by another user or it doesn't exist but user reached the maxNodes threshold.\n\t\tvar nodesOutput exchange.GetDevicesResponse\n\t\thttpCode = cliutils.ExchangeGet(\"Exchange\", cliutils.GetExchangeUrl(), \"orgs/\"+org+\"/nodes/\"+nodeId, cliutils.OrgAndCreds(org, userPw), []int{200, 404}, &nodesOutput)\n\t\tif httpCode == 200 {\n\t\t\t// Node exists. Figure out who is the owner and tell the user\n\t\t\tvar ok bool\n\t\t\tvar ourNode exchange.Device\n\t\t\tif ourNode, ok = nodesOutput.Devices[cliutils.OrgAndCreds(org, nodeId)]; !ok {\n\t\t\t\tcliutils.Fatal(cliutils.INTERNAL_ERROR, msgPrinter.Sprintf(\"key '%s' not found in exchange nodes output\", cliutils.OrgAndCreds(org, nodeId)))\n\t\t\t}\n\t\t\tcliutils.Fatal(cliutils.CLI_INPUT_ERROR, msgPrinter.Sprintf(\"can not update existing node %s because it is owned by another user (%s)\", nodeId, ourNode.Owner))\n\t\t} else if httpCode == 404 {\n\t\t\t// Node doesn't exist. MaxNodes reached or 403 means real access denied, display the message from the exchange\n\t\t\tif resp.Msg != \"\" {\n\t\t\t\tfmt.Println(resp.Msg)\n\t\t\t}\n\t\t}\n\t} else if httpCode == 201 {\n\t\tif nodeExists {\n\t\t\tmsgPrinter.Printf(\"Node %v updated.\", nodeId)\n\t\t\tmsgPrinter.Println()\n\t\t} else {\n\t\t\tif resp.Msg != \"\" {\n\t\t\t\tfmt.Println(resp.Msg)\n\t\t\t}\n\t\t\tmsgPrinter.Printf(\"Node %v created.\", nodeId)\n\t\t\tmsgPrinter.Println()\n\t\t}\n\t}\n}", "title": "" }, { "docid": "200f4abef2796ac569d1dd6558bf6b92", "score": "0.5402565", "text": "func (nb *BridgeBuilder) FindOrCreateNetwork(nw *Network) error {\n\tvar err error\n\n\tbridgeName := fmt.Sprintf(bridgeNameFormat, nw.Name, nw.SharedENI.GetLinkIndex())\n\n\t// Find the bridge network namespace. 
If none is specified, use the host network namespace.\n\tif nw.BridgeNetNSPath != \"\" {\n\t\tvar bridgeNetNS netns.NetNS\n\n\t\tlog.Infof(\"Searching for bridge netns %s.\", nw.BridgeNetNSPath)\n\t\tbridgeNetNS, err = netns.GetNetNSByName(nw.BridgeNetNSPath)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Failed to find bridge netns %s: %v.\", nw.BridgeNetNSPath, err)\n\t\t\treturn err\n\t\t}\n\n\t\t// Move the ENI link to the bridge network namespace.\n\t\tlog.Infof(\"Moving link %s to netns %s.\", nw.SharedENI, nw.BridgeNetNSPath)\n\t\terr = nw.SharedENI.SetNetNS(bridgeNetNS)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Failed to move link: %v.\", err)\n\t\t\treturn err\n\t\t}\n\n\t\t// Connect the ENI to a bridge in the bridge network namespace.\n\t\terr = bridgeNetNS.Run(func() error {\n\t\t\tnw.BridgeIndex, err = nb.createBridge(\n\t\t\t\tbridgeName, nw.BridgeType, nw.SharedENI, nw.ENIIPAddresses)\n\t\t\treturn err\n\t\t})\n\t} else {\n\t\t// Connect the ENI to a bridge.\n\t\tnw.BridgeIndex, err = nb.createBridge(\n\t\t\tbridgeName, nw.BridgeType, nw.SharedENI, nw.ENIIPAddresses)\n\t}\n\n\tif err != nil {\n\t\tlog.Errorf(\"Failed to create bridge: %v.\", err)\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "c878ddc3fb2d79056ed0742d6a426fd3", "score": "0.5396699", "text": "func (s Suite) GetNodeByName(name string) (runtime.Node, error) {\n\tfor _, n := range s.Nodes {\n\t\tif n.Name == name {\n\t\t\treturn n, nil\n\t\t}\n\t}\n\treturn runtime.Node{}, fmt.Errorf(\"could not find node with name %s\", name)\n}", "title": "" }, { "docid": "6d0952f5b0239d28f8c9c12508e7ac17", "score": "0.5393842", "text": "func (g *Graph) GetNode(name string) *Node {\r\n\tfor i := range g.nodes {\r\n\t\tif g.nodes[i].name == name {\r\n\t\t\treturn g.nodes[i]\r\n\t\t}\r\n\t}\r\n\treturn nil\r\n}", "title": "" }, { "docid": "10a9016ef3b760925f1beb3b1c21ef70", "score": "0.538557", "text": "func (node *Node) FindNode(args FindNodeArgs, reply *FindNodeReply) error {\n\tnode.logger.Printf(\"FindNode from %s\", args.Source.String())\n\tcontact := NewContact(args.Source)\n\tif contact == nil {\n\t\treturn errors.New(\"Couldn't hash IP address\")\n\t}\n\tnode.rt.add(*contact)\n\n\tkeyInt := new(big.Int)\n\tkeyInt.SetString(args.Key, keyBase)\n\n\tnearest := node.rt.findKNearestContacts(*keyInt)\n\t*reply = FindNodeReply{Contacts: nearest}\n\tnode.logger.Printf(\"Processed FindNode from %s\", args.Source.String())\n\treturn nil\n}", "title": "" }, { "docid": "2044eced30f0b8528e9cc490634813d7", "score": "0.53853226", "text": "func (t *IETFNetwork_Networks_Network_Node) GetOrCreateTerminationPoint(TpId string) (*IETFNetwork_Networks_Network_Node_TerminationPoint){\n\n\tkey := TpId\n\n\tif v, ok := t.TerminationPoint[key]; ok {\n\t\treturn v\n\t}\n\t// Panic if we receive an error, since we should have retrieved an existing\n\t// list member. 
This allows chaining of GetOrCreate methods.\n\tv, err := t.NewTerminationPoint(TpId)\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"GetOrCreateTerminationPoint got unexpected error: %v\", err))\n\t}\n\treturn v\n}", "title": "" }, { "docid": "4454018aca5799478977fddfd8a9ce56", "score": "0.53722656", "text": "func (n *Node) GetNode() *Node { return n }", "title": "" }, { "docid": "bcbc7cf921b04776fbabac7f07b763cf", "score": "0.53675526", "text": "func (d Database) CreateNode(obj Label) *Query {\n\tquery := Query{\n\t\taction: typeWrite,\n\t\tdb: d.db,\n\t\tnewNode: obj,\n\t}\n\n\treturn &query\n}", "title": "" }, { "docid": "cd52b260a8a3692452d2814fddd01c71", "score": "0.53490615", "text": "func (node *Node) GetNode(keys ...interface{}) *Node {\n\treturn node.GetNodeDefault(nil, keys...)\n}", "title": "" }, { "docid": "78de284ccbf5c5f9fcdf3aa71282048a", "score": "0.534772", "text": "func CreateNode(value int) *Node {\n\t//here you returned a local variable, it will cause problem in C++\n\t//but in GO, it's OK\n\t// so here comes the question, where is the treenode? stack or heap?\n\t//in GO ,you don't need to know, it depends on the runtime env.\n\t//compiler can decide where to put it\n\treturn &Node{Value: value}\n}", "title": "" }, { "docid": "7796f1bdec24c92f608c9448ff54f459", "score": "0.534739", "text": "func (t *IETFNetwork_Networks_Network) GetNode(NodeId string) (*IETFNetwork_Networks_Network_Node){\n\n\tif t == nil {\n\t\treturn nil\n\t}\n\n key := NodeId\n\n if lm, ok := t.Node[key]; ok {\n return lm\n }\n return nil\n}", "title": "" }, { "docid": "79ee0f92602227589d2816f327a13603", "score": "0.5346858", "text": "func LookupNode(ctx *pulumi.Context, args *LookupNodeArgs, opts ...pulumi.InvokeOption) (*LookupNodeResult, error) {\n\topts = internal.PkgInvokeDefaultOpts(opts)\n\tvar rv LookupNodeResult\n\terr := ctx.Invoke(\"aws-native:managedblockchain:getNode\", args, &rv, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &rv, nil\n}", "title": "" }, { "docid": "14c01533c1d7adc27eeefcdee62de764", "score": "0.5344441", "text": "func (q *Queue) Find(f func(*Node) bool) *Node {\n\treturn q.Search(f)\n}", "title": "" }, { "docid": "76a043cb1ae739f90694c28862ea584b", "score": "0.53438765", "text": "func TestStore_CreateNode_ErrNodeExists(t *testing.T) {\n\ts := MustOpenStore()\n\tdefer s.Close()\n\n\t// Create node.\n\tif _, err := s.CreateNode(\"host0\"); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// Create it again.\n\tif _, err := s.CreateNode(\"host0\"); err != meta.ErrNodeExists {\n\t\tt.Fatalf(\"unexpected error: %s\", err)\n\t}\n}", "title": "" }, { "docid": "21ed776cdd5273c2e94a1732fc82c354", "score": "0.5328765", "text": "func GetNode(id string) (*models.Node, bool) {\n\trefreshDB()\n\n\tvar n models.Node\n\tresult := db.Where(\"id = ?\", id).Or(\"id_short = ?\", id).Or(\"name = ?\", id).First(&n)\n\tif errors.Is(result.Error, gorm.ErrRecordNotFound) {\n\t\treturn nil, false\n\t}\n\n\treturn &n, true\n}", "title": "" }, { "docid": "80c7f9cce6014f490f861bf212541e51", "score": "0.5325137", "text": "func getNode(gatewayID, nodeID string) (*nodeML.Node, error) {\n\tid := getNodeStoreID(gatewayID, nodeID)\n\n\ttoNode := func(item interface{}) (*nodeML.Node, error) {\n\t\tif node, ok := item.(*nodeML.Node); ok {\n\t\t\treturn node, nil\n\t\t}\n\t\treturn nil, fmt.Errorf(\"unknown data received in the place node: %T\", item)\n\t}\n\n\tdata := nodeStore.Get(id)\n\tif data != nil {\n\t\treturn toNode(data)\n\t}\n\n\terr := updateNode(gatewayID, nodeID)\n\tif err != nil 
{\n\t\treturn nil, err\n\t}\n\tdata = nodeStore.Get(id)\n\tif data != nil {\n\t\treturn toNode(data)\n\t}\n\treturn nil, fmt.Errorf(\"node not available. gatewayID:%s, nodeID:%s\", gatewayID, nodeID)\n}", "title": "" }, { "docid": "f436f04be443dfe40975286a86097b8d", "score": "0.53214145", "text": "func NewNodeUpdateNotFound() *NodeUpdateNotFound {\n\treturn &NodeUpdateNotFound{}\n}", "title": "" }, { "docid": "a4a78630e3f1b675a6039111d8f4a06f", "score": "0.53211296", "text": "func (d *Database) GetNode(id int64) (*multitree.Node, error) {\n\tvar node *multitree.Node\n\terr := d.execTxFunc(func(tx *sql.Tx) error {\n\t\tn, err := getNode(tx, id)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tnode = n\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn node, nil\n}", "title": "" }, { "docid": "c141cac58f45097f073ca786cd9737f1", "score": "0.5319688", "text": "func (group *NodeGroup) GetNode(i int) Node {\n\tgroup.mux.RLock()\n\tdefer group.mux.RUnlock()\n\n\tif i < 0 || group == nil || len(group.nodes) <= i {\n\t\treturn Node{}\n\t}\n\treturn group.nodes[i].Clone()\n}", "title": "" }, { "docid": "d6093cb3afd9f679b1585e444d180cc2", "score": "0.5315284", "text": "func (h *ConsistentHashRing) GetNode(s string) string {\n\tif len(h.nodes) == 0 {\n\t\treturn \"\"\n\t}\n\n\tsport_key := genHashKey(s)\n\ti := sort.Search(len(h.nodes), func(i int) bool { return h.nodes[i].spotValue >= sport_key })\n\n\tif i == len(h.nodes) {\n\t\ti = 0\n\t}\n\n\treturn h.nodes[i].nodeKey\n}", "title": "" }, { "docid": "d3d01e8f98049b4b907cb48803f22280", "score": "0.5315057", "text": "func CreateNode(addr string) *Node {\n\treturn &Node{\n\t\tAddr: addr,\n\t\tClient: nil,\n\n\t\tlock: make(chan bool, 1),\n\t}\n}", "title": "" }, { "docid": "6d453cfbfaf073b21cb498583598d7ca", "score": "0.5310691", "text": "func (d *Database) CreateNode(name string, parentID int64) (int64, error) {\n\tvar childID int64\n\ttxf := func(tx *sql.Tx) error {\n\t\tid, err := createNode(tx, name, parentID)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tchildID = id\n\t\treturn nil\n\t}\n\tif err := d.execTxFunc(txf); err != nil {\n\t\treturn 0, err\n\t}\n\treturn childID, nil\n}", "title": "" }, { "docid": "72b4141539eb57a1fed5242963b52756", "score": "0.53078014", "text": "func (n *node) findNode(key string) *node {\n\tfor _, r := range key {\n\t\tn = n.children[r]\n\t\tif n == nil {\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn n\n}", "title": "" }, { "docid": "bf33678e015f85cba38cab9ba602d693", "score": "0.5305502", "text": "func (n *Node) Find(ptr interface{}) *Node {\r\n\tkey := reflect.ValueOf(ptr)\r\n\tif key.Kind() != reflect.Ptr {\r\n\t\tpanic(\"expected a pointer\")\r\n\t}\r\n\treturn n.findNode(key)\r\n}", "title": "" }, { "docid": "a823b343eac6d19f827aa4d49b6c90cc", "score": "0.530389", "text": "func (chord *ChordServer) FindClosestNode(id []byte) Node {\n\tchord.rwmu.RLock()\n\tfingerTable := chord.fingerTable\n\tchord.rwmu.RUnlock()\n\tfor i := numBits - 1; i >= 0; i-- {\n\t\tchord.rwmu.RLock()\n\t\tif fingerTable[i].ID != nil && between(fingerTable[i].ID, chord.node.ID, id) {\n\t\t\tchord.rwmu.RUnlock()\n\t\t\treturn fingerTable[i]\n\t\t}\n\t\tchord.rwmu.RUnlock()\n\t}\n\treturn chord.node\n}", "title": "" }, { "docid": "24cd599ecccd6dd64c3c41d1947cdaf6", "score": "0.5303488", "text": "func (b TBoard) CreateNode() TNode {\n\tn:=TNode{}\n\tn.Moves=b.CreateMoveList()\n\treturn n\n}", "title": "" }, { "docid": "080b6f3cd83034d1a92c90699b1a8228", "score": "0.52920234", "text": "func findNodeForEdge(graph 
*VertexGraph, fromVtx *GeoCoord, toVtx *GeoCoord) *VertexNode {\n\t// Determine location\n\tindex := _hashVertex(fromVtx, graph.res, graph.numBuckets)\n\t// Check whether there's an existing node in that spot\n\tnode := graph.buckets[index]\n\n\t// Look through the list and see if we find the edge\n\tfor node != nil {\n\t\tif geoAlmostEqual(&node.from, fromVtx) && (toVtx == nil || geoAlmostEqual(&node.to, toVtx)) {\n\t\t\treturn node\n\t\t}\n\t\tnode = node.next\n\t}\n\t// Iteration lookup fail\n\treturn nil\n}", "title": "" }, { "docid": "89fc44188f3bced3f1d7d8a26a61ce8e", "score": "0.52858996", "text": "func (t *IETFNetwork_Networks_Network) NewNode(NodeId string) (*IETFNetwork_Networks_Network_Node, error){\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Node == nil {\n\t\tt.Node = make(map[string]*IETFNetwork_Networks_Network_Node)\n\t}\n\n\tkey := NodeId\n\n\t// Ensure that this key has not already been used in the\n\t// list. Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Node[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Node\", key)\n\t}\n\n\tt.Node[key] = &IETFNetwork_Networks_Network_Node{\n\t\tNodeId: &NodeId,\n\t}\n\n\treturn t.Node[key], nil\n}", "title": "" }, { "docid": "8004ef05be872042fc7621997e1b4858", "score": "0.527474", "text": "func NewNode(address string) *Node {\n\tnode := new(Node)\n\taddr, err := net.ResolveTCPAddr(\"tcp\", address)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn nil\n\t}\n\n\tnode.addr = *addr\n\n\thash := sha1.Sum([]byte(addr.String()))\n\n\tnode.id = *big.NewInt(0)\n\tnode.id.SetBytes(hash[:])\n\t// TODO: take in k and tRefresh arguments - for now just hardcoding default\n\tnode.rt = NewRoutingTable(node)\n\n\t// Disable logging if necessary (see option in globals.go)\n\tif !loggingEnable {\n\t\tlog.SetOutput(ioutil.Discard)\n\t\tlog.SetFlags(0)\n\t\tnode.logger = log.New(ioutil.Discard, \"INFO: \", log.Ldate|log.Ltime|log.Lshortfile)\n\t} else {\n\t\tnode.logger = log.New(os.Stdout, \"INFO: \", log.Ldate|log.Ltime|log.Lshortfile)\n\t}\n\n\tnode.ht = *NewKVStore()\n\n\tfmt.Println(caching_on)\n\n\treturn node\n}", "title": "" }, { "docid": "8ec18313981711acc4bc23476a99b905", "score": "0.5267618", "text": "func (n *Node) GetNode(name string) (node *Node, err error) {\n\tval, err := n.Get(name)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tswitch vt := val.(type) {\n\tcase *Node:\n\t\treturn vt, nil\n\t}\n\n\treturn nil, errTypeMismatch\n}", "title": "" }, { "docid": "d568fb4d2b09ef2f65d6dc4c2a2f7c44", "score": "0.5265627", "text": "func NodeCreate(w http.ResponseWriter, r *http.Request) {\n\tvar node Node\n\tbody, err := ioutil.ReadAll(io.LimitReader(r.Body, 1048576))\n\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif err := r.Body.Close(); err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif err := json.Unmarshal(body, &node); err != nil {\n\t\tw.Header().Set(\"Content-Type\", \"application/json;charset=UTF-8\")\n\t\tw.WriteHeader(422) // unprocessable entity\n\t\tif err := json.NewEncoder(w).Encode(err); err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\n\tt := RepoCreateNode(node)\n\tw.Header().Set(\"Content-Type\", \"application/json;charset=UTF-8\")\n\tw.WriteHeader(http.StatusCreated)\n\tif err := json.NewEncoder(w).Encode(t); err != nil {\n\t\thttp.Error(w, 
err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "e50fa7e2a77a8d899e8d40b4604d1242", "score": "0.5265462", "text": "func (c Client) GetNode(id string) (*Node, error) {\n\tapi, err := c.getEC2APIClient()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treq := &ec2.DescribeInstancesInput{\n\t\tInstanceIds: []*string{aws.String(id)},\n\t}\n\tvar resp *ec2.DescribeInstancesOutput\n\terr = retry.WithBackoff(func() error {\n\t\tvar err2 error\n\t\tresp, err2 = api.DescribeInstances(req)\n\t\treturn err2\n\t}, exponentialBackoffMaxEC2Attempts)\n\tif err != nil {\n\t\tfmt.Println(\"Failed to get node information\")\n\t\treturn nil, err\n\t}\n\tif len(resp.Reservations) != 1 {\n\t\treturn nil, fmt.Errorf(\"Attempted to get a single node, but API returned %d reservations\", len(resp.Reservations))\n\t}\n\tif len(resp.Reservations[0].Instances) != 1 {\n\t\treturn nil, fmt.Errorf(\"Attempted to get a single node, but API returned %d instances\", len(resp.Reservations[0].Instances))\n\t}\n\tinstance := resp.Reservations[0].Instances[0]\n\n\tvar imageID string\n\tif instance.ImageId != nil {\n\t\timageID = *instance.ImageId\n\t}\n\tvar privateDNSName string\n\tif instance.PrivateDnsName != nil {\n\t\tprivateDNSName = *instance.PrivateDnsName\n\t}\n\tvar privateIP string\n\tif instance.PrivateIpAddress != nil {\n\t\tprivateIP = *instance.PrivateIpAddress\n\t}\n\tvar publicIP string\n\tif instance.PublicIpAddress != nil {\n\t\tpublicIP = *instance.PublicIpAddress\n\t}\n\n\treturn &Node{\n\t\tImageID: imageID,\n\t\tPrivateDNSName: privateDNSName,\n\t\tPrivateIP: privateIP,\n\t\tPublicIP: publicIP,\n\t\tSSHUser: defaultSSHUserForAMI(AMI(*instance.ImageId)),\n\t\tState: instance.State.GoString(),\n\t}, nil\n}", "title": "" }, { "docid": "e6488621d052c15d927176969dca54bd", "score": "0.5261838", "text": "func (table *SymbolTable) LookupNode(node ast.Node) (*Symbol, bool) {\n\tsymbol, ok := table.symbolsByNode[node]\n\treturn symbol, ok\n}", "title": "" }, { "docid": "dd77b6c53a89171c1883a8e90c698efc", "score": "0.5259691", "text": "func (node *DfsNode) Lookup(ctx context.Context, name string, out *fuse.EntryOut) (*fs.Inode, syscall.Errno) {\n\tfmt.Println(\"lookup:\", node, \";\", name)\n\n\tpath := node.Path()\n\n\tout.Mode = 0777\n\n\t// Find the appropriate storage server\n\topClient := node.Client.NamingServerClient\n\n\tinfo := pb.ListDirectoryRequest{\n\t\tPath: path,\n\t}\n\n\tresult, err := opClient.ListDirectory(ctx, &info)\n\tif err != nil {\n\t\tprintln(\"error occurred during lookup:\", err.Error())\n\t\treturn nil, syscall.EAGAIN\n\t}\n\n\tfor _, n := range result.Contents {\n\t\tif n.Name != name {\n\t\t\tcontinue\n\t\t}\n\n\t\tmode := fuse.S_IFREG\n\t\tif n.Mode == pb.NodeMode_DIRECTORY {\n\t\t\tmode = fuse.S_IFDIR\n\t\t}\n\n\t\toperations := NewDfsNode(node.Client, name)\n\t\tstable := fs.StableAttr{Mode: uint32(mode)}\n\t\tchild := node.NewInode(ctx, operations, stable)\n\n\t\treturn child.EmbeddedInode(), 0\n\t}\n\n\treturn nil, syscall.ENOENT\n}", "title": "" }, { "docid": "eecbff9f61333719ea8842182cbaeaeb", "score": "0.5259475", "text": "func (m *Model) FindOrCreateEdge(origin entities.Connectable) *Edge {\n\tif _, ok := m.Edges[origin.B().Type()]; !ok {\n\t\tm.Edges[origin.B().Type()] = make(map[uint]*Edge)\n\t}\n\tif e, ok := m.Edges[origin.B().Type()][origin.B().Idx()]; ok {\n\t\treturn e\n\t}\n\tfrom, to := m.FindOrCreateNode(origin.From()), m.FindOrCreateNode(origin.To())\n\te := NewEdge(origin, from, 
to)\n\tm.Edges[origin.B().Type()][origin.B().Idx()] = e\n\treturn e\n}", "title": "" }, { "docid": "00ee55eb5cccf5a9ac1a7615eae917e8", "score": "0.52550185", "text": "func GetNode(id primitive.TypedID) (*Node, bool) {\n\tn, ok := globalGraph.GetNode(id)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\treturn &Node{n}, true\n}", "title": "" }, { "docid": "fd5f52deb7c36b95dfc6b41dbbd2f0cb", "score": "0.5248346", "text": "func (l *Loader) LookupOrCreate(name string, version int, syms *sym.Symbols) *sym.Symbol {\n\ti := l.Lookup(name, version)\n\tif i != 0 {\n\t\t// symbol exists\n\t\tif int(i) < len(l.Syms) && l.Syms[i] != nil {\n\t\t\treturn l.Syms[i] // already loaded\n\t\t}\n\t\tif l.IsExternal(i) {\n\t\t\tpanic(\"Can't load an external symbol.\")\n\t\t}\n\t\treturn l.LoadSymbol(name, version, syms)\n\t}\n\ti = l.AddExtSym(name, version)\n\ts := syms.Newsym(name, version)\n\tl.Syms[i] = s\n\treturn s\n}", "title": "" }, { "docid": "5a042808f37893ea78b106831870f5e6", "score": "0.52418274", "text": "func NewNode() Node {\n\treturn Node{}\n}", "title": "" }, { "docid": "64ed681361b8a27cdf04f426b5836b4f", "score": "0.52398974", "text": "func NewNode() *Node {\n\treturn &Node{}\n}", "title": "" }, { "docid": "efe3c3a6c85427171cf780aab0a2a3a3", "score": "0.5236204", "text": "func DoesNodeExist(pos TPosition) bool {\n\t_,found := Nodes[pos]\n\treturn found\n}", "title": "" }, { "docid": "32774ba4fda2964daa74e17906f95e8a", "score": "0.52351433", "text": "func (op *Operation) FindOrCreateCategory(name string) (*Category, error) {\n\tvar category, err = op.FindCategoryByName(name)\n\tif category == nil && err == nil {\n\t\tcategory = &Category{Name: name}\n\t\top.Categories.Save(category)\n\t}\n\treturn category, op.Operation.Err()\n}", "title": "" }, { "docid": "96f9ce4717f10dc0caa18b2d0e5598c6", "score": "0.5233331", "text": "func (g *Undirected) Node(id int) Node {\n\tif id >= len(g.nodes) {\n\t\treturn nil\n\t}\n\treturn g.nodes[id]\n}", "title": "" }, { "docid": "222227dd3cabf5cd060f45f92023ba3c", "score": "0.52325803", "text": "func (t *RBTree) FindNode(key Comparable) (nodes []*RBNode) {\n\tif t.size == 0 {\n\t\treturn\n\t}\n\tn := t.root\nLOOP:\n\tfor n != t.Nil {\n\t\tswitch Compare(key, n.Bag) {\n\t\tcase Less:\n\t\t\tn = n.left\n\t\tcase Greater:\n\t\t\tn = n.right\n\t\tcase Equal:\n\t\t\tnodes = append(nodes, n)\n\t\t\tbreak LOOP\n\t\t}\n\t}\n\tif !t.dupable || n == t.Nil {\n\t\treturn\n\t}\n\n\tnext := n\n\tfor next = t.NextNode(next); next != t.Nil; next = t.NextNode(next) {\n\t\tif Compare(key, next.Bag) == Equal {\n\t\t\tnodes = append(nodes, next)\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tprev := n\n\tfor prev = t.PrevNode(prev); prev != t.Nil; prev = t.PrevNode(prev) {\n\t\tif Compare(key, prev.Bag) == Equal {\n\t\t\tnodes = append(nodes, prev)\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "62f0e185cf367b48a8d8313c993bcbd4", "score": "0.5229433", "text": "func (h *HashRing) GetNode(k string) (string, error) {\n\tif len(h.nodes) == 0 {\n\t\treturn \"\", errors.New(\"Hash ring is empty\")\n\t}\n\n\th.RLock()\n\n\t// Hash the reference key.\n\thk := getHashKey(k)\n\n\t// Get index in the ring.\n\ti := sort.Search(len(h.nodes), func(i int) bool { return h.nodes[i].nodeID >= hk }) % len(h.nodes)\n\n\tnode := h.nodes[i].nodeName\n\n\th.RUnlock()\n\n\treturn node, nil\n}", "title": "" }, { "docid": "ebece2717b7a8b7bd1d0b4f164ff1738", "score": "0.5228846", "text": "func CreateNode(id string, revisit bool) *Node {\n\tnode := new(Node)\n\tnode.id = 
id\n\tnode.revisitable = revisit\n\tnode.rewards = make(map[*Reward]int)\n\tnode.minPathLeft = math.MaxInt32\n\treturn node\n}", "title": "" } ]
bd4c099042cb3b6d92f1a4aadda8db8f
Load ensures an npm package is loaded in the jsii kernel.
[ { "docid": "7a7ee21729079a81696fd786152a2209", "score": "0.5493786", "text": "func Load(name string, version string, tarball []byte) {\n\tc := kernel.GetClient()\n\n\t_, err := c.Load(kernel.LoadProps{\n\t\tName: name,\n\t\tVersion: version,\n\t}, tarball)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}", "title": "" } ]
[ { "docid": "f2101d4471e5fe3eb4948bee9e8bfde6", "score": "0.57752454", "text": "func (pm *PackageManager) Load(name string) (*Package, error) {\n\treturn LoadPackageFromPath(pm.MakePackagePath(name))\n}", "title": "" }, { "docid": "726aaf827f73af41fe4fcdb2b742da8b", "score": "0.56503254", "text": "func loadPackage(arg string, stk *importStack) *Package {\n\tbase := packageBaseImportPath(arg)\n\toptions := packageOptions(arg)\n\n\t// Wasn't a command; must be a package.\n\t// If it is a local import path but names a standard package,\n\t// we treat it as if the user specified the standard package.\n\t// This lets you run go test ./ioutil in package io and be\n\t// referring to io/ioutil rather than a hypothetical import of\n\t// \"./ioutil\".\n\tif build.IsLocalImport(base) {\n\t\tbp, _ := build.Default.ImportDir(filepath.Join(cwd, base), build.FindOnly)\n\t\tif bp.ImportPath != \"\" && bp.ImportPath != \".\" {\n\t\t\tbase = bp.ImportPath\n\t\t}\n\t}\n\n\tbuildContext := build.Default\n\tif contains(options, \"race\") {\n\t\tif buildContext.InstallSuffix != \"\" {\n\t\t\tbuildContext.InstallSuffix += \"_\"\n\t\t}\n\t\tbuildContext.InstallSuffix += \"race\"\n\t\tbuildContext.BuildTags = append(buildContext.BuildTags, \"race\")\n\t}\n\n\treturn loadImport(&buildContext, base, cwd, stk, nil)\n}", "title": "" }, { "docid": "f6a3b55a99311892d252206df33c5d7c", "score": "0.55049366", "text": "func (p *Package) load(buildContext *build.Context, stk *importStack, bp *build.Package, err error) *Package {\n\tp.Package = bp\n\tp.buildContext = buildContext\n\tp.Standard = p.Goroot && p.ImportPath != \"\" && !strings.Contains(p.ImportPath, \".\")\n\tp.race = contains(p.buildContext.BuildTags, \"race\")\n\n\tif err != nil {\n\t\tp.Incomplete = true\n\t\terr = expandScanner(err)\n\t\tp.Error = &PackageError{\n\t\t\tImportStack: stk.copy(),\n\t\t\tErr: err.Error(),\n\t\t}\n\t\treturn p\n\t}\n\n\tif p.Name == \"main\" {\n\t\t_, elem := filepath.Split(p.Dir)\n\t\tfull := buildContext.GOOS + \"_\" + buildContext.GOARCH + \"/\" + elem\n\t\tif buildContext.GOOS != runtime.GOOS || buildContext.GOARCH != runtime.GOARCH {\n\t\t\t// Install cross-compiled binaries to subdirectories of bin.\n\t\t\telem = full\n\t\t}\n\t\tif p.BinDir != \"\" {\n\t\t\t// Install to GOBIN or bin of GOPATH entry.\n\t\t\tp.Target = filepath.Join(p.BinDir, elem)\n\t\t}\n\t\tif p.Target != \"\" && buildContext.GOOS == \"windows\" {\n\t\t\tp.Target += \".exe\"\n\t\t}\n\t} else if p.local {\n\t\t// Local import turned into absolute path.\n\t\t// No permanent install target.\n\t\tp.Target = \"\"\n\t} else {\n\t\tp.Target = p.PkgObj\n\t}\n\n\timportPaths := p.Imports\n\t// Packages that use cgo import runtime/cgo implicitly.\n\t// Packages that use cgo also import syscall implicitly,\n\t// to wrap errno.\n\t// Exclude certain packages to avoid circular dependencies.\n\tif len(p.CgoFiles) > 0 && (!p.Standard || !cgoExclude[p.baseImportPath]) {\n\t\timportPaths = append(importPaths, \"runtime/cgo\")\n\t}\n\tif len(p.CgoFiles) > 0 && (!p.Standard || !cgoSyscallExclude[p.baseImportPath]) {\n\t\timportPaths = append(importPaths, \"syscall\")\n\t}\n\t// Everything depends on runtime, except runtime, its internal\n\t// subpackages, and unsafe.\n\tif !p.Standard || (p.baseImportPath != \"runtime\" && !strings.HasPrefix(p.baseImportPath, \"runtime/internal/\") && p.baseImportPath != \"unsafe\") {\n\t\timportPaths = append(importPaths, \"runtime\")\n\t\t// When race detection enabled everything depends on runtime/race.\n\t\t// Exclude certain packages 
to avoid circular dependencies.\n\t\tif p.race && (!p.Standard || !raceExclude[p.baseImportPath]) {\n\t\t\timportPaths = append(importPaths, \"runtime/race\")\n\t\t}\n\t}\n\n\t// Build list of imported packages and full dependency list.\n\timports := make([]*Package, 0, len(p.Imports))\n\tdeps := make(map[string]*Package)\n\tfor i, path := range importPaths {\n\t\tif path == \"C\" {\n\t\t\tcontinue\n\t\t}\n\t\tp1 := loadImport(buildContext, path, p.Dir, stk, p.ImportPos[path])\n\t\tif p1.local {\n\t\t\tif !p.local && p.Error == nil {\n\t\t\t\tp.Error = &PackageError{\n\t\t\t\t\tImportStack: stk.copy(),\n\t\t\t\t\tErr: fmt.Sprintf(\"local import %q in non-local package\", path),\n\t\t\t\t}\n\t\t\t\tpos := p.ImportPos[path]\n\t\t\t\tif len(pos) > 0 {\n\t\t\t\t\tp.Error.Pos = pos[0].String()\n\t\t\t\t}\n\t\t\t}\n\t\t\tpath = p1.ImportPath\n\t\t\timportPaths[i] = path\n\t\t}\n\t\tdeps[path] = p1\n\t\timports = append(imports, p1)\n\t\tfor _, dep := range p1.deps {\n\t\t\tdeps[dep.ImportPath] = dep\n\t\t}\n\t\tif p1.Incomplete {\n\t\t\tp.Incomplete = true\n\t\t}\n\t}\n\tp.imports = imports\n\n\tp.deps = make([]*Package, 0, len(deps))\n\tfor _, dep := range deps {\n\t\tp.deps = append(p.deps, dep)\n\t}\n\tsort.Sort(packageList(p.deps))\n\n\t// unsafe is a fake package.\n\tif p.Standard && (p.baseImportPath == \"unsafe\" || buildContext.Compiler == \"gccgo\") {\n\t\tp.Target = \"\"\n\t}\n\n\t// Check for C code compiled with Plan 9 C compiler.\n\t// No longer allowed except in runtime and runtime/cgo, for now.\n\tif len(p.CFiles) > 0 && !p.usesCgo() && (!p.Standard || p.baseImportPath != \"runtime\") {\n\t\tp.Error = &PackageError{\n\t\t\tImportStack: stk.copy(),\n\t\t\tErr: fmt.Sprintf(\"C source files not allowed when not using cgo: %s\", strings.Join(p.CFiles, \" \")),\n\t\t}\n\t\treturn p\n\t}\n\n\treturn p\n}", "title": "" }, { "docid": "96cf680ece2778c0a3db28b2fb52ce39", "score": "0.5408266", "text": "func LoadIntegration(name string) {\n\tif Disabled() {\n\t\treturn\n\t}\n\tcontrib.Lock()\n\tdefer contrib.Unlock()\n\tcontribPackages = append(contribPackages, Integration{Name: name, Enabled: true})\n}", "title": "" }, { "docid": "e3e7ce41472f59f0673b1322bc9c3e95", "score": "0.5379395", "text": "func (dt *DependencyTree) Load(importPath types.ImportPath) (*oyafile.Oyafile, bool, error) {\n\tpack, found, err := dt.findRequiredPack(importPath)\n\tif err != nil {\n\t\treturn nil, false, err\n\t}\n\tif found {\n\t\treturn dt.reqs.LoadLocalOyafile(pack)\n\t}\n\treturn nil, false, nil\n}", "title": "" }, { "docid": "41b93d44b055e1d8d0cded7684cda337", "score": "0.53512555", "text": "func LoadPackage(locale string) []Package {\n\n\tfiles, err := ioutil.ReadDir(\"./package\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tdirs := filter.Choose(files, func(f os.FileInfo) bool {\n\t\treturn f.IsDir()\n\t}).([]os.FileInfo)\n\n\tpackages := make([]Package, len(dirs))\n\tpackchan := make(chan interface{})\n\n\tfor _, f := range dirs {\n\t\tgo loader(f, locale, packchan)\n\t}\n\n\tfor range dirs {\n\t\tres := <-packchan\n\t\tswitch res.(type) {\n\t\tcase error:\n\t\t\t//fmt.Println(\"loadModules failed : \", err)\n\t\tcase Package:\n\t\t\tpackages = append(packages, res.(Package))\n\t\t}\n\t}\n\tloadedPackage[locale] = packages[:]\n\treturn GetPackage(locale)\n}", "title": "" }, { "docid": "09543e4b54ac05cee2bb4c5a71d6bde6", "score": "0.5281309", "text": "func Load(env *lisp.LEnv, fn Loader) *lisp.LVal {\n\tlerr := fn(env)\n\tif lerr.Type == lisp.LError {\n\t\treturn lerr\n\t}\n\t// Switch back to the 
user package so that further defined symbols end up\n\t// in that package by default.\n\tlerr = env.InPackage(lisp.String(lisp.DefaultUserPackage))\n\tif lerr.Type == lisp.LError {\n\t\treturn lerr\n\t}\n\treturn lisp.Nil()\n}", "title": "" }, { "docid": "c510535cd00b6199c58f9b5a1ab7696a", "score": "0.52649415", "text": "func loadPackage(c *Context, stack []string, path string) (*Package, error) {\n\tif build.IsLocalImport(path) {\n\t\t// sanity check\n\t\treturn nil, fmt.Errorf(\"%q is not a valid import path\", path)\n\t}\n\tif pkg, ok := c.pkgs[path]; ok {\n\t\t// already loaded, just return\n\t\treturn pkg, nil\n\t}\n\n\tpush := func(path string) {\n\t\tstack = append(stack, path)\n\t}\n\tpop := func(path string) {\n\t\tstack = stack[:len(stack)-1]\n\t}\n\tonStack := func(path string) bool {\n\t\tfor _, p := range stack {\n\t\t\tif p == path {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t\treturn false\n\t}\n\n\tp, err := c.Context.Import(path, c.Projectdir(), 0)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// everything depends on runtime, except the runtime itself.\n\t// TODO(dfc) see if this can be made more selective by adding\n\t// runtime as a dependency of some select packages.\n\tstandard := p.Goroot && p.ImportPath != \"\" && !strings.Contains(p.ImportPath, \".\")\n\tif standard && p.ImportPath != \"runtime\" {\n\t\tp.Imports = append(p.Imports, \"runtime\")\n\t}\n\n\tpush(path)\n\tvar stale bool\n\tfor _, i := range p.Imports {\n\t\tif c.shouldignore(i) {\n\t\t\tcontinue\n\t\t}\n\t\tif onStack(i) {\n\t\t\tpush(i)\n\t\t\treturn nil, fmt.Errorf(\"import cycle detected: %s\", strings.Join(stack, \" -> \"))\n\t\t}\n\t\tpkg, err := loadPackage(c, stack, i)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tstale = stale || pkg.Stale\n\t}\n\tpop(path)\n\n\tpkg := Package{\n\t\tContext: c,\n\t\tPackage: p,\n\t\tStandard: standard,\n\t}\n\tpkg.Stale = stale || isStale(&pkg)\n\tc.pkgs[path] = &pkg\n\treturn &pkg, nil\n}", "title": "" }, { "docid": "fd8d51c3d6f53bcbf9a7c65cacae4618", "score": "0.5219697", "text": "func (l *Loader) Load(pkgpath string) (*Package, error) {\n\tif !filepath.IsAbs(pkgpath) {\n\t\tpkgpath, _ = filepath.Abs(pkgpath)\n\t}\n\tl.root = pkgpath\n\tentryDesc, err := l.parsePackageDesc(pkgpath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tentryDesc.path = pkgpath\n\tpkgs := make(map[string]*Package)\n\n\t// 广度搜索,剔除已经加载的package\n\tqueue := list.New()\n\tqueue.PushBack(entryDesc)\n\tfor queue.Len() != 0 {\n\t\tdesc := queue.Remove(queue.Front()).(*PackgeDesc)\n\t\tif _, ok := pkgs[desc.Package.Name]; ok {\n\t\t\tcontinue\n\t\t}\n\n\t\tpkgs[desc.Package.Name] = &Package{\n\t\t\tName: desc.Package.Name,\n\t\t\tPath: desc.path,\n\t\t}\n\n\t\tdeps, err := l.parsePackageDeps(desc)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tfor _, dep := range deps {\n\t\t\tqueue.PushBack(dep)\n\t\t}\n\t}\n\n\tdepPkgs := make([]*Package, 0, len(pkgs))\n\tfor _, pkg := range pkgs {\n\t\tif pkg.Name == entryDesc.Package.Name {\n\t\t\tcontinue\n\t\t}\n\t\tdepPkgs = append(depPkgs, pkg)\n\t}\n\treturn &Package{\n\t\tName: entryDesc.Package.Name,\n\t\tPath: pkgpath,\n\t\tDeps: depPkgs,\n\t}, nil\n}", "title": "" }, { "docid": "b98c0646062edf746183685f88c1ed4b", "score": "0.51967263", "text": "func LoadStdLib(ctx context.Context, vm *otto.Otto, pkgname string) error {\n\tv, err := vm.Run(`({})`)\n\tif err != nil {\n\t\treturn err\n\t}\n\tpkg := v.Object()\n\tfor name, subpkg := range map[string]func(*otto.Otto) (otto.Value, error){\n\t\t\"time\": (&timePkg{ctx: 
ctx}).load,\n\t\t\"os\": (&osPkg{ctx: ctx}).load,\n\t\t\"do\": (newDoPkg(ctx)).load,\n\t} {\n\t\tv, err := subpkg(vm)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"can't load package %q: %v\", name, err)\n\t\t}\n\t\tif err := pkg.Set(name, v); err != nil {\n\t\t\treturn fmt.Errorf(\"can't set package %q: %v\", name, err)\n\t\t}\n\t}\n\n\treturn vm.Set(pkgname, pkg)\n}", "title": "" }, { "docid": "872a6ff940505c139da846d74607eaed", "score": "0.51322603", "text": "func loadPackage(cfg *Config) (*types.Package, error) {\n\t// Find the import path of the package in the given directory.\n\tcwd, _ := os.Getwd()\n\tdir := filepath.Join(cfg.Dir, \"*.go\")\n\tpkg, err := buildutil.ContainingPackage(&build.Default, cwd, dir)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tnocheck := func(path string) bool { return false }\n\tlcfg := loader.Config{Fset: token.NewFileSet(), TypeCheckFuncBodies: nocheck}\n\tlcfg.ImportWithTests(pkg.ImportPath)\n\tprog, err := lcfg.Load()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn prog.Package(pkg.ImportPath).Pkg, nil\n}", "title": "" }, { "docid": "28edd50a79e3ac108cd71cc75171306d", "score": "0.5122249", "text": "func LoadPackage(st *vmdat.State, pkgName string, cfg *Config) ([]*ir.Package, error) {\n\tif st.FindPackage(pkgName) != nil {\n\t\treturn nil, nil // Already loaded\n\t}\n\n\tpkgFiles, err := readClassFiles(pkgName, cfg)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"read %q class files: %v\", pkgName, err)\n\t}\n\treturn loadPackageSet(st, pkgName, pkgFiles, cfg)\n}", "title": "" }, { "docid": "f44f831218516293615268c0e08ce432", "score": "0.50322706", "text": "func (r *Require) Load(call otto.FunctionCall) otto.Value {\n\tid, err := call.Argument(0).ToString()\n\tif err != nil {\n\t\tpanicOtto(err)\n\t}\n\tif cached, ok := r.checkCache(id); ok {\n\t\treturn cached\n\t}\n\tnewID, err := r.resolve(id)\n\tif err != nil {\n\t\tpanicOtto(err.Error())\n\t}\n\tif cached, ok := r.checkCache(newID); ok {\n\t\treturn cached\n\t}\n\treturn r.loadFromFile(newID, call.Otto)\n}", "title": "" }, { "docid": "b6985fc4c67f5e61965ff6a216705c10", "score": "0.50252193", "text": "func (p *Pkg) Require(pkg *link8.Pkg) uint32 { return p.lib.Require(pkg) }", "title": "" }, { "docid": "84312e4627f20ea809fb3006d95418e8", "score": "0.50042003", "text": "func (s *stack) LoadModule() error {\n\treturn nil\n}", "title": "" }, { "docid": "cdb031f20b7dab86ef03f2c3f8f27c8e", "score": "0.4985402", "text": "func LoadPackage(ccNameVersion string, path string, getHasher GetHasher) (CCPackage, error) {\n\treturn (&CCInfoFSImpl{GetHasher: getHasher}).GetChaincodeFromPath(ccNameVersion, path)\n}", "title": "" }, { "docid": "f2918f0c4f291cd7c41eaa8696429701", "score": "0.4977422", "text": "func Load() {\n\terr := envconfig.Process(\"\", &Cfg)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}", "title": "" }, { "docid": "94b9de793c3253293cb77a247cd6638b", "score": "0.49270245", "text": "func PackageLoader(p Package) Loader {\n\treturn func(env *lisp.LEnv) *lisp.LVal {\n\t\tname := lisp.Symbol(p.PackageName())\n\t\te := env.DefinePackage(name)\n\t\tif !e.IsNil() {\n\t\t\treturn e\n\t\t}\n\t\te = env.InPackage(name)\n\t\tif !e.IsNil() {\n\t\t\treturn e\n\t\t}\n\t\tinitLoader := packageInit(p)\n\t\te = initLoader(env)\n\t\tif e.Type == lisp.LError {\n\t\t\treturn e\n\t\t}\n\t\tfor _, fn := range packageBuiltins(p) {\n\t\t\tenv.AddBuiltins(true, fn)\n\t\t}\n\t\tfor _, fn := range packageSpecialOps(p) {\n\t\t\tenv.AddSpecialOps(true, fn)\n\t\t}\n\t\tfor _, fn := range packageMacros(p) 
{\n\t\t\tenv.AddMacros(true, fn)\n\t\t}\n\t\treturn lisp.Nil()\n\t}\n}", "title": "" }, { "docid": "24bcb40b173952db6729d30916c30e17", "score": "0.4909101", "text": "func loadImport(buildContext *build.Context, path string, srcDir string,\n\tstk *importStack, importPos []token.Position) *Package {\n\tstk.push(path)\n\tdefer stk.pop()\n\n\t// Determine canonical identifier for this package.\n\t// For a local import the identifier is the pseudo-import path\n\t// we create from the full directory to the package.\n\t// Otherwise it is the usual import path.\n\timportPath := path\n\tisLocal := build.IsLocalImport(path)\n\tif isLocal {\n\t\timportPath = dirToImportPath(filepath.Join(srcDir, path))\n\t}\n\tfullImportPath := importPath\n\tif contains(buildContext.BuildTags, \"race\") {\n\t\tfullImportPath += \":race\"\n\t}\n\tif p := packageCache[fullImportPath]; p != nil {\n\t\treturn reusePackage(p, stk)\n\t}\n\n\tp := new(Package)\n\tp.local = isLocal\n\tpackageCache[fullImportPath] = p\n\n\t// Load package.\n\t// Import always returns bp != nil, even if an error occurs,\n\t// in order to return partial information.\n\t//\n\t// TODO: After Go 1, decide when to pass build.AllowBinary here.\n\t// See issue 3268 for mistakes to avoid.\n\tbp, err := buildContext.Import(path, srcDir, build.ImportComment)\n\tbp.ImportPath = fullImportPath\n\tif gobin != \"\" {\n\t\tbp.BinDir = gobin\n\t}\n\tif err == nil && !isLocal && bp.ImportComment != \"\" && bp.ImportComment != path {\n\t\terr = fmt.Errorf(\"code in directory %s expects import %q\", bp.Dir, bp.ImportComment)\n\t}\n\tp.baseImportPath = importPath\n\tp.load(buildContext, stk, bp, err)\n\tif p.Error != nil && len(importPos) > 0 {\n\t\tpos := importPos[0]\n\t\tpos.Filename = shortPath(pos.Filename)\n\t\tp.Error.Pos = pos.String()\n\t}\n\n\treturn p\n}", "title": "" }, { "docid": "a3ba04bd8f0f7ca2845e83a32254af0d", "score": "0.48970157", "text": "func Load() (err error) {\n\treturn A.loadAPI()\n}", "title": "" }, { "docid": "10138c2228403d48a51ad9013e5771ed", "score": "0.486726", "text": "func Load() {\n\t_ = godotenv.Load()\n\n\tif os.Getenv(\"GIN_MODE\") != \"debug\" {\n\t\tgin.SetMode(gin.ReleaseMode)\n\t}\n}", "title": "" }, { "docid": "c5d78db2ac3fe47df82bdafccdbd50d4", "score": "0.47866744", "text": "func LoadPackage(dir string) (pkg Package, err error) {\n\tpkg.files = map[string][]byte{}\n\tpkg.f = token.NewFileSet()\n\tpackages, err := parser.ParseDir(pkg.f, dir, func(f os.FileInfo) bool {\n\t\t// exclude test files\n\t\treturn !strings.HasSuffix(f.Name(), \"_test.go\")\n\t}, 0)\n\tif err != nil {\n\t\treturn\n\t}\n\tif len(packages) < 1 {\n\t\terr = errorInfo{\n\t\t\tm: fmt.Sprintf(\"didn't find any packages in '%s'\", dir),\n\t\t}\n\t\treturn\n\t}\n\tif len(packages) > 1 {\n\t\tpkgs := []string{}\n\t\tfor p := range packages {\n\t\t\tpkgs = append(pkgs, p)\n\t\t}\n\t\terr = errorInfo{\n\t\t\tm: fmt.Sprintf(\"found multiple packages in '%s': %s\", dir, strings.Join(pkgs, \", \")),\n\t\t\tp: pkgs,\n\t\t}\n\t\treturn\n\t}\n\tfor pn := range packages {\n\t\tp := packages[pn]\n\t\t// trim any non-exported nodes\n\t\tif exp := ast.PackageExports(p); !exp {\n\t\t\terr = fmt.Errorf(\"package '%s' doesn't contain any exports\", pn)\n\t\t\treturn\n\t\t}\n\t\tpkg.p = p\n\t\treturn\n\t}\n\t// shouldn't ever get here...\n\tpanic(\"failed to return package\")\n}", "title": "" }, { "docid": "ddf57aae1e99cb5f2d16a7248363e022", "score": "0.47728994", "text": "func (a *ArtifactRepositoryProvider) Load(c *kubernetes.Clientset, namespace string) error {\n\tif 
a.GCS == nil {\n\t\treturn nil\n\t}\n\n\tsecret, err := c.CoreV1().Secrets(namespace).Get(context.Background(), \"onepanel\", v1.GetOptions{})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif secretKeyBytes, ok := secret.Data[\"artifactRepositoryS3SecretKey\"]; ok {\n\t\ta.GCS.ServiceAccountKeySecret.Key = string(secretKeyBytes)\n\n\t\treturn nil\n\t}\n\n\treturn fmt.Errorf(\"unable to read artifact configuration\")\n}", "title": "" }, { "docid": "f677300e49e316629832df8d4c72be11", "score": "0.47654662", "text": "func (c *Context) Package(path string) {\n\tcfg := &packages.Config{\n\t\tMode: packages.NeedTypes | packages.NeedDeps | packages.NeedImports,\n\t}\n\tpkgs, err := packages.Load(cfg, path)\n\tif err != nil {\n\t\tc.adderror(err)\n\t\treturn\n\t}\n\tpkg := pkgs[0]\n\tif len(pkg.Errors) > 0 {\n\t\tfor _, err := range pkg.Errors {\n\t\t\tc.adderror(err)\n\t\t}\n\t\treturn\n\t}\n\tc.pkg = pkg\n}", "title": "" }, { "docid": "906729910498c6381a7480b64de0b80e", "score": "0.47508696", "text": "func (pstate *PackageState) loadsys() {\n\tpstate.types.Block = 1\n\n\tpstate.inimport = true\n\tpstate.typecheckok = true\n\tpstate.defercheckwidth()\n\n\ttyps := pstate.runtimeTypes()\n\tfor _, d := range pstate.runtimeDecls {\n\t\tsym := pstate.Runtimepkg.Lookup(pstate.types, d.name)\n\t\ttyp := typs[d.typ]\n\t\tswitch d.tag {\n\t\tcase funcTag:\n\t\t\tpstate.importfunc(pstate.Runtimepkg, pstate.src.NoXPos, sym, typ)\n\t\tcase varTag:\n\t\t\tpstate.importvar(pstate.Runtimepkg, pstate.src.NoXPos, sym, typ)\n\t\tdefault:\n\t\t\tpstate.Fatalf(\"unhandled declaration tag %v\", d.tag)\n\t\t}\n\t}\n\n\tpstate.typecheckok = false\n\tpstate.resumecheckwidth()\n\tpstate.inimport = false\n}", "title": "" }, { "docid": "d0fb56283e8ac7f00713728fbea7a0bb", "score": "0.4747093", "text": "func (ld *loader) loadRecursive(lpkg *Package) {\n\tlpkg.loadOnce.Do(func() {\n\t\t// Load the direct dependencies, in parallel.\n\t\tvar wg sync.WaitGroup\n\t\tfor _, imp := range lpkg.Imports {\n\t\t\twg.Add(1)\n\t\t\tgo func(imp *Package) {\n\t\t\t\tld.loadRecursive(imp)\n\t\t\t\twg.Done()\n\t\t\t}(imp)\n\t\t}\n\t\twg.Wait()\n\n\t\tld.loadPackage(lpkg)\n\t})\n}", "title": "" }, { "docid": "ca0e8c0387254f69abc0a444e33af960", "score": "0.4731926", "text": "func (l *Loader) Load(ctx context.Context) error {\n\tif !l.initialized {\n\t\treturn errors.New(errors.NotReadyErr, \"\")\n\t}\n\n\tl.mutex.Lock()\n\tdefer l.mutex.Unlock()\n\n\tbundle, err := l.get(ctx, \"\")\n\tif err != nil {\n\t\treturn errors.New(errors.InvalidBundleErr, err.Error())\n\t}\n\n\tif len(bundle.WasmModules) == 0 {\n\t\treturn errors.New(errors.InvalidBundleErr, \"missing wasm\")\n\t}\n\n\tvar data *interface{}\n\tif bundle.Data != nil {\n\t\tvar v interface{} = bundle.Data\n\t\tdata = &v\n\t}\n\n\treturn l.pd.SetPolicyData(ctx, bundle.WasmModules[0].Raw, data)\n}", "title": "" }, { "docid": "368eeca7fc3d6ee08c147f1f76c3f873", "score": "0.4686926", "text": "func init() {\n\timports.Packages[\"display\"] = display\n\timports.Packages[\"github.com/gopherdata/gophernotes\"] = display\n}", "title": "" }, { "docid": "8b2d3d166838c37ca081cddf1c250797", "score": "0.46813294", "text": "func (m *dnf) load() error {\n\tm.commands = managerCommands{\n\t\tclean: \"dnf\",\n\t\tinstall: \"dnf\",\n\t\trefresh: \"dnf\",\n\t\tremove: \"dnf\",\n\t\tupdate: \"dnf\",\n\t}\n\n\tm.flags = managerFlags{\n\t\tglobal: []string{\n\t\t\t\"-y\",\n\t\t},\n\t\tinstall: []string{\n\t\t\t\"install\",\n\t\t},\n\t\tremove: []string{\n\t\t\t\"remove\",\n\t\t},\n\t\trefresh: 
[]string{\n\t\t\t\"makecache\",\n\t\t},\n\t\tupdate: []string{\n\t\t\t\"upgrade\",\n\t\t\t\"--nobest\",\n\t\t},\n\t\tclean: []string{\n\t\t\t\"clean\", \"all\",\n\t\t},\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "a1a96afa3a46822a656337b928b2252f", "score": "0.4664349", "text": "func loadPlugin(ctx *context.Context, path string) *plugin.Plugin {\n\t// TODO: It is not yet known if this process is accurate. If you find a better way, you need to fix this process.\n\t// see related PR: https://github.com/zoncoen/scenarigo/pull/78\n\tplgMu.Lock()\n\tdefer plgMu.Unlock()\n\tp, err := plugin.Open(path)\n\tif err != nil {\n\t\tctx.Reporter().Fatalf(\"failed to open plugin: %s\", err)\n\t}\n\treturn p\n}", "title": "" }, { "docid": "ccef6092acbe22950f41efd264748146", "score": "0.46611425", "text": "func (ld *loader) loadPackage(lpkg *Package) {\n\tif lpkg.PkgPath == \"unsafe\" {\n\t\t// Fill in the blanks to avoid surprises.\n\t\tlpkg.Type = types.Unsafe\n\t\tlpkg.Fset = ld.Fset\n\t\tlpkg.Files = []*ast.File{}\n\t\tlpkg.Info = new(types.Info)\n\t\treturn\n\t}\n\n\tif ld.mode == typeCheck && !lpkg.needsrc {\n\t\treturn // not a source package\n\t}\n\n\thardErrors := false\n\tappendError := func(err error) {\n\t\tif terr, ok := err.(types.Error); ok && terr.Soft {\n\t\t\t// Don't mark the package as bad.\n\t\t} else {\n\t\t\thardErrors = true\n\t\t}\n\t\tld.Error(err)\n\t\tlpkg.Errors = append(lpkg.Errors, err)\n\t}\n\n\tfiles, errs := ld.parseFiles(lpkg.Srcs)\n\tfor _, err := range errs {\n\t\tappendError(err)\n\t}\n\n\tlpkg.Fset = ld.Fset\n\tlpkg.Files = files\n\n\t// Call NewPackage directly with explicit name.\n\t// This avoids skew between golist and go/types when the files'\n\t// package declarations are inconsistent.\n\tlpkg.Type = types.NewPackage(lpkg.PkgPath, lpkg.Name)\n\n\tlpkg.Info = &types.Info{\n\t\tTypes: make(map[ast.Expr]types.TypeAndValue),\n\t\tDefs: make(map[*ast.Ident]types.Object),\n\t\tUses: make(map[*ast.Ident]types.Object),\n\t\tImplicits: make(map[ast.Node]types.Object),\n\t\tScopes: make(map[ast.Node]*types.Scope),\n\t\tSelections: make(map[*ast.SelectorExpr]*types.Selection),\n\t}\n\n\t// Copy the prototype types.Config as it must vary across Packages.\n\ttc := ld.TypeChecker // copy\n\tif !ld.cgo {\n\t\ttc.FakeImportC = true\n\t}\n\ttc.Importer = importerFunc(func(path string) (*types.Package, error) {\n\t\tif path == \"unsafe\" {\n\t\t\treturn types.Unsafe, nil\n\t\t}\n\n\t\t// The imports map is keyed by import path.\n\t\timp := lpkg.Imports[path]\n\t\tif imp == nil {\n\t\t\tif err := lpkg.importErrors[path]; err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\t// There was skew between the metadata and the\n\t\t\t// import declarations, likely due to an edit\n\t\t\t// race, or because the ParseFile feature was\n\t\t\t// used to supply alternative file contents.\n\t\t\treturn nil, fmt.Errorf(\"no metadata for %s\", path)\n\t\t}\n\t\tif imp.Type != nil && imp.Type.Complete() {\n\t\t\treturn imp.Type, nil\n\t\t}\n\t\tif ld.mode == typeCheck && !imp.needsrc {\n\t\t\treturn ld.loadFromExportData(imp)\n\t\t}\n\t\tlog.Fatalf(\"internal error: nil Pkg importing %q from %q\", path, lpkg)\n\t\tpanic(\"unreachable\")\n\t})\n\ttc.Error = appendError\n\n\t// type-check\n\ttypes.NewChecker(&tc, ld.Fset, lpkg.Type, lpkg.Info).Files(lpkg.Files)\n\n\tlpkg.importErrors = nil // no longer needed\n\n\t// If !Cgo, the type-checker uses FakeImportC mode, so\n\t// it doesn't invoke the importer for import \"C\",\n\t// nor report an error for the import,\n\t// or for any undefined 
C.f reference.\n\t// We must detect this explicitly and correctly\n\t// mark the package as IllTyped (by reporting an error).\n\t// TODO(adonovan): if these errors are annoying,\n\t// we could just set IllTyped quietly.\n\tif tc.FakeImportC {\n\touter:\n\t\tfor _, f := range lpkg.Files {\n\t\t\tfor _, imp := range f.Imports {\n\t\t\t\tif imp.Path.Value == `\"C\"` {\n\t\t\t\t\tappendError(fmt.Errorf(`%s: import \"C\" ignored`,\n\t\t\t\t\t\tlpkg.Fset.Position(imp.Pos())))\n\t\t\t\t\tbreak outer\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Record accumulated errors.\n\tfor _, imp := range lpkg.Imports {\n\t\tif imp.IllTyped {\n\t\t\thardErrors = true\n\t\t\tbreak\n\t\t}\n\t}\n\n\tlpkg.IllTyped = hardErrors\n}", "title": "" }, { "docid": "c0b18ac960def8c5dd17e252b7564c2d", "score": "0.46354395", "text": "func (c *Context) ResolvePackage(path string) (*Package, error) {\n\treturn loadPackage(c, nil, path)\n}", "title": "" }, { "docid": "ba65da29296762aaa12cabcd89175476", "score": "0.46083325", "text": "func (s *Script) loadModules(runtime *lua.LState) error {\n\truntime.PreloadModule(\"json\", json.Loader)\n\tfor _, m := range s.mods {\n\t\tif err := m.inject(runtime); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "2b0b8ba9db46c25ffbd528d543cc19be", "score": "0.4588409", "text": "func LoadPlugin (\r\n\t loadPlugin string, \r\n\t\t\t\tlogDebug func(string, ...interface{}),\r\n logSevere func(string, ...interface{})) (gpl.GumsPlugin, error) {\r\n\r\n\t if(len(aliases) == 0) {\r\n\t\t\tloadDefault(logDebug, logSevere)\r\n\t\t}\r\n\r\n\t\taliasPlugin := aliases[loadPlugin]\r\n\t\tif aliasPlugin == \"\" {\r\n return nil, fmt.Errorf(\"\")\r\n\t\t} \r\n\r\n // the module to load\r\n module := modsPath + aliasPlugin + \".so\"\r\n\r\n if _, err := os.Stat(module); os.IsNotExist(err) {\r\n return nil, fmt.Errorf(\"Cannot find requested module:%s. File doesn't exist:%s\", aliasPlugin, module)\r\n } else {\r\n // load the module\r\n p, err := plugin.Open(module)\r\n if err != nil {\r\n return nil, fmt.Errorf(\"Cannot load requested module:%s. 
Malformed binary?\", module)\r\n }\r\n\r\n // look up the GumsPlugin symbol\r\n symGumsPlugin, err := p.Lookup(\"GumsPlugin\")\r\n if err != nil {\r\n return nil, fmt.Errorf(\"Cannot find symbol 'GumsPlugin' for loaded module:%s\", module)\r\n }\r\n\r\n // Assert that loaded symbol is of type GumsPlugin\r\n loadedPlugin := symGumsPlugin.(gplugin)\r\n if loadedPlugin == nil {\r\n return nil, fmt.Errorf(\"assert of loaded symbol (%s) failed\", module)\r\n }\r\n\r\n return loadedPlugin, nil\r\n }\r\n}", "title": "" }, { "docid": "eea30b48855383e1564912b78915bbe8", "score": "0.45824018", "text": "func (l *LoaderPackage) Load(mctx libkb.MetaContext) (err error) {\n\ttmp, err := mctx.G().GetHiddenTeamChainManager().Load(mctx, l.id)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif tmp == nil {\n\t\treturn nil\n\t}\n\tcp := tmp.DeepCopy()\n\tl.data = &cp\n\treturn err\n}", "title": "" }, { "docid": "0ebdf05deae9ef4f2671a597f2de2eed", "score": "0.45757976", "text": "func loadPkg(pkgName string) (*token.FileSet, *loader.PackageInfo, error) {\n\tvar loaderConf loader.Config\n\t_, err := loaderConf.FromArgs([]string{pkgName}, false)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tprog, err := loaderConf.Load()\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treturn prog.Fset, prog.Package(pkgName), nil\n}", "title": "" }, { "docid": "f9eca1625910501e8edfde130db94542", "score": "0.45752862", "text": "func (r *Loader) Load(ctx context.Context, packages []string) (map[string]*bazel.Package, error) {\n\treq := spb.LoaderRequest{\n\t\tWorkspaceDir: &r.workspaceRoot,\n\t\tInstallBase: &r.bazelInstallBase,\n\t\tOutputBase: &r.bazelOutputBase,\n\t\tPackages: packages,\n\t\tRuleKindsToSerialize: r.ruleKindsToSerialize,\n\t}\n\tctx, cancel := context.WithTimeout(ctx, r.timeout)\n\tdefer cancel()\n\tstopwatch := time.Now()\n\treply, err := r.stub.Load(ctx, &req)\n\tif vlog.V(2) {\n\t\tlog.Printf(\"Loading packages took %dms. 
Request:\\n%q\", int64(time.Now().Sub(stopwatch)/time.Millisecond), proto.CompactTextString(&req))\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn DeserializeProto(reply), nil\n}", "title": "" }, { "docid": "b76c777ffe47c9df15b1adcbd957cf11", "score": "0.45741794", "text": "func Load() {\n\tfor _, route := range routes {\n\t\tswitch route.method {\n\t\tcase \"GET\":\n\t\t\tgoji.Get(route.path, route.handler)\n\t\tdefault:\n\t\t\tpanic(\"UNKNOWN HTTP METHOD: \" + route.method)\n\t\t}\n\t}\n\tpublicPath, _ := filepath.Abs(\"public\")\n\tlog.Info(publicPath)\n\tgoji.Get(\"/*\", http.FileServer(http.Dir(publicPath)))\n}", "title": "" }, { "docid": "ecb4d0b492b37406323679d41be62de1", "score": "0.45714593", "text": "func readPackageFile(folder string) (*npmPackage, error) {\n\t// Verify package.json file exists\n\tpackagePath := path.Join(folder, \"package.json\")\n\tinfo, err := os.Stat(packagePath)\n\n\tif os.IsNotExist(err) {\n\t\treturn nil, fmt.Errorf(\"no package.json at %s: %w\", packagePath, err)\n\t}\n\tif info.IsDir() {\n\t\treturn nil, fmt.Errorf(\"the package.json at %s is a directory\", packagePath)\n\t}\n\n\t// Read the file\n\tfile, err := os.ReadFile(packagePath)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"could not read package.json at %s: %w\", packagePath, err)\n\t}\n\n\t// Unmarshal the json data\n\tnpm := npmPackage{}\n\terr = json.Unmarshal(file, &npm)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Make sure values are present\n\tif npm.Name == \"\" {\n\t\treturn nil, fmt.Errorf(\"no package name present\")\n\t}\n\tif npm.Version == \"\" {\n\t\treturn nil, fmt.Errorf(\"no package version present\")\n\t}\n\n\t// Set the default registry\n\tif npm.Config.Registry == \"\" {\n\t\tnpm.Config.Registry = globalRegistry\n\t}\n\n\tlogrus.WithFields(logrus.Fields{\n\t\t\"name\": npm.Name,\n\t\t\"version\": npm.Version,\n\t\t\"path\": packagePath,\n\t}).Info(\"Found package.json\")\n\n\treturn &npm, nil\n}", "title": "" }, { "docid": "9aa5c9cd725eb387213b896283ab2316", "score": "0.45405373", "text": "func Load(ret interface{}) error {\n\treturn envconfig.Process(AppName, ret)\n}", "title": "" }, { "docid": "f37353694e9bc9941d32e321d8a9adb1", "score": "0.45266", "text": "func ModuleLoad(client Client, actionID, module, loadType string) (Response, error) {\n\treturn send(client, \"ModuleLoad\", actionID, map[string]string{\n\t\t\"Module\": module,\n\t\t\"LoadType\": loadType,\n\t})\n}", "title": "" }, { "docid": "73cbde2f456848e1e0d6f3373f850bb3", "score": "0.45253226", "text": "func loadOpenvswitchModule() {\n\tif triedLoadOpenvswitchModule {\n\t\treturn\n\t}\n\n\t// netdev ioctls don't seem to work on netlink sockets, so we\n\t// need a new socket for this purpose.\n\ts, err := syscall.Socket(syscall.AF_INET, syscall.SOCK_DGRAM, 0)\n\tif err != nil {\n\t\ttriedLoadOpenvswitchModule = true\n\t\treturn\n\t}\n\n\tdefer syscall.Close(s)\n\n\tvar req ifreqIfindex\n\tcopy(req.name[:], []byte(\"openvswitch\"))\n\tsyscall.Syscall(syscall.SYS_IOCTL, uintptr(s),\n\t\tsyscall.SIOCGIFINDEX, uintptr(unsafe.Pointer(&req)))\n\ttriedLoadOpenvswitchModule = true\n}", "title": "" }, { "docid": "b4e9551726dece56a6cad1e4b47b5563", "score": "0.4508087", "text": "func (host *goLanguageHost) loadGomod(gobin, programDir string) (modDir string, modFile *modfile.File, err error) {\n\t// Get the path to the go.mod file.\n\t// This may be different from the programDir if the Pulumi program\n\t// is in a subdirectory of the Go module.\n\t//\n\t// The '-f {{.GoMod}}' specifies that the 
command should print\n\t// just the path to the go.mod file.\n\t//\n\t//\ttype Module struct {\n\t//\t\tPath string // module path\n\t//\t\t...\n\t//\t\tGoMod string // path to go.mod file\n\t//\t}\n\t//\n\t// See 'go help list' for the full definition.\n\tcmd := exec.Command(gobin, \"list\", \"-m\", \"-f\", \"{{.GoMod}}\")\n\tcmd.Dir = programDir\n\tcmd.Stderr = os.Stderr\n\tout, err := cmd.Output()\n\tif err != nil {\n\t\treturn \"\", nil, fmt.Errorf(\"go list -m: %w\", err)\n\t}\n\tout = bytes.TrimSpace(out)\n\tif len(out) == 0 {\n\t\t// The 'go list' command above will exit successfully\n\t\t// and return no output if the program is not in a Go module.\n\t\treturn \"\", nil, fmt.Errorf(\"no go.mod file found: %v\", programDir)\n\t}\n\n\tmodPath := string(out)\n\tbody, err := os.ReadFile(modPath)\n\tif err != nil {\n\t\treturn \"\", nil, err\n\t}\n\n\tf, err := modfile.ParseLax(modPath, body, nil)\n\tif err != nil {\n\t\treturn \"\", nil, fmt.Errorf(\"parse: %w\", err)\n\t}\n\n\treturn filepath.Dir(modPath), f, nil\n}", "title": "" }, { "docid": "d56c4f50db38d9c9ee1ae780587c1a4e", "score": "0.4488711", "text": "func init() {\n\tmb.Registry.MustAddMetricSet(\"system\", \"load\", New)\n}", "title": "" }, { "docid": "0d03c34f9f64ac23e8ff1b3efff57b6b", "score": "0.44720468", "text": "func loadFS() {\n\t// Load GOROOT (in zip file)\n\trsc, err := asset.Open(gorootZipFile) // asset file, never closed.\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\toffset, err := rsc.Seek(0, os.SEEK_END)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tif _, err = rsc.Seek(0, os.SEEK_SET); err != nil {\n\t\tpanic(err)\n\t}\n\tr, err := zip.NewReader(&readerAt{wrapped: rsc}, offset)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfs.Bind(\"/\", newZipFS(r, gorootZipFile), \"/go\", vfs.BindReplace)\n\n\t// static files for godoc.\n\tfs.Bind(\"/lib/godoc\", mapfs.New(static.Files), \"/\", vfs.BindReplace)\n}", "title": "" }, { "docid": "304528f368b6208b358cd6e0027f1e73", "score": "0.44480813", "text": "func assets_js_lib_bootstrap_npm_js() ([]byte, error) {\n\treturn bindata_read(\n\t\t\"D:\\\\GoPath\\\\src\\\\letinvr.com\\\\echo-web\\\\assets\\\\js\\\\lib\\\\bootstrap\\\\npm.js\",\n\t\t\"assets/js/lib/bootstrap/npm.js\",\n\t)\n}", "title": "" }, { "docid": "e33f345eac086512e1aafb9c2d31122e", "score": "0.4447437", "text": "func Load(name string) bool {\n\treturn true\n}", "title": "" }, { "docid": "25ef9ad2620c2f509cfd369e34b0f31c", "score": "0.4440781", "text": "func Import(pkg string) NodeMarshaler {\n\treturn nodef(func(s *Scope) ast.Node {\n\t\tpath := `\"` + pkg + `\"`\n\t\tx, _ := s.LookupStash(&fileKey)\n\t\tf := x.(*ast.File)\n\t\tfor _, spec := range f.Imports {\n\t\t\tif spec.Path.Value == path {\n\t\t\t\treturn ast.NewIdent(spec.Name.Name)\n\t\t\t}\n\t\t}\n\t\tcfg := &packages.Config{Mode: packages.NeedName}\n\t\tpkgs, err := packages.Load(cfg, pkg)\n\t\tname := \"\"\n\t\tif err == nil && len(pkgs) == 1 && pkgs[0].Name != \"\" {\n\t\t\tname = pkgs[0].Name\n\t\t} else {\n\t\t\tparts := strings.Split(pkg, \"/\")\n\t\t\tname = parts[len(parts)-1]\n\t\t}\n\t\tidx := 0\n\t\tuniq := name\n\t\tfor !isUniqueImport(f, uniq) {\n\t\t\tidx++\n\t\t\tuniq = name + strconv.Itoa(idx)\n\t\t}\n\n\t\tspec := &ast.ImportSpec{\n\t\t\tName: ast.NewIdent(uniq),\n\t\t\tPath: &ast.BasicLit{Kind: token.STRING, Value: path},\n\t\t}\n\t\tf.Imports = append(f.Imports, spec)\n\t\timports := f.Decls[0].(*ast.GenDecl)\n\t\tif uniq == name {\n\t\t\tspec = &ast.ImportSpec{Path: spec.Path}\n\t\t}\n\t\timports.Specs = append(imports.Specs, 
spec)\n\t\treturn ast.NewIdent(uniq)\n\t})\n}", "title": "" }, { "docid": "1b6b784493a23f31e257dc66e936934d", "score": "0.44148394", "text": "func (em *Em100) Load(filename string) error {\n\t// Check if file exists, but only log, command could run with sudo\n\t// and have access to a file not visible to us.\n\tinfo, err := os.Stat(filename)\n\tif err != nil {\n\t\tlog.Println(\"cannot stat\", filename, err)\n\t} else if info.IsDir() {\n\t\tlog.Printf(\"error %s is a directory\", filename)\n\t}\n\treturn em.run(\"-d\", filename, \"--start\")\n}", "title": "" }, { "docid": "677f1fdc35308f46d193d0bdb8208b6a", "score": "0.44120368", "text": "func Load() {\n\tCodeRoutes()\n}", "title": "" }, { "docid": "97521a74f3595b4041631f651f540472", "score": "0.44021705", "text": "func LoadFS(fs *MyFS) {\n\n\thash2mynode = make(map[string]MyNode)\n\n\t/* State.Root_version_bootstrap is Vid of root of filesystem */\n\tstr := fmt.Sprintf(\"%s:%s\", NODE_VERSION_KEY, State.Root_version_bootstrap)\n\tutil.P_out(\"key: %s\", str)\n\trootdirstr, err := storage.Get([]byte(str))\n\tif err != nil {\n\t\tutil.P_out(\"creating filesystem!\")\n\t\t/* key most likely doesn't exist */\n\t\tfs.RootDir = new(MyNode)\n\t\tfs.RootDir.Init(\"/\", os.ModeDir | 0755, nil)\n\t\tfs.RootDir.Vid = GenerateVersionId(fs.RootDir)\n\t\tupdateAncestors(fs.RootDir)\n\t} else {\n\t\tutil.P_out(\"loading filesystem!\")\n\t\tjson.Unmarshal(rootdirstr, &fs.RootDir)\n\t}\n\tAssertExpanded(fs.RootDir)\n\n\trand.Seed(int64(Pid))\n}", "title": "" }, { "docid": "64d4d3c17b834dbcf43dfbc8d7975c08", "score": "0.4399355", "text": "func Load(g *gin.Engine, mw ...gin.HandlerFunc) *gin.Engine {\n\n\tg.Use(gin.Recovery())\n\tg.Use(middleware.NoCache)\n\tg.Use(middleware.Options)\n\tg.Use(middleware.Secure)\n\tg.Use(mw...)\n\n\t//404\n\tg.NoRoute(func(context *gin.Context) {\n\t\tcontext.String(http.StatusNotFound, \"The incorrect API route. 
404\")\n\t})\n\n\t// The health check handlers\n\tapiGroup := g.Group(\"/api\")\n\n\tuser(apiGroup)\n\tsdFn(g)\n\n\treturn g\n}", "title": "" }, { "docid": "fbf0cd3512889882c607141de8c1bf17", "score": "0.43975472", "text": "func PackageA() {\n log.Println(\"Hello from packageA\")\n}", "title": "" }, { "docid": "5f28abd9d591bfc9e621d548d3d5da87", "score": "0.4383226", "text": "func MakeLoad() func(thread *starlark.Thread, module string) (starlark.StringDict, error) {\n\ttype entry struct {\n\t\tglobals starlark.StringDict\n\t\terr error\n\t}\n\n\tvar cache = make(map[string]*entry)\n\n\treturn func(thread *starlark.Thread, module string) (starlark.StringDict, error) {\n\t\te, ok := cache[module]\n\t\tif e == nil {\n\t\t\tif ok {\n\t\t\t\t// request for package whose loading is in progress\n\t\t\t\treturn nil, fmt.Errorf(\"cycle in load graph\")\n\t\t\t}\n\n\t\t\t// Add a placeholder to indicate \"load in progress\".\n\t\t\tcache[module] = nil\n\n\t\t\t// Load it.\n\t\t\tthread := &starlark.Thread{Load: thread.Load}\n\t\t\tglobals, err := starlark.ExecFile(thread, module, nil, nil)\n\t\t\te = &entry{globals, err}\n\n\t\t\t// Update the cache.\n\t\t\tcache[module] = e\n\t\t}\n\t\treturn e.globals, e.err\n\t}\n}", "title": "" }, { "docid": "f271ffc61c747437cd0f3a554c5db646", "score": "0.4378633", "text": "func Load() (*File, error) {\n\tcmd := exec.Command(\"go\", \"env\", \"GOMOD\")\n\tp, err := cmd.Output()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfile := strings.TrimSuffix(strings.Trim(string(p), \"\\n\"), \".mod\") + \".sum\"\n\n\tdata, err := ioutil.ReadFile(file)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn Parse(data)\n}", "title": "" }, { "docid": "5699dd3d9c9dbaa53535b0a19339ccb5", "score": "0.4372793", "text": "func (s *WorkloadStack) Package(cfg PackageConfig) error {\n\tcfg.s3Path = func(hash string) string {\n\t\treturn artifactpath.AddonAsset(s.workloadName, hash)\n\t}\n\treturn s.packageAssets(cfg)\n}", "title": "" }, { "docid": "e9bdc8b3d51d1419ec6a841ea107c597", "score": "0.43716055", "text": "func (t *Tracker) registerPackage(pkgPath string, md *hub.PackageMetadata, logoImageID string) error {\n\t// Prepare package from metadata\n\tp, err := pkg.PreparePackageFromMetadata(md)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error preparing package %s version %s from metadata: %w\", md.Name, md.Version, err)\n\t}\n\tp.LogoImageID = logoImageID\n\tp.Repository = t.r\n\n\t// Include kind specific data into package\n\tignorer, err := ignore.CompileIgnoreLines(md.Ignore...)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error processing package %s version %s ignore entries: %w\", md.Name, md.Version, err)\n\t}\n\tvar data map[string]interface{}\n\tswitch t.r.Kind {\n\tcase hub.Falco:\n\t\tdata, err = prepareFalcoData(pkgPath, ignorer)\n\tcase hub.OPA:\n\t\tdata, err = prepareOPAData(pkgPath, ignorer)\n\t}\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error preparing package %s version %s data: %w\", md.Name, md.Version, err)\n\t}\n\tp.Data = data\n\n\t// Register package\n\treturn t.svc.Pm.Register(t.svc.Ctx, p)\n}", "title": "" }, { "docid": "81d1d3cf06eadf01f466885193854cbe", "score": "0.43667763", "text": "func (s *DefaultService) LoadAllowNondistributableArtifacts(registries []string) error {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\n\treturn s.config.LoadAllowNondistributableArtifacts(registries)\n}", "title": "" }, { "docid": "4bf58b5b5b774870326eb37a4470a23f", "score": "0.43628466", "text": "func (c *Config) Load(serviceName string) error {\n\treturn 
envconfig.Process(serviceName, c)\n}", "title": "" }, { "docid": "84d54c06012d3f0625551acc3dea31e3", "score": "0.43600586", "text": "func init() {\n\tPackages[\"go/importer\"] = Package{\n\tBinds: map[string]Value{\n\t\t\"Default\":\tValueOf(importer.Default),\n\t\t\"For\":\tValueOf(importer.For),\n\t}, Types: map[string]Type{\n\t\t\"Lookup\":\tTypeOf((*importer.Lookup)(nil)).Elem(),\n\t}, \n\t}\n}", "title": "" }, { "docid": "7e93f98eda7730bd42db897009a3f517", "score": "0.4359121", "text": "func Package() {\n\tstart := time.Now()\n\tdefer func() { fmt.Println(\"package ran for\", time.Since(start)) }()\n\n\tdevtools.UseElasticBeatOSSPackaging()\n\tmetricbeat.CustomizePackaging()\n\n\tmg.Deps(Update, metricbeat.PrepareModulePackagingOSS)\n\tmg.Deps(CrossBuild, CrossBuildGoDaemon)\n\tmg.SerialDeps(devtools.Package, TestPackages)\n}", "title": "" }, { "docid": "fc16463b3d1e6736d0e08e5425d542b7", "score": "0.4355173", "text": "func (m *Engine) Require(name string) (otto.Value, error) {\n\t// return cached value if already loaded\n\tkey := sourceKey(name)\n\tif cache, ok := m.sourceCache.Load(key); ok {\n\t\tfmt.Printf(\"found module %s in cache\\n\", name)\n\t\treturn cache.(otto.Value), nil\n\t}\n\n\t// find a known asset in jsdata.go\n\tasset := assetName(name)\n\tif asset != \"\" {\n\t\t// load a known asset and add it to cache\n\t\tif err := m.registerResource(asset); err != nil {\n\t\t\treturn otto.UndefinedValue(), err\n\t\t}\n\t\t// return the newly added asset in cache\n\t\tif value, ok := m.sourceCache.Load(key); ok {\n\t\t\treturn value.(otto.Value), nil\n\t\t}\n\t}\n\treturn otto.UndefinedValue(), errors.Errorf(\"required module %s is not loaded\", name)\n}", "title": "" }, { "docid": "ecf4db4d618777040bed400b0beccc3a", "score": "0.43538532", "text": "func preloadModuleFromFS(L *lua.LState, fname string) error {\n\t// Derive package name by removing file extension\n\tpkgname := strings.TrimSuffix(fname, filepath.Ext(fname))\n\n\t// Read the file content\n\tb, err := fs.ReadFile(fname)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to read %s: %w\", fname, err)\n\t}\n\n\t// Load the Lua module\n\tmod, err := L.LoadString(string(b))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"could not load %s: %w\", fname, err)\n\t}\n\n\t// Get the \"package\" table\n\tpkg := L.GetField(L.Get(lua.EnvironIndex), \"package\")\n\tif pkg == lua.LNil {\n\t\treturn fmt.Errorf(\"unable to get 'package' table\")\n\t}\n\n\t// Get the \"preload\" table inside \"package\"\n\tpreload := L.GetField(pkg, \"preload\")\n\tif preload == lua.LNil {\n\t\treturn fmt.Errorf(\"unable to get 'preload' table\")\n\t}\n\n\tL.SetField(preload, pkgname, mod)\n\treturn nil\n}", "title": "" }, { "docid": "79e9c0ce3399eaee24dd8153bf4702aa", "score": "0.435096", "text": "func (node *Node) loadConfiguration() error {\n\tpackageJson := PackageJson{}\n\tpackageJsonFile, err := os.Open(\"package.json\")\n\tif err != nil {\n\t\treturn errors.New(\"a package.json file was not found in the project root directory\")\n\t}\n\tdefer packageJsonFile.Close()\n\n\tjsonParser := json.NewDecoder(packageJsonFile)\n\terr = jsonParser.Decode(&packageJson)\n\tif err != nil {\n\t\treturn errors.New(\"the package.json file is not properly formatted\")\n\t}\n\tnode.PackageJson.Scripts.Build = packageJson.Scripts.Build\n\tnode.PackageJson.Scripts.Format = packageJson.Scripts.Format\n\tnode.PackageJson.Scripts.Lint = packageJson.Scripts.Lint\n\tnode.PackageJson.Scripts.Test = packageJson.Scripts.Test\n\tnode.PackageJson.Scripts.TestUnit = 
packageJson.Scripts.TestUnit\n\n\treturn nil\n}", "title": "" }, { "docid": "f314868fa080653e0b38ac56ca16c2eb", "score": "0.4347108", "text": "func loadGodeps(directory string) (*Godeps, error) {\n\tfilename := path.Join(directory, manifest)\n\tf, err := os.Open(filename)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer f.Close()\n\tdecoder := json.NewDecoder(f)\n\tdeps := new(Godeps)\n\terr = decoder.Decode(deps)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn deps, nil\n}", "title": "" }, { "docid": "8ec6e263706c46421ddfb404f53460f2", "score": "0.43441772", "text": "func (pp Packages) Import(path string) (ImportablePackage, error) {\n\tif p, ok := pp[path]; ok {\n\t\treturn p, nil\n\t}\n\treturn nil, nil\n}", "title": "" }, { "docid": "486dd0669fcc0e8f2cdb4e8617699bd8", "score": "0.4341806", "text": "func (r *Runtime) Load(wasmBytes []byte) (*Module, error) {\n\tresult := C.m3Err_none\n\tbytes := C.CBytes(wasmBytes)\n\tlength := len(wasmBytes)\n\tvar module C.IM3Module\n\tresult = C.m3_ParseModule(\n\t\tr.cfg.Environment.Ptr(),\n\t\t&module,\n\t\t(*C.uchar)(bytes),\n\t\tC.uint(length),\n\t)\n\tif result != nil {\n\t\treturn nil, errParseModule\n\t}\n\tresult = C.m3_LoadModule(\n\t\tr.Ptr(),\n\t\tmodule,\n\t)\n\tif result != nil {\n\t\treturn nil, errLoadModule\n\t}\n\tresult = C.m3_LinkSpecTest(r.Ptr().modules)\n\tif result != nil {\n\t\treturn nil, errors.New(\"LinkSpecTest failed\")\n\t}\n\tif r.cfg.EnableWASI {\n\t\tC.m3_LinkWASI(r.Ptr().modules)\n\t}\n\tm := NewModule((ModuleT)(module))\n\treturn m, nil\n}", "title": "" }, { "docid": "9a6117263f941c801cfeeb03c77ace4a", "score": "0.43362445", "text": "func (lu *EnvLoadUnloader) Load(b []byte, nGrps []*node.Nodes) error {\n\treturn lu.loader.Load(b, nGrps)\n}", "title": "" }, { "docid": "39cd23491665bf97cac3f3fe15e03f81", "score": "0.4329688", "text": "func loadGopkgLock(rawToml []byte) pinnedPkgs {\n\tlock := struct {\n\t\tProjects []struct {\n\t\t\tName string `toml:\"name\"`\n\t\t\tRevision string `toml:\"revision\"`\n\t\t\t// There are other fields, but we don't use them\n\t\t} `toml:\"projects\"`\n\t\t// There are other fields, but we don't use them\n\t}{}\n\terr := toml.Unmarshal(rawToml, &lock)\n\tif err != nil {\n\t\treturn nil\n\t}\n\n\tpkgs := make(pinnedPkgs, 0, len(lock.Projects))\n\tfor _, p := range lock.Projects {\n\t\tpkgs = append(pkgs, pinnedPkg{Pkg: p.Name + \"/\", Rev: p.Revision})\n\t}\n\tsort.Sort(pkgs)\n\treturn pkgs\n}", "title": "" }, { "docid": "29e68ed28a21de5e04189a3d8cee3a85", "score": "0.43288347", "text": "func (i *ImageLoader) loadOperatorBundle(manifest registry.PackageManifest, bundle registry.Bundle) error {\n\tif manifest.PackageName == \"\" {\n\t\treturn nil\n\t}\n\n\tif err := i.store.AddBundlePackageChannels(manifest, bundle); err != nil {\n\t\treturn fmt.Errorf(\"error loading bundle into db: %s\", err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "64d2f3714318f1543f64064eb1ba9033", "score": "0.43249166", "text": "func (env *Jenv) Load() error {\n\tenvFile, err := os.Open(env.FilePath)\n\tif err != nil {\n\t\treturn errors.New(\"File Not Found\")\n\t}\n\tdefer envFile.Close()\n\n\tdecoder := json.NewDecoder(envFile)\n\terr = decoder.Decode(&env.Data)\n\tif err != nil {\n\t\treturn errors.New(\"Decode Error\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "5042303d389ceb0a8ad4635d59d22ac9", "score": "0.4317145", "text": "func LoadPackageFromPath(path string) (*Package, error) {\n\td, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpkg := 
&Package{\n\t\tWorkingDir: \"/workdir\",\n\t}\n\tif err = yaml.Unmarshal(d, pkg); err != nil {\n\t\treturn pkg, err\n\t}\n\tif pkg.RequiredVersion != \"\" {\n\t\tif err := version.CheckCompatible(pkg.RequiredVersion); err != nil {\n\t\t\treturn pkg, err\n\t\t}\n\t}\n\treturn pkg, nil\n}", "title": "" }, { "docid": "16893d74cf2c8973a4aaf38fb52affab", "score": "0.43138295", "text": "func loadFactory(context *cli.Context) (libcontainer.Factory, error) {\n\troot := context.GlobalString(\"root\")\n\tabs, err := filepath.Abs(root)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn libcontainer.New(abs, libcontainer.Cgroupfs, func(l *libcontainer.LinuxFactory) error {\n\t\tl.CriuPath = context.GlobalString(\"criu\")\n\t\treturn nil\n\t})\n}", "title": "" }, { "docid": "c6dba6b5c086c78f4c75f018fe3c6971", "score": "0.43115693", "text": "func (li *LinuxImage) Load(verbose bool) error {\n\tloadedImage, cleanup, err := loadLinuxImage(li, verbose)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer cleanup()\n\n\tif li.LoadSyscall {\n\t\treturn linux.KexecLoad(loadedImage.Kernel, loadedImage.Initrd, loadedImage.Cmdline, loadedImage.KexecOpts)\n\t}\n\treturn kexec.FileLoad(loadedImage.Kernel, loadedImage.Initrd, loadedImage.Cmdline)\n}", "title": "" }, { "docid": "045e21d4c213761798d3737c2303ea5a", "score": "0.43108127", "text": "func (loadGenerator loadGenerator) Load(injectionRate int, duration time.Duration) (executor.TaskHandle, error) {\n\tif len(loadGenerator.transactionInjectors) == 0 {\n\t\treturn nil, errors.New(\"Cannot generate load with empty transaction injector executors\")\n\t}\n\n\tloadCommand := getControllerLoadCommand(loadGenerator.config, injectionRate, duration)\n\tcontrollerHandle, err := loadGenerator.controller.Execute(loadCommand)\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"execution of SPECjbb Load Generator failed. command: %q\", loadCommand)\n\t}\n\ttxIHandles, err := loadGenerator.runTransactionInjectors()\n\tif err != nil {\n\t\treturn nil, errors.Errorf(\"execution of SPECjbb HBIR RT transaction injectors failed with error: %s\", err.Error())\n\t}\n\n\treturn executor.NewClusterTaskHandle(controllerHandle, txIHandles), nil\n}", "title": "" }, { "docid": "111a2d292d9081614bde69c2a8858622", "score": "0.4307832", "text": "func (goH *Handler) install(pkg Package) error {\n\toutput.Info(\"📦 GoGet\\tInstalling Package %s\", pkg)\n\terr := goH.goGet(pkg)\n\tif err != nil {\n\t\treturn err\n\t}\n\toutput.Success(\"📦 GoGet\\tInstalled Package %s\", pkg)\n\treturn nil\n}", "title": "" }, { "docid": "53f952d6d961bbe373d9bc3693f8cbf6", "score": "0.43028322", "text": "func (rp *RevelContainer) loadModules(callback RevelCallback) (err error) {\n\tkeys := []string{}\n\tkeys = append(keys, rp.Config.Options(\"module.\")...)\n\n\t// Reorder module order by key name, a poor mans sort but at least it is consistent\n\tsort.Strings(keys)\n\tfor _, key := range keys {\n\t\tmoduleImportPath := rp.Config.StringDefault(key, \"\")\n\t\tif moduleImportPath == \"\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tmodulePath, err := rp.ResolveImportPath(moduleImportPath)\n\t\tif err != nil {\n\t\t\tutils.Logger.Info(\"Missing module \", \"module_import_path\", moduleImportPath, \"error\", err)\n\n\t\t\tif err := callback.PackageResolver(moduleImportPath); err != nil {\n\t\t\t\treturn fmt.Errorf(\"failed to resolve package %w\", err)\n\t\t\t}\n\n\t\t\tmodulePath, err = rp.ResolveImportPath(moduleImportPath)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"failed to load module. 
Import of path failed %s:%s %s:%w \", \"modulePath\", moduleImportPath, \"error\", err)\n\t\t\t}\n\t\t}\n\t\t// Drop anything between module.???.<name of module>\n\t\tname := key[len(\"module.\"):]\n\t\tif index := strings.Index(name, \".\"); index > -1 {\n\t\t\tname = name[index+1:]\n\t\t}\n\t\tcallback.FireEvent(REVEL_BEFORE_MODULE_LOADED, []interface{}{rp, name, moduleImportPath, modulePath})\n\t\trp.addModulePaths(name, moduleImportPath, modulePath)\n\t\tcallback.FireEvent(REVEL_AFTER_MODULE_LOADED, []interface{}{rp, name, moduleImportPath, modulePath})\n\t}\n\treturn\n}", "title": "" }, { "docid": "6ab03595308822c20b2b5989cb2b5641", "score": "0.42917898", "text": "func Load(path string, unsafe...bool) (*Json, error) {\n return LoadContent(gfcache.GetBinContents(path), unsafe...)\n}", "title": "" }, { "docid": "065bcf23fad000b3f8e821d998c9bab0", "score": "0.42897034", "text": "func LoadGlobalGo(L *lua.LState, name string) {\n\tif name == \"exec\" {\n\t\tL.SetGlobal(\"exec\", L.NewTable())\n\t\tnsExec := L.GetField(L.Get(lua.EnvironIndex), \"exec\")\n\t\tL.SetField(nsExec, \"command\", L.NewFunction(ll.ExecCommand))\n\t\tL.SetField(nsExec, \"ctx\", L.NewFunction(ll.ExecCtx))\n\t\treturn\n\t}\n\tif name == \"fs\" {\n\t\tnsFs := L.SetFuncs(L.NewTable(), lfs.Api)\n\t\tL.SetGlobal(\"fs\", nsFs)\n\t\tL.SetField(nsFs, \"isdir\", L.NewFunction(ll.FsIsdir))\n\t\tL.SetField(nsFs, \"isfile\", L.NewFunction(ll.FsIsfile))\n\t\tL.SetField(nsFs, \"read\", L.NewFunction(ll.FsRead))\n\t\tL.SetField(nsFs, \"write\", L.NewFunction(ll.FsWrite))\n\t\treturn\n\t}\n\tif name == \"os\" {\n\t\tnsOs := L.GetField(L.Get(lua.EnvironIndex), \"os\")\n\t\tL.SetField(nsOs, \"hostname\", L.NewFunction(ll.OsHostname))\n\t\tL.SetField(nsOs, \"outbound_ip\", L.NewFunction(ll.OsOutboundIP))\n\t\tL.SetField(nsOs, \"sleep\", L.NewFunction(ll.OsSleep))\n\t\treturn\n\t}\n\tif name == \"extend\" {\n\t\tL.SetGlobal(\"extend\", L.NewFunction(ll.Extend))\n\t\treturn\n\t}\n\tL.RaiseError(\"Unknown module.\")\n}", "title": "" }, { "docid": "05a22c02b389fe0ba719adf4dbc92c3b", "score": "0.4287109", "text": "func (intr *Interpreter) LoadModule(ctx context.Context, pkg, path string) (starlark.StringDict, error) {\n\tkey, err := MakeModuleKey(nil, fmt.Sprintf(\"@%s//%s\", pkg, path))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// If the module has been 'exec'-ed previously it is not allowed to be loaded.\n\t// Modules are either 'library-like' or 'script-like', not both.\n\tif _, yes := intr.execed[key]; yes {\n\t\treturn nil, errors.New(\"the module has been exec'ed before and therefore is not loadable\")\n\t}\n\n\tswitch m, ok := intr.modules[key]; {\n\tcase m != nil: // already loaded or attempted and failed\n\t\treturn m.dict, m.err\n\tcase ok:\n\t\t// This module is being loaded right now, Starlark stack trace will show\n\t\t// the sequence of load(...) 
calls that led to this cycle.\n\t\treturn nil, errors.New(\"cycle in the module dependency graph\")\n\t}\n\n\t// Add a placeholder to indicate we are loading this module to detect cycles.\n\tintr.modules[key] = nil\n\n\tm := &loadedModule{\n\t\terr: fmt.Errorf(\"panic when loading %q\", key), // overwritten on non-panic\n\t}\n\tdefer func() { intr.modules[key] = m }()\n\n\tm.dict, m.err = intr.runModule(ctx, key, ThreadLoading)\n\treturn m.dict, m.err\n}", "title": "" }, { "docid": "d2f1351a333c00cf827131c1e5cd7882", "score": "0.42848596", "text": "func LoadExternalRepo(args *Args, repo *ExternalRepo) error {\r\n\t// Fetch the repo.\r\n\terr := fetchGit(args, repo)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\r\n\treturn nil\r\n}", "title": "" }, { "docid": "91d23b47dbdc415604088677dccb1d40", "score": "0.4280824", "text": "func (hr *Repository) Load(name string) (*Hub, bool) {\n\tif n, ok := hr.hubs.Load(name); ok {\n\t\treturn n.(*Hub), ok\n\t}\n\treturn nil, false\n}", "title": "" }, { "docid": "9fd6e0ab272ef240973875af770bfdc8", "score": "0.42763525", "text": "func (node *Node) setupNpm() {\n\tnode.Javascript.Language.Command.Build = append(\n\t\tnode.Javascript.Language.Command.Build,\n\t\t\"npm ci\",\n\t)\n}", "title": "" }, { "docid": "b5f5f4a50a38414eac69a8a9afd89103", "score": "0.42758518", "text": "func (r *Runtime) LoadModule(module *Module) (*Module, error) {\n\tresult := C.m3Err_none\n\tresult = C.m3_LoadModule(\n\t\tr.Ptr(),\n\t\tmodule.Ptr(),\n\t)\n\tif result != nil {\n\t\treturn nil, errLoadModule\n\t}\n\tif r.cfg.EnableSpecTest {\n\t\tC.m3_LinkSpecTest(r.Ptr().modules)\n\t}\n\tif r.cfg.EnableWASI {\n\t\tC.m3_LinkWASI(r.Ptr().modules)\n\t}\n\treturn module, nil\n}", "title": "" }, { "docid": "5f825778884b7a6b323b8fe06c2ecb09", "score": "0.4273431", "text": "func Load(path string) (err error) {\n\tdefer func() {\n\t\tif v := recover(); v != nil {\n\t\t\trerr, ok := v.(error)\n\t\t\tif !ok {\n\t\t\t\trerr = fmt.Errorf(\"%s\", v)\n\t\t\t}\n\t\t\terr = rerr\n\t\t}\n\t}()\n\treturn loadPlugins(path)\n}", "title": "" }, { "docid": "ab3918ed34aa39b61bf541fdbdef09ac", "score": "0.42726803", "text": "func Load(config interface{}) error {\n\treturn LoadNamedConfig(\"onos\", config)\n}", "title": "" }, { "docid": "5a40f78a1cf279d3969bc7eec9efa73b", "score": "0.42693815", "text": "func Register() {\n\trequire.Register()\n\ttransform.AddFileTransformer(\".js\", NodeTransform)\n}", "title": "" }, { "docid": "37fcec7ccbe1f67188f040815639f4c2", "score": "0.42677933", "text": "func loadNode(c context.Context, db gorp.SqlExecutor, store cache.Store, proj *sdk.Project, w *sdk.Workflow, id int64, u *sdk.User, opts LoadOptions) (*sdk.WorkflowNode, error) {\n\tc, end := observability.Span(c, \"workflow.loadNode\",\n\t\tobservability.Tag(observability.TagWorkflow, w.Name),\n\t\tobservability.Tag(observability.TagProjectKey, proj.Key),\n\t\tobservability.Tag(\"with_pipeline\", opts.DeepPipeline),\n\t\tobservability.Tag(\"only_root\", opts.OnlyRootNode),\n\t\tobservability.Tag(\"with_base64_keys\", opts.Base64Keys),\n\t\tobservability.Tag(\"without_node\", opts.WithoutNode),\n\t)\n\tdefer end()\n\n\tdbwn := Node{}\n\tif err := db.SelectOne(&dbwn, \"select * from workflow_node where workflow_id = $1 and id = $2\", w.ID, id); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil, sdk.ErrWorkflowNodeNotFound\n\t\t}\n\t\treturn nil, err\n\t}\n\n\twn := sdk.WorkflowNode(dbwn)\n\twn.WorkflowID = w.ID\n\twn.Ref = fmt.Sprintf(\"%d\", dbwn.ID)\n\n\tif !opts.OnlyRootNode {\n\t\t//Load 
triggers\n\t\ttriggers, errTrig := loadTriggers(c, db, store, proj, w, &wn, u, opts)\n\t\tif errTrig != nil {\n\t\t\treturn nil, sdk.WrapError(errTrig, \"LoadNode> Unable to load triggers of %d\", id)\n\t\t}\n\t\twn.Triggers = triggers\n\n\t\t// Load outgoing hooks\n\t\tohooks, errHooks := loadOutgoingHooks(c, db, store, proj, w, &wn, u, opts)\n\t\tif errHooks != nil {\n\t\t\treturn nil, sdk.WrapError(errHooks, \"LoadNode> Unable to load outgoing hooks of %d\", id)\n\t\t}\n\t\twn.OutgoingHooks = ohooks\n\n\t\t// load forks\n\t\tforks, errForks := loadForks(c, db, store, proj, w, &wn, u, opts)\n\t\tif errForks != nil {\n\t\t\treturn nil, sdk.WrapError(errForks, \"LoadNode> Unable to load forks of %d\", id)\n\t\t}\n\t\twn.Forks = forks\n\t}\n\n\t//Load context\n\tctx, errCtx := LoadNodeContext(db, store, proj, wn.ID, u, opts)\n\tif errCtx != nil {\n\t\treturn nil, sdk.WrapError(errCtx, \"LoadNode> Unable to load context of %d\", id)\n\t}\n\twn.Context = ctx\n\n\t// Add application in maps\n\tif w.Applications == nil {\n\t\tw.Applications = map[int64]sdk.Application{}\n\t}\n\tif ctx.Application != nil {\n\t\tw.Applications[ctx.Application.ID] = *ctx.Application\n\t}\n\n\t// Add environment in maps\n\tif w.Environments == nil {\n\t\tw.Environments = map[int64]sdk.Environment{}\n\t}\n\tif ctx.Environment != nil {\n\t\tw.Environments[ctx.Environment.ID] = *ctx.Environment\n\t}\n\n\t//Load hooks\n\thooks, errHooks := loadHooks(db, w, &wn)\n\tif errHooks != nil {\n\t\treturn nil, sdk.WrapError(errHooks, \"LoadNode> Unable to load hooks of %d\", id)\n\t}\n\twn.Hooks = hooks\n\n\t//Load pipeline\n\tif w.Pipelines == nil {\n\t\tw.Pipelines = map[int64]sdk.Pipeline{}\n\t}\n\tpip, has := w.Pipelines[wn.PipelineID]\n\tif !has {\n\t\tnewPip, err := pipeline.LoadPipelineByID(c, db, wn.PipelineID, opts.DeepPipeline)\n\t\tif err != nil {\n\t\t\treturn nil, sdk.WrapError(err, \"Unable to load pipeline of %d\", id)\n\t\t}\n\n\t\tw.Pipelines[wn.PipelineID] = *newPip\n\t\tpip = *newPip\n\t}\n\twn.PipelineName = pip.Name\n\n\tif wn.Name == \"\" {\n\t\twn.Name = pip.Name\n\t}\n\n\treturn &wn, nil\n}", "title": "" }, { "docid": "ce93b1dbbdddc8654c2164190251eb5d", "score": "0.42550284", "text": "func (sys *system) discoverPackages() error {\n\tif sys.packages != nil {\n\t\treturn nil\n\t}\n\n\tsys.packages = make(map[idset.ID]*cpuPackage)\n\n\tfor _, cpu := range sys.cpus {\n\t\tpkg, found := sys.packages[cpu.pkg]\n\t\tif !found {\n\t\t\tpkg = &cpuPackage{\n\t\t\t\tid: cpu.pkg,\n\t\t\t\tcpus: idset.NewIDSet(),\n\t\t\t\tnodes: idset.NewIDSet(),\n\t\t\t\tdies: idset.NewIDSet(),\n\t\t\t\tdieCPUs: make(map[idset.ID]idset.IDSet),\n\t\t\t\tdieNodes: make(map[idset.ID]idset.IDSet),\n\t\t\t}\n\t\t\tsys.packages[cpu.pkg] = pkg\n\t\t}\n\t\tpkg.cpus.Add(cpu.id)\n\t\tpkg.nodes.Add(cpu.node)\n\t\tpkg.dies.Add(cpu.die)\n\n\t\tif dieCPUs, ok := pkg.dieCPUs[cpu.die]; !ok {\n\t\t\tpkg.dieCPUs[cpu.die] = idset.NewIDSet(cpu.id)\n\t\t} else {\n\t\t\tdieCPUs.Add(cpu.id)\n\t\t}\n\t\tif dieNodes, ok := pkg.dieNodes[cpu.die]; !ok {\n\t\t\tpkg.dieNodes[cpu.die] = idset.NewIDSet(cpu.node)\n\t\t} else {\n\t\t\tdieNodes.Add(cpu.node)\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "cf1526b36b793860c29da54c081404c3", "score": "0.42526534", "text": "func (m *Engine) LoadModule(source string) (otto.Value, error) {\n\t// Wraps the source to create a module environment\n\tsource = \"(function(module) {var require = module.require;var exports = module.exports;var __dirname = module.__dirname;\\n\" + source + \"\\n})\"\n\n\t// Provide the 
\"require\" method in the module scope.\n\tjsRequire := func(call otto.FunctionCall) otto.Value {\n\t\tjsModuleName := call.Argument(0).String()\n\n\t\tmoduleValue, err := m.Require(jsModuleName)\n\t\tif err != nil {\n\t\t\tjsException(m, \"failed to load required module \"+jsModuleName+\": \"+err.Error())\n\t\t\treturn otto.UndefinedValue()\n\t\t}\n\t\treturn moduleValue\n\t}\n\n\tjsModule, _ := m.Object(`({exports: {}})`)\n\tjsModule.Set(\"require\", jsRequire)\n\tjsModule.Set(\"__dirname\", \"\")\n\tjsExports, _ := jsModule.Get(\"exports\")\n\n\t// Run the module source, with \"jsModule\" as the \"module\" variable, \"jsExports\" as \"this\"(Nodejs capable).\n\tmoduleReturn, err := m.Call(source, jsExports, jsModule)\n\tif err != nil {\n\t\treturn otto.UndefinedValue(), err\n\t}\n\n\tif !moduleReturn.IsUndefined() {\n\t\tjsModule.Set(\"exports\", moduleReturn)\n\t\treturn moduleReturn, nil\n\t}\n\treturn jsModule.Get(\"exports\")\n}", "title": "" }, { "docid": "2132c8eeca78403502ac8537da52827c", "score": "0.42483953", "text": "func (ctx Context) Load(name string) (interface{}, error) {\n\tif app, ok := ctx.required[name]; ok {\n\t\tif instancer, ok := app.instance.(Instancer); ok {\n\t\t\treturn instancer.Instance(), nil\n\t\t}\n\t\treturn app.instance, nil\n\t}\n\treturn nil, fmt.Errorf(\"app %q is not required\", name)\n}", "title": "" }, { "docid": "805c026aa3c5bc5838c672aede57628f", "score": "0.4244351", "text": "func (c NodeNpm) Install(path string) {\n\tif composerFile := HasPackage(path); composerFile != \"\" {\n\t\tif err := NpmInstall(composerFile, c.ContainerOpts); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "f638cec5807b66e67190cbac5233a094", "score": "0.42438677", "text": "func PublishPackage(w http.ResponseWriter, req *http.Request, next http.HandlerFunc) {\n\tvar err error\n\tstorage := StorageFromContext(req.Context())\n\trenderer := RendererFromContext(req.Context())\n\n\tpkgInfo, err := gabs.ParseJSONBuffer(req.Body)\n\tif err != nil {\n\t\trenderer.JSON(w, http.StatusBadRequest, map[string]string{\"error\": \"bad request\"})\n\t\treturn\n\t}\n\tdefer req.Body.Close()\n\n\tpkgName, _ := pkgInfo.Path(\"name\").Data().(string)\n\n\t// Check if we have 1 dist-tag\n\tdistTags, _ := pkgInfo.Path(\"dist-tags\").ChildrenMap()\n\tif len(distTags) != 1 {\n\t\trenderer.JSON(w, http.StatusBadRequest, map[string]string{\"error\": \"must have 1 dist-tag\"})\n\t\treturn\n\t}\n\n\t// Check if version of this package already exists\n\tvar tag string\n\tfor k, _ := range distTags {\n\t\ttag = k\n\t\tbreak\n\t}\n\tnewVersion := distTags[tag].Data().(string)\n\tnewVersions, _ := pkgInfo.Path(\"versions\").ChildrenMap()\n\n\tnewVersions = packages.RewriteScopedTarballs(pkgName, newVersions)\n\n\tlogger := log.WithFields(log.Fields{\n\t\t\"package\": pkgName,\n\t\t\"version\": newVersion,\n\t})\n\tlogger.Info(\"Processing package\")\n\n\t// 1. grab existing versions if package already exists\n\tmetaFile := new(bytes.Buffer)\n\terr = storage.RetrieveMetadata(pkgName, metaFile)\n\tif err == nil { // package is already known\n\t\tparsedMeta, err := gabs.ParseJSONBuffer(metaFile)\n\t\tif err != nil {\n\t\t\tlogger.Error(err)\n\t\t\treturn\n\t\t}\n\t\texistingVersions, _ := parsedMeta.Path(\"versions\").ChildrenMap()\n\n\t\t// 2. 
check if version already exists\n\t\tif _, exists := existingVersions[newVersion]; exists {\n\t\t\trenderer.JSON(w, http.StatusBadRequest, map[string]string{\n\t\t\t\t\"error\": \"version already exists\",\n\t\t\t})\n\t\t\treturn\n\t\t}\n\n\t\t// 3. add dist-tag version to existing versions\n\t\tfor v, obj := range existingVersions {\n\t\t\tpkgInfo.Set(obj.Data(), \"versions\", v)\n\t\t}\n\t\tpkgInfo.Set(newVersions[newVersion].Data(), \"versions\", newVersion)\n\t}\n\n\tattachments, _ := pkgInfo.Path(\"_attachments\").ChildrenMap()\n\tfor filename, attachment := range attachments {\n\t\tlogger.WithFields(log.Fields{\"attachment\": filename}).Info(\"Decoding...\")\n\t\tdata := attachment.Path(\"data\").Data().(string)\n\t\tbuff, err := base64.StdEncoding.DecodeString(data)\n\t\tif err != nil {\n\t\t\tlogger.Error(err)\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\tstorage.StoreTarball(filename, bytes.NewReader(buff))\n\t}\n\n\t// 3. delete _attachments from JSON payload\n\tpkgInfo.Delete(\"_attachments\")\n\n\t// 4. store metadata blob\n\tstorage.StoreMetadata(pkgName, strings.NewReader(pkgInfo.String()))\n}", "title": "" }, { "docid": "d5e1f7eb64ba82a2e81f2c82543f218d", "score": "0.42422786", "text": "func (ld *loader) loadFromExportData(lpkg *Package) (*types.Package, error) {\n\tif lpkg.PkgPath == \"\" {\n\t\tlog.Fatalf(\"internal error: Package %s has no PkgPath\", lpkg)\n\t}\n\n\t// Because gcexportdata.Read has the potential to create or\n\t// modify the types.Package for each node in the transitive\n\t// closure of dependencies of lpkg, all exportdata operations\n\t// must be sequential. (Finer-grained locking would require\n\t// changes to the gcexportdata API.)\n\t//\n\t// The exportMu lock guards the Package.Pkg field and the\n\t// types.Package it points to, for each Package in the graph.\n\t//\n\t// Not all accesses to Package.Pkg need to be protected by exportMu:\n\t// graph ordering ensures that direct dependencies of source\n\t// packages are fully loaded before the importer reads their Pkg field.\n\tld.exportMu.Lock()\n\tdefer ld.exportMu.Unlock()\n\n\tif tpkg := lpkg.Type; tpkg != nil && tpkg.Complete() {\n\t\treturn tpkg, nil // cache hit\n\t}\n\n\tlpkg.IllTyped = true // fail safe\n\n\tif lpkg.export == \"\" {\n\t\t// Errors while building export data will have been printed to stderr.\n\t\treturn nil, fmt.Errorf(\"no export data file\")\n\t}\n\tf, err := os.Open(lpkg.export)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer f.Close()\n\n\t// Read gc export data.\n\t//\n\t// We don't currently support gccgo export data because all\n\t// underlying workspaces use the gc toolchain. (Even build\n\t// systems that support gccgo don't use it for workspace\n\t// queries.)\n\tr, err := gcexportdata.NewReader(f)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"reading %s: %v\", lpkg.export, err)\n\t}\n\n\t// Build the view.\n\t//\n\t// The gcexportdata machinery has no concept of package ID.\n\t// It identifies packages by their PkgPath, which although not\n\t// globally unique is unique within the scope of one invocation\n\t// of the linker, type-checker, or gcexportdata.\n\t//\n\t// So, we must build a PkgPath-keyed view of the global\n\t// (conceptually ID-keyed) cache of packages and pass it to\n\t// gcexportdata, then copy back to the global cache any newly\n\t// created entries in the view map. 
The view must contain every\n\t// existing package that might possibly be mentioned by the\n\t// current package---its reflexive transitive closure.\n\t//\n\t// (Yes, reflexive: although loadRecursive processes source\n\t// packages in topological order, export data packages are\n\t// processed only lazily within Importer calls. In the graph\n\t// A->B->C, A->C where A is a source package and B and C are\n\t// export data packages, processing of the A->B and A->C import\n\t// edges may occur in either order, depending on the sequence\n\t// of imports within A. If B is processed first, and its export\n\t// data mentions C, an imcomplete package for C will be created\n\t// before processing of C.)\n\t// We could do export data processing in topological order using\n\t// loadRecursive, but there's no parallelism to be gained.\n\t//\n\t// TODO(adonovan): it would be more simpler and more efficient\n\t// if the export data machinery invoked a callback to\n\t// get-or-create a package instead of a map.\n\t//\n\tview := make(map[string]*types.Package) // view seen by gcexportdata\n\tseen := make(map[*Package]bool) // all visited packages\n\tvar copyback []*Package // candidates for copying back to global cache\n\tvar visit func(p *Package)\n\tvisit = func(p *Package) {\n\t\tif !seen[p] {\n\t\t\tseen[p] = true\n\t\t\tif p.Type != nil {\n\t\t\t\tview[p.PkgPath] = p.Type\n\t\t\t} else {\n\t\t\t\tcopyback = append(copyback, p)\n\t\t\t}\n\t\t\tfor _, p := range p.Imports {\n\t\t\t\tvisit(p)\n\t\t\t}\n\t\t}\n\t}\n\tvisit(lpkg)\n\n\t// Parse the export data.\n\t// (May create/modify packages in view.)\n\ttpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"reading %s: %v\", lpkg.export, err)\n\t}\n\n\t// For each newly created types.Package in the view,\n\t// save it in the main graph.\n\tfor _, p := range copyback {\n\t\tp.Type = view[p.PkgPath] // may still be nil\n\t}\n\n\tlpkg.Type = tpkg\n\tlpkg.IllTyped = false\n\n\treturn tpkg, nil\n}", "title": "" }, { "docid": "2848a4884b6e0983660c83d280ee5d2f", "score": "0.423731", "text": "func (li *loaderImporter) Import(path string) (*types.Package, error) {\n\tdp, err := li.locatePackages(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif dp == nil {\n\t\treturn nil, fmt.Errorf(\"Path parsed, but does not contain package %s\", path)\n\t}\n\n\treturn dp.typesPkg, nil\n}", "title": "" }, { "docid": "99436a90f62c729b704dd915d4439170", "score": "0.42366123", "text": "func (t *transpilerState) requireImport(pkgpath string) *ast.Identifier {\n\t// Check to see if the import already exists.\n\tfor _, decl := range t.file.Imports {\n\t\tif decl.Path.Value == pkgpath {\n\t\t\treturn decl.As\n\t\t}\n\t}\n\n\t// Append the import to the file.\n\tas := &ast.Identifier{\n\t\tName: filepath.Base(pkgpath),\n\t}\n\tt.file.Imports = append(t.file.Imports, &ast.ImportDeclaration{\n\t\tPath: &ast.StringLiteral{\n\t\t\tValue: pkgpath,\n\t\t},\n\t\tAs: as,\n\t})\n\treturn as\n}", "title": "" }, { "docid": "94632b6d21835a99e1796a84272db967", "score": "0.42251176", "text": "func loadPrebuiltPackages(manifest string) ([]PrebuiltPackage, error) {\n\tf, err := os.Open(manifest)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to open %s: %w\", manifest, err)\n\t}\n\tdefer f.Close()\n\tvar pkgs []PrebuiltPackage\n\tif err := json.NewDecoder(f).Decode(&pkgs); err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to decode %s: %w\", manifest, err)\n\t}\n\treturn pkgs, nil\n}", "title": "" }, { "docid": 
"4979196493e7d39ab36dc1773a35a86c", "score": "0.42192215", "text": "func (rslv *Resolver) getNPMPackages(path string) map[string]bool {\n\tif rslv.npmPackages != nil {\n\t\treturn rslv.npmPackages\n\t}\n\n\tvar packageJSON struct {\n\t\tDependencies map[string]string `json:\"dependencies\"`\n\t\tDevDependencies map[string]string `json:\"devDependencies\"`\n\t}\n\n\t// Read in and unmarshall package.json file.\n\tb, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\tlog.Panicf(\"Error reading file %q: %v\", path, err)\n\t}\n\tif err := json.Unmarshal(b, &packageJSON); err != nil {\n\t\tlog.Panicf(\"Error parsing %s: %v\", path, err)\n\t}\n\n\t// Extract all NPM packages found in the package.json file.\n\trslv.npmPackages = map[string]bool{}\n\tfor pkg := range packageJSON.Dependencies {\n\t\trslv.npmPackages[pkg] = true\n\t}\n\tfor pkg := range packageJSON.DevDependencies {\n\t\trslv.npmPackages[pkg] = true\n\t}\n\n\treturn rslv.npmPackages\n}", "title": "" }, { "docid": "1432bd4531c2bdb152cf7e1e74c796f6", "score": "0.421898", "text": "func Package() {\n\tstart := time.Now()\n\tdefer func() { fmt.Println(\"package ran for\", time.Since(start)) }()\n\n\tdevtools.UseElasticBeatXPackPackaging()\n\tdevtools.PackageKibanaDashboardsFromBuildDir()\n\tfilebeat.CustomizePackaging()\n\n\tmg.Deps(Update)\n\tmg.Deps(CrossBuild, CrossBuildGoDaemon)\n\tmg.SerialDeps(devtools.Package, TestPackages)\n}", "title": "" }, { "docid": "99363b959a68ccb991d7c23ae6b494a5", "score": "0.4204758", "text": "func Load(path *string) error {\n\tif env := viper.GetString(XLiteMode); env != \"\" {\n\t\tviper.SupportedRemoteProviders = []string{\"etcd\", \"apollo\"}\n\t\tenv = strings.ToUpper(env)\n\t\tif env != XLiteModeLt && env != XLiteModeDev && env != XLiteModeTest && env != XLiteModeProduct {\n\t\t\treturn errors.Format(\"x-lite env should be set %s OR %s OR %s OR %s\", XLiteModeLt, XLiteModeDev, XLiteModeTest, XLiteModeProduct)\n\t\t}\n\t\tif err := LoadRemote(*path); err != nil {\n\t\t\treturn errors.By(err)\n\t\t}\n\n\t\treturn nil\n\t}\n\n\tviper.AddConfigPath(*path)\n\tviper.SetConfigName(\"config\")\n\tviper.SetConfigType(\"yml\")\n\n\terr := viper.ReadInConfig()\n\tif err != nil {\n\t\treturn errors.By(err)\n\t}\n\tLocalViper = viper.GetViper()\n\tviper.WatchConfig()\n\treturn nil\n}", "title": "" } ]
b79d95ad1c27b0ee0aa919fb50add350
GetTransformationsOk returns a tuple with the Transformations field value if set, nil otherwise, and a boolean to check if the value has been set.
[ { "docid": "4e245957a2362ee34d2a4211cc62b814", "score": "0.83720285", "text": "func (o *MetricDescriptor) GetTransformationsOk() (*[]string, bool) {\n\tif o == nil || o.Transformations == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Transformations, true\n}", "title": "" } ]
[ { "docid": "28b27621b1afece7562e629b1a2cd83f", "score": "0.6684184", "text": "func (o *MetricDescriptor) HasTransformations() bool {\n\tif o != nil && o.Transformations != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "8ca022f1801ccb23a3f63e478b701cf0", "score": "0.59994", "text": "func (o *Ga4ghComponent) GetTreatmentsOk() (Ga4ghSearchTreatmentsRequest, bool) {\n\tif o == nil || o.Treatments == nil {\n\t\tvar ret Ga4ghSearchTreatmentsRequest\n\t\treturn ret, false\n\t}\n\treturn *o.Treatments, true\n}", "title": "" }, { "docid": "aac848cdb55d9d439b1f5df266bc7292", "score": "0.5916629", "text": "func (o *HyperflexInitiatorGroup) GetTargetsOk() ([]HyperflexTargetRelationship, bool) {\n\tif o == nil || o.Targets == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Targets, true\n}", "title": "" }, { "docid": "4d6cc5e46e77feeacd7de383bc261a45", "score": "0.5863593", "text": "func (o *FlowDTO) GetProcessorsOk() (*[]ProcessorEntity, bool) {\n\tif o == nil || o.Processors == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Processors, true\n}", "title": "" }, { "docid": "a9ffe0a2674436a18ffe9b7410d24064", "score": "0.5796879", "text": "func (o *MetricDescriptor) GetTransformations() []string {\n\tif o == nil || o.Transformations == nil {\n\t\tvar ret []string\n\t\treturn ret\n\t}\n\treturn *o.Transformations\n}", "title": "" }, { "docid": "364f7eb903868a36fdf181699ad24c27", "score": "0.57754296", "text": "func (o *Ga4ghSearchQueryResponse) GetTreatmentsOk() ([]Ga4ghTreatment, bool) {\n\tif o == nil || o.Treatments == nil {\n\t\tvar ret []Ga4ghTreatment\n\t\treturn ret, false\n\t}\n\treturn *o.Treatments, true\n}", "title": "" }, { "docid": "da244ef3f37b5f7b61fe81b307f90fbd", "score": "0.576619", "text": "func (o *V1GeneDescriptor) GetTranscriptsOk() (*[]V1Transcript, bool) {\n\tif o == nil || o.Transcripts == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Transcripts, true\n}", "title": "" }, { "docid": "f9dee173f4584adbc9ba59bff5e3c178", "score": "0.5667412", "text": "func (o *SyntheticsAPIStep) GetExtractedValuesOk() (*[]SyntheticsParsingOptions, bool) {\n\tif o == nil || o.ExtractedValues == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.ExtractedValues, true\n}", "title": "" }, { "docid": "482e8a6cab5ba2256e85c0356157a370", "score": "0.56486535", "text": "func (o *WorkflowExecutionResult) GetFlagsOk() (*[]string, bool) {\n\tif o == nil || o.Flags == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Flags, true\n}", "title": "" }, { "docid": "3f7e665bcb7f574692cdc970b32fbb27", "score": "0.55985975", "text": "func (o *ClientProvidedEnrichedTransaction) GetEnrichmentsOk() (*Enrichments, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Enrichments, true\n}", "title": "" }, { "docid": "1c8bcadec76c2cb4905891544fafc9d8", "score": "0.5551828", "text": "func (v *SimpleService_TransHeadersType_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "1c8bcadec76c2cb4905891544fafc9d8", "score": "0.5551828", "text": "func (v *SimpleService_TransHeadersType_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "154e43dd7c9f01355e6449e1d458974b", "score": "0.5541093", "text": "func (o *InlineObject930) GetValuesOk() (AnyOfobject, bool) {\n\tif o == nil || o.Values == nil {\n\t\tvar ret AnyOfobject\n\t\treturn ret, false\n\t}\n\treturn *o.Values, true\n}", "title": "" }, { "docid": "00c4f3ebbd364b8b257a099aa8a24225", "score": "0.55283356", "text": "func (o *InstitutionStatus) 
GetTransactionsUpdatesOk() (*ProductStatus, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn o.TransactionsUpdates.Get(), o.TransactionsUpdates.IsSet()\n}", "title": "" }, { "docid": "8ff49015ef1724573205ac38b55f8762", "score": "0.55166185", "text": "func (o *SyntheticUsageDto) GetPerformedSyntheticActionsOk() (*int32, bool) {\n\tif o == nil || o.PerformedSyntheticActions == nil {\n\t\treturn nil, false\n\t}\n\treturn o.PerformedSyntheticActions, true\n}", "title": "" }, { "docid": "9b384a840f3a9f5a6c26be016c24fc79", "score": "0.54840547", "text": "func (o *SoftwarerepositoryCachedImageAllOf) GetRegisteredWorkflowsOk() ([]string, bool) {\n\tif o == nil || o.RegisteredWorkflows == nil {\n\t\treturn nil, false\n\t}\n\treturn o.RegisteredWorkflows, true\n}", "title": "" }, { "docid": "07c54517025efb88519b6b8e15fa2daf", "score": "0.54669094", "text": "func (o MappingDataFlowResponsePtrOutput) Transformations() TransformationResponseArrayOutput {\n\treturn o.ApplyT(func(v *MappingDataFlowResponse) []TransformationResponse {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Transformations\n\t}).(TransformationResponseArrayOutput)\n}", "title": "" }, { "docid": "1e5b757e80dd6c9671be8971c9562b10", "score": "0.5457526", "text": "func (v *SimpleService_Trans_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "1e5b757e80dd6c9671be8971c9562b10", "score": "0.5457526", "text": "func (v *SimpleService_Trans_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "41a218944fa65320f34ef04ea1db64f6", "score": "0.543386", "text": "func (o *WorkflowProperties) GetOutputDefinitionOk() ([]WorkflowBaseDataType, bool) {\n\tif o == nil || o.OutputDefinition == nil {\n\t\treturn nil, false\n\t}\n\treturn o.OutputDefinition, true\n}", "title": "" }, { "docid": "4ad35d818046aa6f2afd7c5762e49570", "score": "0.5423735", "text": "func (o *SyntheticsTriggerTest) GetMetadataOk() (*SyntheticsCIBatchMetadata, bool) {\n\tif o == nil || o.Metadata == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Metadata, true\n}", "title": "" }, { "docid": "d4b8857b939ce3fcb67e4b002773d7e0", "score": "0.5419966", "text": "func (o GooglePrivacyDlpV2InfoTypeTransformationsResponseOutput) Transformations() GooglePrivacyDlpV2InfoTypeTransformationResponseArrayOutput {\n\treturn o.ApplyT(func(v GooglePrivacyDlpV2InfoTypeTransformationsResponse) []GooglePrivacyDlpV2InfoTypeTransformationResponse {\n\t\treturn v.Transformations\n\t}).(GooglePrivacyDlpV2InfoTypeTransformationResponseArrayOutput)\n}", "title": "" }, { "docid": "432786a93796445f28c6003e34d8f7e6", "score": "0.54165894", "text": "func (o MappingDataFlowOutput) Transformations() TransformationArrayOutput {\n\treturn o.ApplyT(func(v MappingDataFlow) []Transformation { return v.Transformations }).(TransformationArrayOutput)\n}", "title": "" }, { "docid": "3784f583be21e33a9e864e93fbee117a", "score": "0.5393846", "text": "func (v *SecondService_EchoStructMap_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "d2114ba505eec6b0eb9d289ca726d87b", "score": "0.5392696", "text": "func (v *SimpleService_TransHeaders_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "d2114ba505eec6b0eb9d289ca726d87b", "score": "0.5392696", "text": "func (v *SimpleService_TransHeaders_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": 
"2e3cd2dad5beee63954a08087f55fd5c", "score": "0.5380851", "text": "func (o MappingDataFlowPtrOutput) Transformations() TransformationArrayOutput {\n\treturn o.ApplyT(func(v *MappingDataFlow) []Transformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Transformations\n\t}).(TransformationArrayOutput)\n}", "title": "" }, { "docid": "12dad01efc2b40ac78ca8c6d69de272d", "score": "0.53802854", "text": "func (o MappingDataFlowResponseOutput) Transformations() TransformationResponseArrayOutput {\n\treturn o.ApplyT(func(v MappingDataFlowResponse) []TransformationResponse { return v.Transformations }).(TransformationResponseArrayOutput)\n}", "title": "" }, { "docid": "064225769099541dc8c8ebdd8f467f7c", "score": "0.5366714", "text": "func (v *SimpleService_TransHeadersNoReq_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "064225769099541dc8c8ebdd8f467f7c", "score": "0.5366714", "text": "func (v *SimpleService_TransHeadersNoReq_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "3b1c71324a619445e203a0bc46a0cf20", "score": "0.5365874", "text": "func (o *CustconfAuthGeo) GetValuesOk() (*string, bool) {\n\tif o == nil || o.Values == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Values, true\n}", "title": "" }, { "docid": "ba05ba4330162ff0bf5c0e98779afde0", "score": "0.53613776", "text": "func (o Options) transform() bool {\n\treturn o.Width != 0 || o.Height != 0 || o.Rotate != 0 || o.FlipHorizontal || o.FlipVertical || o.Quality != 0 || o.Format != \"\" || o.CropX != 0 || o.CropY != 0 || o.CropWidth != 0 || o.CropHeight != 0\n}", "title": "" }, { "docid": "e5fdf6d9e41b44703be0be4894d8db4f", "score": "0.5355365", "text": "func (o *SyntheticsTriggerBody) GetTestsOk() (*[]SyntheticsTriggerTest, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Tests, true\n}", "title": "" }, { "docid": "fae298dc4f19a48a879de65c9000d58b", "score": "0.5336932", "text": "func (o *InlineObject969) GetValuesOk() (AnyOfobject, bool) {\n\tif o == nil || o.Values == nil {\n\t\tvar ret AnyOfobject\n\t\treturn ret, false\n\t}\n\treturn *o.Values, true\n}", "title": "" }, { "docid": "cd2804caa932c0c5dac3230fc05346c4", "score": "0.5323541", "text": "func (v *SecondService_EchoStructSet_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "d884a74c8e1628aae4eebc7bdd540c58", "score": "0.5320944", "text": "func (o *BulkMoCloner) GetTargetsOk() ([]MoBaseMo, bool) {\n\tif o == nil || o.Targets == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Targets, true\n}", "title": "" }, { "docid": "4f65fb87b3d6b6cf0a25ca486e11870d", "score": "0.5293775", "text": "func (o *ProcessorsEntity) GetProcessorsOk() (*[]ProcessorEntity, bool) {\n\tif o == nil || o.Processors == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Processors, true\n}", "title": "" }, { "docid": "269c56ae5ca71f07d96959c322741806", "score": "0.5271035", "text": "func (o *GetEntitiesEntityFidPropertiesValuesPropertyNameOK) IsSuccess() bool {\n\treturn true\n}", "title": "" }, { "docid": "6aa2dca1286ceaa8d3e2e9fc8974c61f", "score": "0.5259492", "text": "func (o RuleGroupRuleStatementOrStatementStatementNotStatementStatementXssMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementNotStatementStatementXssMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementNotStatementStatementXssMatchStatement) 
[]RuleGroupRuleStatementOrStatementStatementNotStatementStatementXssMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementNotStatementStatementXssMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "7d7f2fe2609aedbe8b343d428b1d70ad", "score": "0.5255816", "text": "func (o *UpdateSettingsFlowWithOidcMethod) GetTraitsOk() (map[string]interface{}, bool) {\n\tif o == nil || o.Traits == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Traits, true\n}", "title": "" }, { "docid": "df5a046b689f06afa21db82286d5c1f0", "score": "0.5252184", "text": "func (o *WorkflowDecisionTaskAllOf) GetInputParametersOk() (*interface{}, bool) {\n\tif o == nil || o.InputParameters == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.InputParameters, true\n}", "title": "" }, { "docid": "c7bbdf42ce6282955441d5994df601a5", "score": "0.5239845", "text": "func (o RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexMatchStatement) []RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "3d69968dc95dfb1282b588dc81658177", "score": "0.52389044", "text": "func (o *SyntheticsAPIStep) GetAssertionsOk() (*[]SyntheticsAssertion, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Assertions, true\n}", "title": "" }, { "docid": "64e9edfd519f16a0f78c03a84e795705", "score": "0.52349395", "text": "func (o *WorkbookWorksheet) GetPivotTablesOk() ([]MicrosoftGraphWorkbookPivotTable, bool) {\n\tif o == nil || o.PivotTables == nil {\n\t\tvar ret []MicrosoftGraphWorkbookPivotTable\n\t\treturn ret, false\n\t}\n\treturn *o.PivotTables, true\n}", "title": "" }, { "docid": "69667b36efbd5b9de96bebf0c44a8240", "score": "0.5230795", "text": "func (o *BulkMoDeepCloner) GetTargetsOk() ([]MoBaseMo, bool) {\n\tif o == nil || o.Targets == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Targets, true\n}", "title": "" }, { "docid": "c0cecc2fef13985ce7c20d50fea13dad", "score": "0.5226553", "text": "func (o *RelKey) GetValuesOk() (*[]string, bool) {\n\tif o == nil || o.Values == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Values, true\n}", "title": "" }, { "docid": "ab3d931dcd227bd1b0a382ad4fa272e5", "score": "0.52240765", "text": "func (t *TriggeringValue) GetToTsOk() (int, bool) {\n\tif t == nil || t.ToTs == nil {\n\t\treturn 0, false\n\t}\n\treturn *t.ToTs, true\n}", "title": "" }, { "docid": "4fd58ffaed9a36c7feca08f6ba516394", "score": "0.5221035", "text": "func (o *TemplateSummaryVariableArgs) GetValuesOk() (*interface{}, bool) {\n\tif o == nil || o.Values == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Values, true\n}", "title": "" }, { "docid": "ed799ee97304d672a4842e71dc9c8430", "score": "0.5213349", "text": "func (o RuleGroupRuleStatementOrStatementStatementXssMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementXssMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v 
*RuleGroupRuleStatementOrStatementStatementXssMatchStatement) []RuleGroupRuleStatementOrStatementStatementXssMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementXssMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "03d1377474cbb623de085c46481b224b", "score": "0.52094406", "text": "func (o RuleGroupRuleStatementOrStatementStatementRegexMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementRegexMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementRegexMatchStatement) []RuleGroupRuleStatementOrStatementStatementRegexMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementRegexMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "a6fb16bcb03fd2f0ab842aee6607a733", "score": "0.5208098", "text": "func (o RuleGroupRuleStatementOrStatementStatementNotStatementStatementByteMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementNotStatementStatementByteMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementNotStatementStatementByteMatchStatement) []RuleGroupRuleStatementOrStatementStatementNotStatementStatementByteMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementNotStatementStatementByteMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "d8ea89d9c5252aa65bc2f789f89ee17d", "score": "0.5207666", "text": "func (o *Alert) GetTriggersOk() ([]AnyOfmicrosoftGraphAlertTrigger, bool) {\n\tif o == nil || o.Triggers == nil {\n\t\tvar ret []AnyOfmicrosoftGraphAlertTrigger\n\t\treturn ret, false\n\t}\n\treturn *o.Triggers, true\n}", "title": "" }, { "docid": "f0a5b2e3be7d6cb5e11a75e5879eb897", "score": "0.52041274", "text": "func (o *InlineObject27) GetTypesOk() ([]string, bool) {\n\tif o == nil || o.Types == nil {\n\t\tvar ret []string\n\t\treturn ret, false\n\t}\n\treturn *o.Types, true\n}", "title": "" }, { "docid": "8649d0941155ebad84deec7dd25c7f68", "score": "0.5204014", "text": "func (o *MicrosoftGraphTeamMessagingSettings) GetAllowTeamMentionsOk() (bool, bool) {\n\tif o == nil || o.AllowTeamMentions == nil {\n\t\tvar ret bool\n\t\treturn ret, false\n\t}\n\treturn *o.AllowTeamMentions, true\n}", "title": "" }, { "docid": "7204243ad62528eab04d31af39ced270", "score": "0.5196998", "text": "func (o RuleGroupRuleStatementOrStatementStatementNotStatementStatementSqliMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementNotStatementStatementSqliMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementNotStatementStatementSqliMatchStatement) []RuleGroupRuleStatementOrStatementStatementNotStatementStatementSqliMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementNotStatementStatementSqliMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "9432dec5de8b16d5c95a2c199d9c3a7f", "score": "0.51915455", "text": "func (o *TemplateSummaryDiffBucketFields) GetMeasurementSchemasOk() (*[]map[string]interface{}, bool) 
{\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.MeasurementSchemas, true\n}", "title": "" }, { "docid": "fb11284d41cfecadb09216f7ca345869", "score": "0.51859164", "text": "func NewGetInstancesTransformationsOK() *GetInstancesTransformationsOK {\n\treturn &GetInstancesTransformationsOK{}\n}", "title": "" }, { "docid": "79e701f3016cdbda76f4890c7645a423", "score": "0.51846147", "text": "func (o *ConsensusWorkflowMetrics) GetFlagTransactionFinalizedOk() (*bool, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.FlagTransactionFinalized, true\n}", "title": "" }, { "docid": "d68bb720c06473de03f9006dd836df04", "score": "0.51782256", "text": "func (o RuleGroupRuleStatementOrStatementStatementNotStatementStatementXssMatchStatementOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementNotStatementStatementXssMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v RuleGroupRuleStatementOrStatementStatementNotStatementStatementXssMatchStatement) []RuleGroupRuleStatementOrStatementStatementNotStatementStatementXssMatchStatementTextTransformation {\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementNotStatementStatementXssMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "1d9257597a76aecd0f95d67d7ab5884f", "score": "0.5177801", "text": "func (o RuleGroupRuleStatementOrStatementStatementNotStatementStatementSizeConstraintStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementNotStatementStatementSizeConstraintStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementNotStatementStatementSizeConstraintStatement) []RuleGroupRuleStatementOrStatementStatementNotStatementStatementSizeConstraintStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementNotStatementStatementSizeConstraintStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "eb96f604da79bebf279151eacde297de", "score": "0.517082", "text": "func (o RuleGroupRuleStatementOrStatementStatementOrStatementStatementRegexMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementOrStatementStatementRegexMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementOrStatementStatementRegexMatchStatement) []RuleGroupRuleStatementOrStatementStatementOrStatementStatementRegexMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementOrStatementStatementRegexMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "5b57fa53c6139143049024d191734224", "score": "0.51695544", "text": "func (o *DetectedServicesPayload) GetAnnotationsOk() (*map[string]string, bool) {\n\tif o == nil || o.Annotations == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Annotations, true\n}", "title": "" }, { "docid": "f4fe2671f2a82857d54e0e51eb46642a", "score": "0.51646566", "text": "func (o *ProcessGroupMetadata) GetTibcoBusinessWorksEnginePropertyFilesOk() (*[]string, bool) {\n\tif o == nil || o.TibcoBusinessWorksEnginePropertyFiles == nil {\n\t\treturn nil, false\n\t}\n\treturn o.TibcoBusinessWorksEnginePropertyFiles, true\n}", "title": "" }, { "docid": "8b5e601b78087b077e6daaa11d68b7fe", "score": "0.5163353", "text": "func (o 
RuleGroupRuleStatementOrStatementStatementOrStatementStatementXssMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementOrStatementStatementXssMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementOrStatementStatementXssMatchStatement) []RuleGroupRuleStatementOrStatementStatementOrStatementStatementXssMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementOrStatementStatementXssMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "5fccc48035357de5dd95fca91f955307", "score": "0.51622343", "text": "func (o RuleGroupRuleStatementOrStatementStatementXssMatchStatementOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementXssMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v RuleGroupRuleStatementOrStatementStatementXssMatchStatement) []RuleGroupRuleStatementOrStatementStatementXssMatchStatementTextTransformation {\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementXssMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "997f5ff91cc0d46debdb197487d883ba", "score": "0.5162096", "text": "func (o GooglePrivacyDlpV2InfoTypeTransformationsOutput) Transformations() GooglePrivacyDlpV2InfoTypeTransformationArrayOutput {\n\treturn o.ApplyT(func(v GooglePrivacyDlpV2InfoTypeTransformations) []GooglePrivacyDlpV2InfoTypeTransformation {\n\t\treturn v.Transformations\n\t}).(GooglePrivacyDlpV2InfoTypeTransformationArrayOutput)\n}", "title": "" }, { "docid": "4ea81191c2c71bcf340f6af9d1afcbe4", "score": "0.51616174", "text": "func (o *HyperflexInitiatorGroup) GetTargetUuidsOk() ([]string, bool) {\n\tif o == nil || o.TargetUuids == nil {\n\t\treturn nil, false\n\t}\n\treturn o.TargetUuids, true\n}", "title": "" }, { "docid": "7392404c47918873efdc038019d4a9f1", "score": "0.5160956", "text": "func (o *RemoteProcessGroupDTO) GetTargetUrisOk() (*string, bool) {\n\tif o == nil || o.TargetUris == nil {\n\t\treturn nil, false\n\t}\n\treturn o.TargetUris, true\n}", "title": "" }, { "docid": "1752ffa776c816c46a7ebb479f54b3a1", "score": "0.51605666", "text": "func (v *WorkflowResult) IsSetSucceeded() bool {\n\treturn v != nil && v.Succeeded != nil\n}", "title": "" }, { "docid": "9251646be836c5828bc7c4ad644cc25c", "score": "0.5158367", "text": "func (o RuleGroupRuleStatementOrStatementStatementByteMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementByteMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementByteMatchStatement) []RuleGroupRuleStatementOrStatementStatementByteMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementByteMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "04567dea37fc7bca203764148d9b5cad", "score": "0.51579386", "text": "func (v *SimpleService_HeaderSchema_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "04567dea37fc7bca203764148d9b5cad", "score": "0.51579386", "text": "func (v *SimpleService_HeaderSchema_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "bafbc33b490697bd3be14aadd8e57942", "score": "0.51578474", "text": "func (o 
GooglePrivacyDlpV2DeidentifyConfigResponseOutput) ImageTransformations() GooglePrivacyDlpV2ImageTransformationsResponseOutput {\n\treturn o.ApplyT(func(v GooglePrivacyDlpV2DeidentifyConfigResponse) GooglePrivacyDlpV2ImageTransformationsResponse {\n\t\treturn v.ImageTransformations\n\t}).(GooglePrivacyDlpV2ImageTransformationsResponseOutput)\n}", "title": "" }, { "docid": "920280b17a8867c4d948534e81a150df", "score": "0.5153385", "text": "func (o *IaasServiceRequestAllOf) GetWorkflowStepsOk() ([]IaasWorkflowSteps, bool) {\n\tif o == nil || o.WorkflowSteps == nil {\n\t\treturn nil, false\n\t}\n\treturn o.WorkflowSteps, true\n}", "title": "" }, { "docid": "b171d977bdbdcef71823049432d19ada", "score": "0.51517385", "text": "func (o RuleGroupRuleStatementOrStatementStatementAndStatementStatementRegexMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementAndStatementStatementRegexMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementAndStatementStatementRegexMatchStatement) []RuleGroupRuleStatementOrStatementStatementAndStatementStatementRegexMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementAndStatementStatementRegexMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "7dfff6d2b10d4b20fb8f760bc80abf3c", "score": "0.51494855", "text": "func (o RuleGroupRuleStatementOrStatementStatementAndStatementStatementXssMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementAndStatementStatementXssMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementAndStatementStatementXssMatchStatement) []RuleGroupRuleStatementOrStatementStatementAndStatementStatementXssMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementAndStatementStatementXssMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "fa75edad684bc71ff2a70759ad847e36", "score": "0.514769", "text": "func (o RuleGroupRuleStatementOrStatementStatementSqliMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementSqliMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementSqliMatchStatement) []RuleGroupRuleStatementOrStatementStatementSqliMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementSqliMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "c763dba57cba6d5de12545e4cdddc753", "score": "0.5145821", "text": "func (o *DetectedServicesPayload) GetTagsOk() (*map[string]string, bool) {\n\tif o == nil || o.Tags == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Tags, true\n}", "title": "" }, { "docid": "41795a888c8bde64a30f33fc901dcda6", "score": "0.51420385", "text": "func (o *WorkflowTargetPropertyAllOf) GetSupportedObjectsOk() ([]string, bool) {\n\tif o == nil || o.SupportedObjects == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SupportedObjects, true\n}", "title": "" }, { "docid": "77726afd4ec69b840b885c0da12e4998", "score": "0.51396316", "text": "func (o *GroupScalarColumn) GetValuesOk() (*[][]string, bool) {\n\tif o == nil || o.Values == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Values, 
true\n}", "title": "" }, { "docid": "7eabbaa0630008a22c05f9b7c3a2e6a7", "score": "0.513476", "text": "func (o RuleGroupRuleStatementOrStatementStatementRegexMatchStatementOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementRegexMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v RuleGroupRuleStatementOrStatementStatementRegexMatchStatement) []RuleGroupRuleStatementOrStatementStatementRegexMatchStatementTextTransformation {\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementRegexMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "9631f508df4c38f40f3347487b7cedde", "score": "0.51338667", "text": "func (o RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexMatchStatementOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexMatchStatement) []RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexMatchStatementTextTransformation {\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "b0393e19600643b7674e130555fb67c0", "score": "0.5128757", "text": "func (o RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexPatternSetReferenceStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexPatternSetReferenceStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexPatternSetReferenceStatement) []RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexPatternSetReferenceStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementNotStatementStatementRegexPatternSetReferenceStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "aab8cbf64656039017364b533e5252bd", "score": "0.5123984", "text": "func (o *VersionMetaData) GetPropertiesOk() (*map[string]string, bool) {\n\tif o == nil || o.Properties == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Properties, true\n}", "title": "" }, { "docid": "516b9535f581d7462fe814a734433a34", "score": "0.512341", "text": "func (v *SecondService_EchoStructList_Result) IsSetSuccess() bool {\n\treturn v != nil && v.Success != nil\n}", "title": "" }, { "docid": "0232eb1ecbff6104b735907827a5f96e", "score": "0.51184726", "text": "func (o RuleGroupRuleStatementOrStatementStatementOrStatementStatementXssMatchStatementOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementOrStatementStatementXssMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v RuleGroupRuleStatementOrStatementStatementOrStatementStatementXssMatchStatement) []RuleGroupRuleStatementOrStatementStatementOrStatementStatementXssMatchStatementTextTransformation {\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementOrStatementStatementXssMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "8832fd2484740d6ce04152a093381453", "score": "0.5117578", "text": "func (o *ViewBoardColumnSettings) GetTicketsOk() (*bool, bool) {\n\tif o == nil || o.Tickets == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Tickets, true\n}", "title": "" }, 
{ "docid": "1465967cb09c874ab01e2ad2ca2a3659", "score": "0.511642", "text": "func (o *RecurrenceTransaction) GetTagsOk() (*[]string, bool) {\n\tif o == nil || o.Tags == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Tags, true\n}", "title": "" }, { "docid": "90f1211d56eb07256df81f6c2f9df4c9", "score": "0.51154995", "text": "func (o *WorkflowRollbackWorkflow) GetSelectedTasksOk() ([]WorkflowRollbackWorkflowTask, bool) {\n\tif o == nil || o.SelectedTasks == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SelectedTasks, true\n}", "title": "" }, { "docid": "ca2e3033f530f1273b88c14a1f4c1ccf", "score": "0.5111654", "text": "func (o GooglePrivacyDlpV2InfoTypeTransformationsPtrOutput) Transformations() GooglePrivacyDlpV2InfoTypeTransformationArrayOutput {\n\treturn o.ApplyT(func(v *GooglePrivacyDlpV2InfoTypeTransformations) []GooglePrivacyDlpV2InfoTypeTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Transformations\n\t}).(GooglePrivacyDlpV2InfoTypeTransformationArrayOutput)\n}", "title": "" }, { "docid": "a59124ffd994479a21b9b350f62da446", "score": "0.5110119", "text": "func (o RuleGroupRuleStatementOrStatementStatementOrStatementStatementSqliMatchStatementPtrOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementOrStatementStatementSqliMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v *RuleGroupRuleStatementOrStatementStatementOrStatementStatementSqliMatchStatement) []RuleGroupRuleStatementOrStatementStatementOrStatementStatementSqliMatchStatementTextTransformation {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementOrStatementStatementSqliMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "9815753fb000810dcb064a1dddc7f79a", "score": "0.51098895", "text": "func (o *ViewBoardColumnSettings) GetRemindersOk() (*bool, bool) {\n\tif o == nil || o.Reminders == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Reminders, true\n}", "title": "" }, { "docid": "042619050920968841014d7bf6151850", "score": "0.5109651", "text": "func (o *GvmVM) GetLatitudeOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Latitude.Get(), o.Latitude.IsSet()\n}", "title": "" }, { "docid": "811ab7b7f57aabc4472ee307cdf64f10", "score": "0.5108682", "text": "func (o RuleGroupRuleStatementOrStatementStatementNotStatementStatementSizeConstraintStatementOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementNotStatementStatementSizeConstraintStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v RuleGroupRuleStatementOrStatementStatementNotStatementStatementSizeConstraintStatement) []RuleGroupRuleStatementOrStatementStatementNotStatementStatementSizeConstraintStatementTextTransformation {\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementNotStatementStatementSizeConstraintStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "dbb4ec5e4d9fd955c6862664dbe01f42", "score": "0.51044464", "text": "func (o *Template) GetKeywordsOk() (*string, bool) {\n\tif o == nil || IsNil(o.Keywords) {\n\t\treturn nil, false\n\t}\n\treturn o.Keywords, true\n}", "title": "" }, { "docid": "f082ed5ae2ce7e7840f9686107a42aae", "score": "0.50989586", "text": "func (o *CustomerFiscalEntitiesRequest) GetMetadataOk() (map[string]map[string]interface{}, bool) {\n\tif o == nil || IsNil(o.Metadata) {\n\t\treturn map[string]map[string]interface{}{}, false\n\t}\n\treturn o.Metadata, true\n}", "title": "" }, 
{ "docid": "ca03049c387050545bf50aa5c6a61564", "score": "0.5098565", "text": "func (o RuleGroupRuleStatementOrStatementStatementNotStatementStatementByteMatchStatementOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementNotStatementStatementByteMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v RuleGroupRuleStatementOrStatementStatementNotStatementStatementByteMatchStatement) []RuleGroupRuleStatementOrStatementStatementNotStatementStatementByteMatchStatementTextTransformation {\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementNotStatementStatementByteMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "23c79c4df3f5c96d903b79a32e12497f", "score": "0.5096898", "text": "func (o *PeoplePersonOfPeople) GetCanViewProjectTemplatesOk() (*bool, bool) {\n\tif o == nil || o.CanViewProjectTemplates == nil {\n\t\treturn nil, false\n\t}\n\treturn o.CanViewProjectTemplates, true\n}", "title": "" }, { "docid": "d540781cb416dc3dcb39056bc45315f1", "score": "0.50968754", "text": "func (o RuleGroupRuleStatementOrStatementStatementOrStatementStatementRegexMatchStatementOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementOrStatementStatementRegexMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v RuleGroupRuleStatementOrStatementStatementOrStatementStatementRegexMatchStatement) []RuleGroupRuleStatementOrStatementStatementOrStatementStatementRegexMatchStatementTextTransformation {\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementOrStatementStatementRegexMatchStatementTextTransformationArrayOutput)\n}", "title": "" }, { "docid": "00039a1e131239c3185b144cd692d40d", "score": "0.50958765", "text": "func (o RuleGroupRuleStatementOrStatementStatementNotStatementStatementSqliMatchStatementOutput) TextTransformations() RuleGroupRuleStatementOrStatementStatementNotStatementStatementSqliMatchStatementTextTransformationArrayOutput {\n\treturn o.ApplyT(func(v RuleGroupRuleStatementOrStatementStatementNotStatementStatementSqliMatchStatement) []RuleGroupRuleStatementOrStatementStatementNotStatementStatementSqliMatchStatementTextTransformation {\n\t\treturn v.TextTransformations\n\t}).(RuleGroupRuleStatementOrStatementStatementNotStatementStatementSqliMatchStatementTextTransformationArrayOutput)\n}", "title": "" } ]
f17d14c8f9e09763c884b83b389a2df9
NotifyUser is for sending a message to the user about the current market price
[ { "docid": "cc5c9b63236254fb775680a142f907d4", "score": "0.8324391", "text": "func NotifyUser(userID int64, market string, price float64) error {\n\n\tmarket = strings.ToUpper(market)\n\tpriceString := strconv.FormatFloat(price, 'f', -1, 64)\n\n\tmessage := fmt.Sprintf(\"%v = %v\", market, priceString)\n\n\treturn sendMessage(userID, message)\n}", "title": "" } ]
[ { "docid": "ac0b1939cd8dc8c260283900a5166178", "score": "0.7179344", "text": "func (t *Trigger) alertUser(currPrice float64) {\n\tcardData, err := t.CardID.Fetch()\n\tif err != nil {\n\t\tlog.Error(err)\n\t\treturn\n\t}\n\n\tvar condition string\n\tswitch t.Condition {\n\tcase GreaterThan:\n\t\tcondition = \"rose\"\n\tcase LessThan:\n\t\tcondition = \"dropped\"\n\t}\n\n\temailBody := fmt.Sprintf(\"The price on %s has %s to your threshold of $%.2f.\\nThe price is now $%.2f\", cardData.Name, condition, t.Threshold, currPrice)\n\n\te := email.NewEmail()\n\te.From = fmt.Sprintf(\"MTGDrop <%s>\", alertEmail)\n\te.To = []string{t.Email}\n\te.Subject = \"MTGDrop: Price Alert\"\n\te.Text = []byte(emailBody)\n\tif err := e.Send(\"smtp.gmail.com:587\", smtp.PlainAuth(\"\", alertEmail, alertPass, \"smtp.gmail.com\")); err != nil {\n\t\tlog.Error(err)\n\t\tlog.Error(\"Failed to send email\")\n\t}\n}", "title": "" }, { "docid": "19be2ca56b204211be9a34a52318c2cc", "score": "0.66619486", "text": "func (u *User) Notify(SBI, HDFC, IDFC int) {\n\tfmt.Println(\"User: \", u.id, \"SBI: \", SBI, \"HDFC: \", HDFC, \"IDFC: \", IDFC)\n}", "title": "" }, { "docid": "7146dfa01054936cd1c8f2348b620eec", "score": "0.6439807", "text": "func (u *user) notify() {\n fmt.Printf(\"Sending user email to %s<%s>\\n\", u.name, u.email)\n}", "title": "" }, { "docid": "9bba5cf28526ee071fb7085a4ccf8060", "score": "0.6389925", "text": "func (u user) notify() {\n\tfmt.Printf(\"Sending User Email To %s<%s>\\n\", u.name, u.email)\n}", "title": "" }, { "docid": "79480ffa3e966100c147dd0a2c8582a1", "score": "0.6373446", "text": "func (gop GoService) SendNotification(user, content string) error {\n\tfmt.Printf(\"Hey %v this is your new information: %v\\n\", user, content)\n\treturn nil\n}", "title": "" }, { "docid": "725338a541907b47f68ee7d084d66cc3", "score": "0.6248292", "text": "func NotifyListAlert(userID int64) error {\n\n\tuser, err := data.GetUser(userID)\n\tif err != nil {\n\t\tlog.Println(\"fail get user\")\n\t\treturn err\n\t}\n\n\tvar message string\n\tif len(user.WatchList) == 0 {\n\t\tmessage = \"you have no market price alert right now\"\n\t} else {\n\t\tmessage = \"Here is your market price alert list:\\n\"\n\t\tfor _, watch := range user.WatchList {\n\t\t\talertText := fmt.Sprintf(\"%v %v %v | id=%v\", strings.ToUpper(watch.Market), watch.When, strconv.FormatFloat(watch.PriceLimit, 'f', -1, 64), watch.ID)\n\t\t\tmessage = fmt.Sprintf(\"%v\\n%v\", message, alertText)\n\t\t}\n\t}\n\n\treturn sendMessage(userID, message)\n}", "title": "" }, { "docid": "54a69c082f6c9418c63fffb12fb1089f", "score": "0.6189448", "text": "func (u *user) notify() {\n\tfmt.Printf(\"Sending user email to %s<%s>\",\n\t\tu.name,\n\t\tu.email)\n}", "title": "" }, { "docid": "f7dc46afbb4bca46fb5fbe3facf41403", "score": "0.61689085", "text": "func (u *user) notify() {\n\tfmt.Printf(\"Sending user email to %s<%s>\\n\",\n\t\tu.name,\n\t\tu.email)\n}", "title": "" }, { "docid": "53ce7be3ed30cdbd94e460e7135464a2", "score": "0.5884494", "text": "func SendNotification(w http.ResponseWriter, r *http.Request) {\n\ts.db = driver.GetDB()\n\tvar subscriber models.Subscriber\n\n\tvar bookID int\n\tjson.NewDecoder(r.Body).Decode(&bookID)\n\trows, err := s.db.Query(\"SELECT * from public.subscribers WHERE book_id=$1\", bookID)\n\tlogFatal(err)\n\n\tdefer rows.Close()\n\n\tfor rows.Next() {\n\t\terr := rows.Scan(&subscriber.ID, &subscriber.Email, &subscriber.BookID)\n\t\tlogFatal(err)\n\t\ts.subscribers = append(s.subscribers, 
subscriber)\n\t}\n\n\tspew.Dump(s.subscribers, \"s.subscribers\")\n\tfor _, sub := range s.subscribers {\n\t\tfmt.Printf(\"Отправлена нотификация на почту %v. Книга %v теперь доступна\\n\", sub.Email, sub.BookID)\n\t}\n\n}", "title": "" }, { "docid": "90c0e94f628a631201d1ef834cf7bdae", "score": "0.58709395", "text": "func main() {\n\t// Create a value of type user.\n\tu := &user{\"Jill\", \"jill@email.com\"}\n\n\t// Pass a pointer of of type user to the function.\n\tsendNotification(u)\n}", "title": "" }, { "docid": "fd7cf49492d5bf230d7fd01c0024d898", "score": "0.5810684", "text": "func (c *Controller) NotifyUser(nm *models.NotificationMessage) error {\n\tif nm.Account.Nick == \"\" {\n\t\tc.logger.Error(\"Nick is not set\")\n\t\treturn nil\n\t}\n\n\tnm.EventId = createEventId()\n\n\treturn c.Pubnub.NotifyUser(nm)\n}", "title": "" }, { "docid": "590e52374374a6e4b523db7af04da6d3", "score": "0.5729153", "text": "func (s *pslack) Notify(data []byte) (err error) {\n\tslack.Send(s.webhook, \"\", s.pay)\n\treturn nil\n}", "title": "" }, { "docid": "d8eb5fb2365b582489aa0af97d620d55", "score": "0.5635862", "text": "func Notify(w http.ResponseWriter, r *http.Request) {\n\turl := \"https://api.notify.me/secret/channel/username/tim\"\n\n\treq, err := http.NewRequest(\"GET\", url, nil)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t}\n\n\treq.Header.Set(\"Origin\", \"https://notify.me\")\n\treq.Header.Set(\"Authorization\", os.Getenv(\"NOTIFY_KEY\"))\n\treq.Header.Set(\"User-Agent\", \"timcole.me homepage\")\n\n\tres, err := (&http.Client{\n\t\tTimeout: 30 * time.Second,\n\t}).Do(req)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t}\n\tdefer res.Body.Close()\n\n\tbody, err := ioutil.ReadAll(res.Body)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(200)\n\tw.Write(body)\n}", "title": "" }, { "docid": "ea33bf8dad67355b6b689021eebee198", "score": "0.5572612", "text": "func (m *Manager) Notify(val interface{}) error {\n\tglog.V(2).Infof(\"manager sending notify request\")\n\tdefer glog.V(2).Infof(\"received notify response\")\n\trequest := managerRequest{\n\t\top: managerOpNotify,\n\t\tval: val,\n\t\tresponse: make(chan error),\n\t}\n\tm.requests <- request\n\treturn <-request.response\n}", "title": "" }, { "docid": "5c5e2440854fd7cffef65b4eb3751365", "score": "0.5567314", "text": "func sendNotify(urgency int, msg string) {\n\tvar urgencyStr string\n\tswitch urgency {\n\tcase 1:\n\t\turgencyStr = \"low\"\n\tcase 2:\n\t\turgencyStr = \"critical\"\n\tdefault:\n\t\turgencyStr = \"normal\"\n\t}\n\n\terr := exec.Command(\"notify-send\", \"--urgency=\"+urgencyStr, \"--expire-time=3000\",\n\t\tnameService, msg).Start()\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n}", "title": "" }, { "docid": "fa3322e05021ca5d97fa3261e3906292", "score": "0.5554284", "text": "func (app *AppData) Notify(title string, body string, duration time.Duration) {\n\n\ticon := app.Config.Icon\n\n\tif icon == \"\" {\n\t\ticon = \"view-private\" // or \"flag\"\n\t}\n\n\tNotify(app.Connection, \"Secret Service\", icon, title, body, duration)\n}", "title": "" }, { "docid": "5fda89aae1685ae76a5bce34aa14c0a0", "score": "0.5551399", "text": "func (u *user52) notify52() {\n\tfmt.Printf(\"Send user email to %s<%s>\\n\", u.name, u.email)\n}", "title": "" }, { "docid": "a44be74f4472d1bb1abeee75edb9ec03", "score": "0.5534846", "text": "func NotifyInvalidMarket(userID int64, market string) error {\n\tmarket = strings.ToUpper(market)\n\n\tmessage := 
fmt.Sprintf(\"Invalid '%v' market code\\n\\n List of available market is:\\n\", market)\n\tfor market := range data.SupportedMarket {\n\t\tmessage = fmt.Sprintf(\"%v\\n%v\", message, market)\n\t}\n\n\treturn sendMessage(userID, message)\n}", "title": "" }, { "docid": "bc0212cc3d67a188c78f1e856f1bf6e5", "score": "0.5534416", "text": "func (p *EchoPort) Notification(c echo.Context) error {\n\n\tconn, err := p.upgrader.Upgrade(c.Response(), c.Request(), c.Response().Header())\n\tif err != nil {\n\t\treturn echo.NewHTTPError(http.StatusUpgradeRequired, err.Error())\n\t}\n\tdefer conn.Close()\n\n\t// Subscribe user\n\tnotifChan, err := p.service.Subscribe(auth.Context(c), conn)\n\tif err != nil {\n\t\treturn echo.NewHTTPError(http.StatusUpgradeRequired, \"error to subscribe\")\n\t}\n\tdefer p.service.Unsubscribe(auth.Context(c))\n\n\tfor {\n\t\terr = conn.WriteJSON(<-notifChan)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(\"Sent to websocket\")\n\t}\n}", "title": "" }, { "docid": "99ddd23c5b2e41aaaeb2c4310c2dc70b", "score": "0.5514047", "text": "func (c *BgolddClient) NotifySpent(outPoints []*wire.OutPoint) error {\n\tselect {\n\tcase c.rescanUpdate <- outPoints:\n\tcase <-c.quit:\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "51a4314a1d7a2fe588f31c657e46fb81", "score": "0.55026215", "text": "func (n *OpsGenie) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {\n\treq, retry, err := n.createRequest(ctx, as...)\n\tif err != nil {\n\t\treturn retry, err\n\t}\n\n\tc, err := commoncfg.NewClientFromConfig(*n.conf.HTTPConfig, \"opsgenie\")\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tresp, err := ctxhttp.Do(ctx, c, req)\n\n\tif err != nil {\n\t\treturn true, err\n\t}\n\tdefer resp.Body.Close()\n\n\treturn n.retry(resp.StatusCode)\n}", "title": "" }, { "docid": "ca5008f1c86e062ffcef81998037b6b1", "score": "0.5484335", "text": "func NotifySuccessAdd(userID int64, market string, price float64, when string) error {\n\tmarket = strings.ToUpper(market)\n\tpriceString := strconv.FormatFloat(price, 'f', -1, 64)\n\n\tmessage := fmt.Sprintf(\"Created new alert for:\\n%v %v %v\", market, when, priceString)\n\n\treturn sendMessage(userID, message)\n}", "title": "" }, { "docid": "24540958ac7b9fe820c058db6f1d405a", "score": "0.545788", "text": "func (c *Client) Notify(ctx context.Context, method string, params interface{}) error {\n\treq, err := c.note(ctx, method, params)\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = c.send(ctx, jmessages{req})\n\treturn err\n}", "title": "" }, { "docid": "d9d0bf8fc5692d99d7c6c96df6189fd6", "score": "0.5439438", "text": "func (e *EventReceiver) Notify(ctx context.Context, event flamingo.Event) {\n\tswitch currentEvent := event.(type) {\n\t// Clean cart cache on logout\n\tcase *auth.WebLogoutEvent:\n\t\tif e.cartCache != nil {\n\t\t\t_ = e.cartCache.DeleteAll(ctx, currentEvent.Request.Session())\n\t\t}\n\t// Handle WebLoginEvent and merge the cart\n\tcase *auth.WebLoginEvent:\n\t\tweb.RunWithDetachedContext(ctx, func(ctx context.Context) {\n\t\t\te.handleLoginEvent(ctx, currentEvent)\n\t\t})\n\t// Clean the cart cache when the cart should be invalidated\n\tcase *cartDomain.InvalidateCartEvent:\n\t\tif e.cartCache != nil {\n\t\t\tcartID, err := e.cartCache.BuildIdentifier(ctx, currentEvent.Session)\n\t\t\tif err == nil {\n\t\t\t\t_ = e.cartCache.Invalidate(ctx, currentEvent.Session, cartID)\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "87a4e87822a5410c772946a9aac8f75e", "score": "0.5425179", "text": "func (c *Client) 
Notify(method string, msg interface{}) error {\n\tbody, err := json.Marshal(msg)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tc.txMut.Lock()\n\tdefer c.txMut.Unlock()\n\treturn c.tx.SendMessage(txMessage{\n\t\tBatched: false,\n\t\tObjects: []*txObject{{\n\t\t\tRequest: &txRequest{\n\t\t\t\tNotification: true,\n\t\t\t\tMethod: method,\n\t\t\t\tParams: body,\n\t\t\t},\n\t\t}},\n\t})\n}", "title": "" }, { "docid": "3dbcecdcc83320431b19aa0aa54869af", "score": "0.54243135", "text": "func sendNotifications(notifications notifications.NotificationManager, quantname string, creator pixlUser.UserInfo) error {\n\ttemplate := make(map[string]interface{})\n\ttemplate[\"quantname\"] = quantname\n\ttemplate[\"subject\"] = fmt.Sprintf(\"New quantification(%v) has been published.\", quantname)\n\tusers := []string{\"auth0|\" + creator.UserID}\n\treturn notifications.SendAll(\"quant-published\", template, users, false)\n}", "title": "" }, { "docid": "642eca4cbcfe2208043dba65b0e969be", "score": "0.542364", "text": "func (u *user) notify1() {\n\tfmt.Printf(\"user: Sending user Email To %s<%s>\\n\",\n\t\tu.name,\n\t\tu.email)\n}", "title": "" }, { "docid": "95a005f41bed22ac38d7c9b9a1dfc20e", "score": "0.5414956", "text": "func (e EmailNotifier) Notify(email string, event Event, payload string) error {\n\tdomain := config.Opts.Mailer.Domain\n\tapiKey := config.Opts.Mailer.APIKey\n\tsender := fmt.Sprintf(\"no-reply@%s\", domain)\n\tsubject := fmt.Sprintf(\"[%s] %s Notification\", time.Now().Format(\"2006-01-02\"), event)\n\n\tif domain == \"\" && apiKey == \"\" {\n\t\tconfig.Opts.Logger.Warn().Msg(\"[WARNING] Mailgun not set up. Falling back to console output...\")\n\t\tconfig.Opts.Logger.Info().Str(\"email\", email).Str(\"subject\", subject).Str(\"payload\", payload).Msg(\"A notification attempt was made for user.\")\n\t\treturn nil\n\t}\n\n\tvar body string\n\tswitch event {\n\tcase PasswordReset:\n\t\tbody = fmt.Sprintf(passwordResetTemplate, email, payload)\n\tcase GenerateConfirmCode:\n\t\tbody = fmt.Sprintf(confirmCodeTemplate, email, payload)\n\tcase Welcome:\n\t\tbody = fmt.Sprintf(welcomeTemplate, email)\n\t}\n\n\tmg := mailgun.NewMailgun(domain, apiKey)\n\tmessage := mg.NewMessage(sender, subject, body, email)\n\t_, _, err := mg.Send(message)\n\treturn err\n}", "title": "" }, { "docid": "62f8fcc74528884bb5178e1b119fc681", "score": "0.5402904", "text": "func (u *user) notify2() {\n\tfmt.Printf(\"user: Sending user email To %s<%s>\\n\",\n\t\tu.name,\n\t\tu.email)\n}", "title": "" }, { "docid": "56d72120d0718c9a3259500c2ba26889", "score": "0.53906316", "text": "func TestUseNotify(t *testing.T) {\n\tconvey.Convey(\"TestUseNotify \", t, func() {\n\t\t_, err := s.UseNotify(c, &model.ArgAllowanceCheck{Mid: 1, OrderNo: \"1\"})\n\t\tconvey.So(err, convey.ShouldBeNil)\n\t})\n}", "title": "" }, { "docid": "416f1b7c3d012938639ac6153e9a81e6", "score": "0.53848726", "text": "func sendNotification(n notifier) {\n\tn.notify1()\n}", "title": "" }, { "docid": "6b59d11546e8380ff66882180814f6e6", "score": "0.5382123", "text": "func (handler *SubscriberHandler) NotifySubscriber(s Subscriber) {\n\n\t// log\n\tfmt.Println(\"Notifying \", *s.WebhookURL)\n\n\t// calc rate\n\trate, err := handler.Monitor.Latest(*s.BaseCurrency, *s.TargetCurrency)\n\tif err != nil {\n\t\tfmt.Println(\"\\tERROR: failed to get latest between \", *s.BaseCurrency,\n\t\t\t\" and \", *s.MinTriggerValue)\n\t}\n\n\t// log\n\tfmt.Println(\"\\tRate: \", rate)\n\tfmt.Println(\"\\tMinTriggerValue: \", 
*s.MinTriggerValue)\n\tfmt.Println(\"\\tMaxTriggerValue: \", *s.MaxTriggerValue)\n\n\t// should notify?\n\tif rate >= *s.MinTriggerValue && rate <= *s.MaxTriggerValue {\n\n\t\t// prepare payload\n\t\tpayload := CurrencyPayload{\n\t\t\tBaseCurrency: *s.BaseCurrency,\n\t\t\tTargetCurrency: *s.TargetCurrency,\n\t\t\tCurrentRate: rate,\n\t\t\tMinTriggerValue: *s.MinTriggerValue,\n\t\t\tMaxTriggerValue: *s.MaxTriggerValue,\n\t\t}\n\n\t\t// try 3 times to send notification\n\t\tfor i := 0; i < 3; i++ {\n\n\t\t\t// try sending notification\n\t\t\terr = sendNotification(*s.WebhookURL, payload)\n\t\t\tif err != nil {\n\n\t\t\t\t// failure... sleep to try again if not end of loop\n\t\t\t\tfmt.Println(\"\\tdidn't manage to notify: \", err.Error())\n\t\t\t\tif i < 3 {\n\t\t\t\t\ttime.Sleep(time.Second * 2)\n\t\t\t\t}\n\t\t\t} else {\n\n\t\t\t\t// success! break\n\t\t\t\tfmt.Println(\"\\tDid notify!\")\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\n\tfmt.Println(\"\\tdone trying\")\n\n}", "title": "" }, { "docid": "b6409bf9afafa22b92b829572a53b30e", "score": "0.5365856", "text": "func (s *weaponUI) notify(info string) {\n\tfor _, ch := range s.notifyChans {\n\t\tch <- info\n\t}\n}", "title": "" }, { "docid": "46682c5eacd84c7defc58ac34a22a253", "score": "0.5361429", "text": "func sendNotification(n notifier) {\n\tn.notify()\n}", "title": "" }, { "docid": "ff13562206d831c68c65be8a05e45aeb", "score": "0.53184336", "text": "func (p *SimplePeer) Notify(update Update) error {\n\tdata, err := json.MarshalIndent(update, \"\", \" \")\n\tif err != nil {\n\t\treturn err\n\t}\n\tbody := bytes.NewReader(data)\n\treq, err := http.NewRequest(http.MethodPost, p.URL.String(), body)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif p.Header != nil {\n\t\tfor key, v := range p.Header {\n\t\t\tfor i, val := range v {\n\t\t\t\tif i == 0 {\n\t\t\t\t\treq.Header.Set(key, val)\n\t\t\t\t} else {\n\t\t\t\t\treq.Header.Add(key, val)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\tresp, err := p.Client.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn fmt.Errorf(\"peer responded to notification with non-200 status code: %v\", resp.StatusCode)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "86a38759877e36934966c76d0454b9a6", "score": "0.52848554", "text": "func (bot *TipBot) sendHandler(c *tb.Callback) {\n\t// remove buttons from confirmation message\n\t_, err := bot.telegram.Edit(c.Message, MarkdownEscape(c.Message.Text), &tb.ReplyMarkup{})\n\tif err != nil {\n\t\tlog.Errorln(\"[sendHandler] \" + err.Error())\n\t}\n\t// decode callback data\n\t// log.Debug(\"[sendHandler] Callback: %s\", c.Data)\n\tuser, err := GetUser(c.Sender, *bot)\n\tif err != nil {\n\t\tlog.Printf(\"[GetUser] User: %d: %s\", c.Sender.ID, err.Error())\n\t\treturn\n\t}\n\tif user.StateKey != lnbits.UserStateConfirmSend {\n\t\tlog.Errorf(\"[sendHandler] User StateKey does not match! 
User: %d: StateKey: %d\", c.Sender.ID, user.StateKey)\n\t\treturn\n\t}\n\n\t// decode StateData in which we have information about the send payment\n\tsplits := strings.Split(user.StateData, \"|\")\n\tif len(splits) < 3 {\n\t\tlog.Error(\"[sendHandler] Not enough arguments in callback data\")\n\t\tlog.Errorf(\"user.StateData: %s\", user.StateData)\n\t\treturn\n\t}\n\ttoId, err := strconv.Atoi(splits[0])\n\tif err != nil {\n\t\tlog.Errorln(\"[sendHandler] \" + err.Error())\n\t}\n\ttoUserStrWithoutAt := splits[1]\n\tamount, err := strconv.Atoi(splits[2])\n\tif err != nil {\n\t\tlog.Errorln(\"[sendHandler] \" + err.Error())\n\t}\n\tsendMemo := \"\"\n\tif len(splits) > 3 {\n\t\tsendMemo = strings.Join(splits[3:], \"|\")\n\t}\n\n\t// reset state\n\tResetUserState(user, *bot)\n\n\t// we can now get the wallets of both users\n\tto := &tb.User{ID: toId, Username: toUserStrWithoutAt}\n\tfrom := c.Sender\n\ttoUserStrMd := GetUserStrMd(to)\n\tfromUserStrMd := GetUserStrMd(from)\n\ttoUserStr := GetUserStr(to)\n\tfromUserStr := GetUserStr(from)\n\n\ttransactionMemo := fmt.Sprintf(\"Send from %s to %s (%d sat).\", fromUserStr, toUserStr, amount)\n\tt := NewTransaction(bot, from, to, amount, TransactionType(\"send\"))\n\tt.Memo = transactionMemo\n\n\tsuccess, err := t.Send()\n\tif !success || err != nil {\n\t\t// NewMessage(m, WithDuration(0, bot.telegram))\n\t\tbot.trySendMessage(c.Sender, fmt.Sprintf(sendErrorMessage, err))\n\t\terrmsg := fmt.Sprintf(\"[/send] Error: Transaction failed. %s\", err)\n\t\tlog.Errorln(errmsg)\n\t\treturn\n\t}\n\n\tbot.trySendMessage(from, fmt.Sprintf(sendSentMessage, amount, toUserStrMd))\n\tbot.trySendMessage(to, fmt.Sprintf(sendReceivedMessage, fromUserStrMd, amount))\n\t// send memo if it was present\n\tif len(sendMemo) > 0 {\n\t\tbot.trySendMessage(to, fmt.Sprintf(\"✉️ %s\", MarkdownEscape(sendMemo)))\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "9574f1814ea595817b865200fe427564", "score": "0.52817416", "text": "func (b *Batch) Notify(method string, msg interface{}) error {\n\tbody, err := json.Marshal(msg)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tb.msg.Objects = append(b.msg.Objects, &txObject{\n\t\tRequest: &txRequest{\n\t\t\tNotification: true,\n\t\t\tMethod: method,\n\t\t\tParams: body,\n\t\t},\n\t})\n\n\treturn nil\n}", "title": "" }, { "docid": "20fcb6ea2c555ba0d9c57526de9c14c3", "score": "0.52747834", "text": "func notifyMe(privateKey string) {\n\t//Gmail setup and authentication requirements:\n\t//@email_addr contains the email account address.\n\t//@email_pass contains the email account password.\n\t//@email_prov contains the email account provider.\n\n\temail_addr := \"INSERT EMAIL ADDRESS HERE\"\n\temail_pass := \"INSERT EMAIL PASSWORD HERE\"\n\temail_prov := \"INSERT EMAIL PROVIDER HERE\"\n\tauth := smtp.PlainAuth(\"\", email_addr, email_pass, email_prov)\n\n\t//Email communication setup requirements:\n\t//@to contains the email recipient.\n\t//@msg contains the body of the email.\n\n\tto := []string{email_addr}\n\tmsg := []byte(\"To:\" + email_addr + \"\\r\\n\" +\n\t\t\"Subject: You've Just Compromised A Cryptocurrency Wallet\\r\\n\" +\n\t\t\"\\r\\n\" +\n\t\t\"Please Check The Following Private Key For Assets: \" + \" \" + privateKey)\n\n\terr := smtp.SendMail(email_prov+\":587\", auth, email_addr, to, msg)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "title": "" }, { "docid": "c0f5f3391014a15f0236970162253648", "score": "0.5256752", "text": "func (s *Service) Notify(n *domain.Notification) error {\n\tfmt.Println(n)\n\treturn nil\n}", "title": 
"" }, { "docid": "708a6cb81ef485f63d22b340802fb38b", "score": "0.52535355", "text": "func (b Bot) SendNotifyMessage(txt string, ChatId int64, kb interface{}) {\n\tif ok, n := b.Members[ChatId]; !ok {\n\t\tlog.Println(\"Haven't got this user!\")\n\t} else if n {\n\t\tmsg := tgbotapi.NewMessage(b.Dlg[ChatId].ChatId, txt)\n\t\tmsg.ParseMode = \"markdown\"\n\t\tmsg.ReplyMarkup = kb\n\t\tmsg.DisableWebPagePreview = true\n\t\tb.Bot.Send(msg)\n\t\tb.Dlg[ChatId].MessageId++\n\t}\n}", "title": "" }, { "docid": "80b0af6ad5a4934356a8656e9dd8fbb3", "score": "0.52455866", "text": "func (s *LendingOhlcvService) NotifyTrade(trade *types.LendingTrade) {\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\tfor _, d := range s.getConfig() {\n\t\tkey := s.getTickKey(trade.Term, trade.LendingToken, d.duration, d.unit)\n\t\ts.updateTick(key, trade)\n\t}\n\tif trade.BorrowingRelayer.Hex() == trade.InvestingRelayer.Hex() {\n\t\ts.updateRelayerTick(trade.BorrowingRelayer, s.getTickKey(trade.Term, trade.LendingToken, 1, \"hour\"), trade)\n\t} else {\n\t\ts.updateRelayerTick(trade.BorrowingRelayer, s.getTickKey(trade.Term, trade.LendingToken, 1, \"hour\"), trade)\n\t\ts.updateRelayerTick(trade.InvestingRelayer, s.getTickKey(trade.Term, trade.LendingToken, 1, \"hour\"), trade)\n\t}\n\tlastFrame := s.lastTimeFrame()\n\ts.updatelasttimeframe(trade.CreatedAt.Unix(), lastFrame)\n\tid := utils.GetLendingChannelID(trade.Term, trade.LendingToken)\n\ts.bulkPairs[id] = true\n}", "title": "" }, { "docid": "25a78e96105fe67fb0ffa3afe25f28e8", "score": "0.52328384", "text": "func (m *Mailer) SendStakeIncomeNotification(email, lang string, param Param) error {\n\treturn m.sendInvite(email, param, EmailLanguage(lang), StakingIncome)\n}", "title": "" }, { "docid": "1d2178dd86f5c214df681f2961e7cc9b", "score": "0.52268624", "text": "func (b *BufferNotifier) Notify(email string, event Event, payload string) error {\n\tvar body string\n\tswitch event {\n\tcase PasswordReset:\n\t\tbody = fmt.Sprintf(passwordResetTemplate, email, payload)\n\tcase GenerateConfirmCode:\n\t\tbody = fmt.Sprintf(confirmCodeTemplate, email, payload)\n\t}\n\tb.buffer.WriteString(body)\n\treturn nil\n}", "title": "" }, { "docid": "9b86d9b5cebd340f5567ab8f05b9115e", "score": "0.5213147", "text": "func Notify(msg string) error {\n\treturn bot.SendMessage(chat, msg, nil)\n}", "title": "" }, { "docid": "dfdcc69de4e244db4c223666b0610531", "score": "0.52035445", "text": "func (nm *NodesManager) NotifyMsg(buf []byte) {\n\tlog.Debug(\"User data received\")\n\tmessageType, message, err := getTypeAndEncodedMsg(buf)\n\tif err != nil {\n\t\tlog.Error(\"Could not receive message: %s\", err)\n\t\treturn\n\t}\n\n\tswitch messageType {\n\tcase MSG_DIVS_PKG_ETH:\n\t\tlog.Debug(\"Data packet received: %d bytes\", len(message))\n\t\t// TODO: send the message to the TAP device... 
maybe we should enqueue it\n\t\t// TODO: and then a worker could perform the real delivery\n\tdefault:\n\t\tlog.Error(\"Unknown message received: %s\", messageType)\n\t}\n}", "title": "" }, { "docid": "8d87ecc536bee532109af76053e98a81", "score": "0.5202159", "text": "func (s *rabbitMQService) notify(at *auth.AppToken, action string) error {\n\tcontent, err := json.Marshal(map[string]interface{}{\n\t\t\"app_token\": at,\n\t\t\"action\": action,\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = s.ch.Publish(\n\t\t\"token_update\", // exchange\n\t\t\"\", // routing key\n\t\tfalse, // mandatory\n\t\tfalse, // immediate\n\t\tamqp.Publishing{\n\t\t\tDeliveryMode: amqp.Persistent,\n\t\t\tContentType: \"text/json\",\n\t\t\tBody: content,\n\t\t\tAppId: \"auth_service\",\n\t\t})\n\t// fmt.Println(\"**** RabbitMQ ****\", at.Token, action)\n\treturn err\n}", "title": "" }, { "docid": "406b49fce5c999bb015c25c0a4194fc5", "score": "0.52001584", "text": "func (quote *Quote) OnLike(likedBy *User) {\n\tif !quote.IsValid() {\n\t\tcolor.Red(\"Invalid quote: %s\", quote.ID)\n\t\treturn\n\t}\n\n\tif likedBy.ID == quote.CreatedBy {\n\t\treturn\n\t}\n\n\tif !quote.Creator().Settings().Notification.QuoteLikes {\n\t\treturn\n\t}\n\n\tgo func() {\n\t\tquote.Creator().SendNotification(&PushNotification{\n\t\t\tTitle: likedBy.Nick + \" liked your \" + quote.Character().Name.Canonical + \" quote\",\n\t\t\tMessage: quote.Text.English,\n\t\t\tIcon: \"https:\" + likedBy.AvatarLink(\"large\"),\n\t\t\tLink: \"https://notify.moe\" + likedBy.Link(),\n\t\t\tType: NotificationTypeLike,\n\t\t})\n\t}()\n}", "title": "" }, { "docid": "1b7c6e0fe5187ba55d2857588bd94858", "score": "0.5175954", "text": "func (c *Client) Notify(ctx context.Context, method string, args ...interface{}) error {\n\top := new(requestOp)\n\tmsg, err := c.newMessage(method, args...)\n\tif err != nil {\n\t\treturn err\n\t}\n\tmsg.ID = nil\n\n\tif c.isHTTP {\n\t\treturn c.sendHTTP(ctx, op, msg)\n\t}\n\treturn c.send(ctx, op, msg)\n}", "title": "" }, { "docid": "69d9ffd046c73ec3e9a5a7d61579840c", "score": "0.5171007", "text": "func notifyUserActivity(topicChan <-chan Topic){\n\n\tfor activity := range userActivityChan {\n\t\tswitch activity.UserActivity {\n\t\tcase UserActivityPostMessage:\n\t\tcase UserActivityPutMessage:\n\t\tcase UserActivityPostAnswer:\n\t\tcase UserActivityPutAnswer:\n\t\tcase UserActivityPostComment:\n\t\tcase UserActivityPostCommentReply:\n\t\tcase UserActivityPutComment:\n\t\tdefault:\n\t\t\tcontinue\n\n\t\t}\n\n\t}\n}", "title": "" }, { "docid": "66d2a43b83219ca206cd0bd22e75d556", "score": "0.5155211", "text": "func notifyVIP(account model.Account) {\n\tif account.Id == \"10000\" {\n\t\tgo func(account model.Account) {\n\t\t\tvipNotification := model.VipNotification{AccountId: account.Id, ReadAt: time.Now().UTC().String()}\n\t\t\tdata, _ := json.Marshal(vipNotification)\n\t\t\tLOGGER.Infoln(\"Notifying VIP account %v\\n\", account.Id)\n\t\t\terr := MessagingClient.PublishOnQueue(data, \"vip_queue\")\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(err.Error())\n\t\t\t}\n\t\t}(account)\n\t}\n}", "title": "" }, { "docid": "4558e0efeec794de16cdbb916872579b", "score": "0.5145654", "text": "func (o *NotebookNotebook) SetNotifyCurrentUser(v bool) {\n\to.NotifyCurrentUser = &v\n}", "title": "" }, { "docid": "f6d146aa226eae15bc6795ac572b16c8", "score": "0.514346", "text": "func (c *PropertiesUserLinksCreateCall) NotifyNewUser(notifyNewUser bool) *PropertiesUserLinksCreateCall {\n\tc.urlParams_.Set(\"notifyNewUser\", 
fmt.Sprint(notifyNewUser))\n\treturn c\n}", "title": "" }, { "docid": "5b36b17236732a86b7406b33b6bf2407", "score": "0.5139845", "text": "func (p *PositionService) ListenNotify(ctx context.Context) error {\n\tp.pricesForPNL = make(map[string]map[string]chan models.Price)\n\t_, err := pgx.NewConnPool(pgx.ConnPoolConfig{\n\t\tConnConfig: pgx.ConnConfig{\n\t\t\tHost: \"localhost\",\n\t\t\tPort: 5432,\n\t\t\tDatabase: \"positions\",\n\t\t\tUser: \"postgres\",\n\t\t\tPassword: \"12345\",\n\t\t},\n\t\tAfterConnect: func(conn *pgx.Conn) error {\n\t\t\terr := conn.Listen(\"db_notifications_open\")\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\terr = conn.Listen(\"db_notifications_close\")\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tfor {\n\t\t\t\tmsg, err := conn.WaitForNotification(ctx)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\tpos := models.Price{}\n\t\t\t\tif err = json.Unmarshal([]byte(msg.Payload), &pos); err != nil {\n\t\t\t\t\tlog.Errorf(\"error while unmarchaling: %v\", err)\n\t\t\t\t}\n\n\t\t\t\tif msg.Channel == \"db_notifications_open\" {\n\n\t\t\t\t\tif p.pricesForPNL[pos.Name] == nil {\n\t\t\t\t\t\tp.pricesForPNL[pos.Name] = make(map[string]chan models.Price)\n\t\t\t\t\t}\n\t\t\t\t\tp.pricesForPNL[pos.Name][pos.Id] = make(chan models.Price)\n\n\t\t\t\t\tgo func() {\n\t\t\t\t\t\tfor {\n\t\t\t\t\t\t\tselect {\n\t\t\t\t\t\t\tcase lastPrice, find := <-p.pricesForPNL[pos.Name][pos.Id]:\n\t\t\t\t\t\t\t\tif !find {\n\t\t\t\t\t\t\t\t\tlog.Info(\"can't find current price...\")\n\t\t\t\t\t\t\t\t\treturn\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tlog.Infof(fmt.Sprintf(\"[%v] PNL = %v\", pos.Name, p.GetPNL(lastPrice, pos)))\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}()\n\t\t\t\t}\n\t\t\t\tif msg.Channel == \"db_notifications_close\" {\n\n\t\t\t\t\tif ch, ok := p.pricesForPNL[pos.Name][pos.Id]; ok {\n\t\t\t\t\t\tclose(ch)\n\t\t\t\t\t}\n\t\t\t\t\tdelete(p.pricesForPNL[pos.Name], pos.Id)\n\t\t\t\t}\n\n\t\t\t\tcontinue\n\t\t\t}\n\t\t},\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "1676836c047c6a6488f2026f56eb9ceb", "score": "0.51156074", "text": "func (c *Controller) notify(name string, data []byte) error {\n\tc.mu.Lock()\n\tdefer c.mu.Unlock()\n\tif !c.isActive(name) {\n\t\treturn fmt.Errorf(\"Notification service %s doesn't exist\", name)\n\t}\n\tmsg := NewMsg(MsgCodeNotify, name, data)\n\tsmsg, err := rlp.EncodeToBytes(msg)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor _, m := range c.notifiers[name].bins {\n\t\tlog.Debug(\"sending pss notify\", \"name\", name, \"addr\", fmt.Sprintf(\"%x\", m.address), \"topic\", fmt.Sprintf(\"%x\", c.notifiers[name].topic), \"data\", data)\n\t\tgo func(m *sendBin) {\n\t\t\terr = c.pss.SendSym(m.symKeyId, c.notifiers[name].topic, smsg)\n\t\t\tif err != nil {\n\t\t\t\tlog.Warn(\"Failed to send notify to addr %x: %v\", m.address, err)\n\t\t\t}\n\t\t}(m)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "478b38522729e961f15732aa149e5fd9", "score": "0.5114029", "text": "func GivePointsToUser(userID string, numPoints int, authorID string) (message string, err error) {\n\ttimeNow := time.Now().UTC()\n\ttimeString := strconv.Itoa(timeNow.Year()) + \"-\" + timeNow.Month().String() + \"-\" + strconv.Itoa(timeNow.Day())\n\ttimeKey := formatTimeKey(timeString, authorID)\n\n\ttimeValue, timeErr := utils.RedisClient.Get(timeKey).Result()\n\n\tgivenPoints := 0\n\tif timeErr == nil {\n\t\tfmt.Println(\"User has changed \" + timeValue + \" points today.\")\n\t\tgivenPoints, _ = 
strconv.Atoi(timeValue)\n\t}\n\n\tif givenPoints+numPoints > MAX_POINTS_PER_DAY {\n\t\treturn \"\", errors.New(\"You are attempting to change more than the maximum allotted \" + strconv.Itoa(MAX_POINTS_PER_DAY) + \" points per day. You have added/removed \" + strconv.Itoa(givenPoints) + \" today.\")\n\t}\n\n\tformattedUserKey := formatPointKey(userID)\n\tvalue, err := utils.RedisClient.Get(formattedUserKey).Result()\n\n\tif err == redis.Nil {\n\t\tfmt.Println(\"Points do not exist for user: \" + userID + \". Adding.\")\n\t\tutils.RedisClient.Set(formattedUserKey, numPoints, 0)\n\t} else if err != nil {\n\t\treturn \"\", err\n\t} else {\n\t\tintPoints, _ := strconv.Atoi(value)\n\t\tfmt.Println(\"Current point value for user: \", value)\n\t\tnewPoints := intPoints + numPoints\n\t\tfmt.Println(\"New point value for user: \", newPoints)\n\t\tutils.RedisClient.Set(formattedUserKey, newPoints, 0)\n\t}\n\n\tif timeErr == redis.Nil {\n\t\tfmt.Println(\"User: \" + userID + \" has not given points today. Adding.\")\n\t\tutils.RedisClient.Set(timeKey, numPoints, 0)\n\t} else if timeErr != nil {\n\t\treturn \"\", timeErr\n\t} else {\n\t\tintPoints, _ := strconv.Atoi(timeValue)\n\t\tfmt.Println(\"Current points given for user: \", timeValue)\n\t\tnewPoints := intPoints + numPoints\n\t\tfmt.Println(\"New points given for user: \", newPoints)\n\t\tutils.RedisClient.Set(timeKey, newPoints, 0)\n\t}\n\n\treturn \"Points successfully awarded\", nil\n}", "title": "" }, { "docid": "aa81fe1d09ca1920473aad130de503bf", "score": "0.510758", "text": "func (a *apiManager) notifyChange(arg interface{}) {\n\ta.newSignal <- arg\n}", "title": "" }, { "docid": "0a57bfb8800b3eb675e0e336646c8c8a", "score": "0.5090599", "text": "func (pn *PushoverNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {\n\theaders, uploadBody, err := pn.genPushoverBody(ctx, as...)\n\tif err != nil {\n\t\tpn.log.Error(\"Failed to generate body for pushover\", \"error\", err)\n\t\treturn false, err\n\t}\n\n\tcmd := &SendWebhookSettings{\n\t\tUrl: PushoverEndpoint,\n\t\tHttpMethod: \"POST\",\n\t\tHttpHeader: headers,\n\t\tBody: uploadBody.String(),\n\t}\n\n\tif err := pn.ns.SendWebhook(ctx, cmd); err != nil {\n\t\tpn.log.Error(\"failed to send pushover notification\", \"error\", err, \"webhook\", pn.Name)\n\t\treturn false, err\n\t}\n\n\treturn true, nil\n}", "title": "" }, { "docid": "b10aba1167abda7fa89f7f9343c30433", "score": "0.50662637", "text": "func NotifyChange(p *vote.Poll) {\n\tHubInstance.Broadcast <- p\n}", "title": "" }, { "docid": "95e1f5e4b24ef600f428cb429c629de8", "score": "0.5060687", "text": "func (s *catalogSyncer) Send(notification datastore.PackageUpdateNotification) {\n\tgo func() {\n\t\tlog.Info(\"[sync] sending list of package(s) with new version\")\n\t\ts.notificationCh <- notification\n\t}()\n}", "title": "" }, { "docid": "423b11fd3cd748823496f77beef73abd", "score": "0.5057481", "text": "func (k *KBPKIClient) Notify(ctx context.Context, notification *keybase1.FSNotification) error {\n\treturn k.serviceOwner.KeybaseService().Notify(ctx, notification)\n}", "title": "" }, { "docid": "f6cef5511a209540c09a8b88582e6509", "score": "0.5046958", "text": "func (p Provider) Send(message notify.Message, extras notify.Extras) error {\n\tmsg := Message{\n\t\tAction: \"new_visitor\",\n\t\tContent: extras,\n\t}\n\n\tvar jsonBuffer bytes.Buffer\n\tmarshaled, err := json.Marshal(msg)\n\tif err != nil {\n\t\treturn err\n\t}\n\tjsonBuffer.Write(marshaled)\n\n\tresponse, err := http.Post(p.URL, \"application/json\", 
&jsonBuffer)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif response.StatusCode != http.StatusOK {\n\t\treturn errors.New(\"http status not ok\")\n\t} else {\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "4109a513a9d975c77db882205fedaf68", "score": "0.50406826", "text": "func HandleUserNotification(r api.APIRequest) (api.APIResponse, error) {\n\tif strings.ToUpper(r.HTTPMethod) == http.MethodPatch {\n\t\treturn updateNotification(r)\n\t}\n\n\treturn api.NotSupported(r)\n}", "title": "" }, { "docid": "f2c482e143ffb8ba2287827261c6c669", "score": "0.50296736", "text": "func (this *Application) Notify(path string, fn ...func(*Request, *Response, func())) {\n\tthis.Verb(\"notify\", path, fn...)\n}", "title": "" }, { "docid": "bf4a85778177137fa0cb4f0c6961c112", "score": "0.5018009", "text": "func (eh *EventHandlerMqtt) TotalItemConsumedForUserChanged(userID uint32, username string, itemID uint32, itemName string, itemCost int32, totalCount uint32) {\n\teh.publishMessage(eh.buildTotalItemConsumedForUserChanged(userID, username, itemID, itemName, itemCost, totalCount))\n}", "title": "" }, { "docid": "02dc3f89f23cfb37a9fc3af272e6b4ab", "score": "0.5016104", "text": "func (console Console) Notify(to, msg string) error {\n\tfmt.Printf(\"[%s]: %s\\n\", to, msg)\n\treturn nil\n}", "title": "" }, { "docid": "e36a730e0c4446ad1992f8c372dad4fb", "score": "0.501402", "text": "func (b *LocalBackend) send(n ipn.Notify) {\n\tb.mu.Lock()\n\tnotify := b.notify\n\tb.mu.Unlock()\n\n\tif notify != nil {\n\t\tn.Version = version.Long\n\t\tnotify(n)\n\t} else {\n\t\tb.logf(\"nil notify callback; dropping %+v\", n)\n\t}\n}", "title": "" }, { "docid": "205848e4f2cae7744911b368772970b2", "score": "0.50109977", "text": "func (c *AccountsUserLinksCreateCall) NotifyNewUser(notifyNewUser bool) *AccountsUserLinksCreateCall {\n\tc.urlParams_.Set(\"notifyNewUser\", fmt.Sprint(notifyNewUser))\n\treturn c\n}", "title": "" }, { "docid": "1c73798543a76ee07a15830268b4c190", "score": "0.5002438", "text": "func (c *Client)handleAlertNotify(w http.ResponseWriter, req *http.Request) {\n\tif err:=req.ParseForm(); err!=nil {\n\t\tif content, err := ioutil.ReadAll(req.Body); err==nil {\n\t\t\tL4g.Error(\"Bitcoin http server handle wallet notify, err content:%s\",\n\t\t\t\terr.Error())\n\t\t}else {\n\t\t\tL4g.Error(\"Bitcoin http server cannot parse form data : %s\", string(content))\n\t\t}\n\t}\n\tmessage := req.Form[\"alert\"]\n\tL4g.Trace(\"alert=%s\", message[0])\n}", "title": "" }, { "docid": "6c35a928d41948f30a4dfca0df213e3c", "score": "0.49973541", "text": "func sendVerificationEmail(user model.User) {\n\tsubject := \"Edge Tech Vietnam - Email verification\"\n\tlog.Println(user.Email)\n\trequest := service.NewRequest([]string{user.Email}, subject)\n\n\tvar info = fmt.Sprintf(\"%d|%d\", user.ID, time.Now().UnixNano())\n\tvar verificationToken = crypt.Encrypt([]byte(info))\n\trequest.Send(\"templates/email.html\", map[string]string {\n\t\t\"username\":user.FullName,\n\t\t\"url\" : config.Get().MailServer.Domain + config.Get().Server.BaseContextPath + \"verify?token=\" + verificationToken,\n\t})\n}", "title": "" }, { "docid": "3a63cb5b725a582b4af058f684d22ae4", "score": "0.49928555", "text": "func Notify(title, message string) error {\n\t_, err := exec.LookPath(\"notify-send\")\n\tif err != nil {\n\t\treturn errors.New(\"Install 'notify-send' and try again\")\n\t}\n\n\tcmd := exec.Command(\"notify-send\", title, message)\n\tcmd.Stderr = os.Stderr\n\tif err = cmd.Run(); err != nil {\n\t\treturn fmt.Errorf(\"Banner: %s\", 
err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "f3b9fbfeb0dbeb2efdf084e14ed7d782", "score": "0.49913293", "text": "func handleNotifySpent(s *rpcServer, cmd btcjson.Cmd,\n\twalletNotification chan []byte, rc *requestContexts) error {\n\n\tid := cmd.Id()\n\treply := &btcjson.Reply{Id: &id}\n\n\tnotifyCmd, ok := cmd.(*btcws.NotifySpentCmd)\n\tif !ok {\n\t\treturn btcjson.ErrInternal\n\t}\n\n\ts.ws.AddSpentRequest(walletNotification, rc, notifyCmd.OutPoint,\n\t\tcmd.Id())\n\n\tmreply, _ := json.Marshal(reply)\n\twalletNotification <- mreply\n\treturn nil\n}", "title": "" }, { "docid": "cee6775df898ba4639a15e3acdecb1ff", "score": "0.49910334", "text": "func NotifyInfo(ctx context.Context, m PubSubMessage) error {\n\n\t//ctx := context.Background()\n\tr, err := getNewInfo(ctx)\n\tif err != nil {\n\t\tlog.Fatalf(\"%v\", err)\n\t}\n\n\tif len(r) == 0 {\n\t\treturn nil\n\t}\n\n\tvar lastUpd string\n\tvar messageString string\n\n\tmessageString = \"===========================\\n\"\n\tfor _, gri := range r {\n\t\tmessageString += \"[Title]\\n\" + gri.Title + \"(\" + gri.IncidentNo + \")\\n\\n\"\n\t\tmessageString += \"[Status]\\n\" + gri.Status + \"\\n\\n\"\n\t\tmessageString += \"[Update]\\n\" + gri.Updated + \"\\n\\n\"\n\t\tmessageString += \"[LINK]\\n\" + gri.URL + \"\\n\\n\"\n\t\tmessageString += \"[Detail]\\n\" + gri.Detail + \"\\n\\n\"\n\t\tmessageString += \"--------------------------\\n\"\n\n\t\tlastUpd = gri.Updated\n\t}\n\tSendMessage(messageString)\n\n\t// ToDo : update update-time collection\n\tupdateLastUpdTime(ctx, lastUpd)\n\treturn nil\n}", "title": "" }, { "docid": "bd36ab69edff9a93fe1f541da54e3836", "score": "0.49893615", "text": "func (o *Ops) SendTo(user, message string) {\n\tc := o.server.clientHolder.GetByName(user)\n\tc.conn.Write([]byte(message))\n\tlog.Printf(\"[audit] %s: %s <- %s\", c.room, user, message)\n}", "title": "" }, { "docid": "aeb6f44f9338a1a2c17697b91969aea2", "score": "0.49805042", "text": "func NotifyChange(asset string) {\n\tif !viper.GetBool(\"notification.enabled\") {\n\t\treturn\n\t}\n\tnotifyChange <- asset\n\treturn\n}", "title": "" }, { "docid": "8f4477a187cd81b1c3483d04f860f2b4", "score": "0.4970625", "text": "func notify(c appengine.Context, com *Commit, builder, logHash string) {\n\tif !updateCL(c, com, builder, logHash) {\n\t\t// Send a mail notification if the CL can't be found.\n\t\tsendFailMail(c, com, builder, logHash)\n\t}\n}", "title": "" }, { "docid": "427d200359084b637971f7bfc9c51daa", "score": "0.49499658", "text": "func (b *BlockChain) sendNotification(typ NotificationType, data interface{}) {\n\t// Ignore it if the caller didn't request notifications.\n\tif b.notifications == nil {\n\t\treturn\n\t}\n\n\t// Generate and send the notification.\n\tn := Notification{Type: typ, Data: data}\n\tb.notifications(&n)\n}", "title": "" }, { "docid": "40da778e6d60d814bc38b08beffb8d8b", "score": "0.49480534", "text": "func notify(msg string, topic string, region string) {\n\tlog.Println(msg)\n\tif config.Disablenotification {\n\t\tlog.Println(\"notification is disabled\")\n\t\treturn\n\t}\n\tsess := session.Must(session.NewSession())\n\t//region for AWS SNS topics\n\tsvc := sns.New(sess, aws.NewConfig().WithRegion(region))\n\tparams := &sns.PublishInput{\n\t\tMessage: aws.String(msg),\n\t\tTopicArn: aws.String(topic),\n\t}\n\t_, err := svc.Publish(params)\n\tif err != nil {\n\t\tlog.Println(\"failed to send SNS\", err)\n\t}\n}", "title": "" }, { "docid": "ceca83a5082cf799fa20e292c632602e", "score": "0.49409547", "text": "func (c *Conn) 
Notify(ctx context.Context, method string, params interface{}, opts ...CallOption) error {\n\treq := &Request{Method: method, Notif: true}\n\tfor _, opt := range opts {\n\t\tif opt == nil {\n\t\t\tcontinue\n\t\t}\n\t\tif err := opt.apply(req); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif params != nil {\n\t\tif err := req.SetParams(params); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\t_, err := c.send(ctx, &anyMessage{request: req}, false)\n\treturn err\n}", "title": "" }, { "docid": "148f1ecfca2064545abea673a8889e22", "score": "0.49357253", "text": "func (c *NotificationController) NotificationUpdate() {\n\tlog.Println(\"notificationnnnnnnnnnnnnnnnnnnnnnnnnn\")\n\tcompanyTeamName := c.Ctx.Input.Param(\":companyTeamName\")\n\tUpdateIdArray := c.GetStrings(\"DeletedId\")\n\texpiryId := c.GetStrings(\"DeletedExpiryId\")\n\tuserId := c.GetStrings(\"DeletedUserId\")\n\tlog.Println(\"uuuitititit\",expiryId)\n\tlog.Println(\"userid\",userId)\n\tw := c.Ctx.ResponseWriter\n\tdbStatus:= models.UpdateAllNotifications(c.AppEngineCtx,companyTeamName,UpdateIdArray,expiryId,userId)\n\tswitch dbStatus {\n\tcase true:\n\t//w.Write([]byte(\"true\"))\n\n\tcase false:\n\t\tw.Write([]byte(\"false\"))\n\n\t}\n}", "title": "" }, { "docid": "bbd2478431a1394015189c7471a20967", "score": "0.49330112", "text": "func CheckNotifyEmailHandle(c *fiber.Ctx) error {\n\n\t// Create service\n\tnotificationService, serviceErr := service.NewNotificationService(database.Db)\n\tif serviceErr != nil {\n\t\tlog.Error(\"[CheckNotifyEmailHandle.NewNotificationService] %s\", serviceErr.Error())\n\t\treturn c.Status(http.StatusInternalServerError).JSON(utils.Error(\"internal/notificationService\", \"Error happened while creating notificationService!\"))\n\t}\n\n\tnotificationList, err := notificationService.GetLastNotifications()\n\tif err != nil {\n\t\tlog.Error(\"[CheckNotifyEmailHandle.GetLastNotifications] %s\", err.Error())\n\t\treturn c.Status(http.StatusInternalServerError).JSON(utils.Error(\"internal/notificationList\", \"Error happened while getting notification list!\"))\n\t}\n\n\tif !(len(notificationList) > 0) {\n\t\treturn c.SendStatus(http.StatusOK)\n\t}\n\n\tvar recIds []uuid.UUID\n\tfor _, notification := range notificationList {\n\t\tnotification.IsEmailSent = true\n\t\trecIds = append(recIds, notification.NotifyRecieverUserId)\n\t}\n\n\tcurrentUser, ok := c.Locals(\"user\").(types.UserContext)\n\tif !ok {\n\t\tcurrentUser = types.UserContext{}\n\t}\n\n\tuserInfoInReq := &UserInfoInReq{\n\t\tUserId: currentUser.UserID,\n\t\tUsername: currentUser.Username,\n\t\tAvatar: currentUser.Avatar,\n\t\tDisplayName: currentUser.DisplayName,\n\t\tSystemRole: currentUser.SystemRole,\n\t}\n\tmappedSettings, getSettingsErr := getUsersNotificationSettings(recIds, userInfoInReq)\n\tif err != nil {\n\t\tlog.Error(\"[CheckNotifyEmailHandle.getUsersNotificationSettings] %s\", getSettingsErr.Error())\n\t\treturn c.Status(http.StatusInternalServerError).JSON(utils.Error(\"internal/notificationSettings\", \"Error happened while getting user notification setting!\"))\n\n\t}\n\n\tvar updateNotifyIds []uuid.UUID\n\tfor _, notification := range notificationList {\n\t\tkey := getSettingPath(notification.NotifyRecieverUserId, notificationSettingType, settingMappedFromNotify[notification.Type])\n\t\tif mappedSettings[key] == \"true\" {\n\t\t\tlog.Info(\"Sending notify email to %s\", notification.NotifyRecieverEmail)\n\n\t\t\tgo func(notify dto.Notification, c *fiber.Ctx) {\n\t\t\t\tbuf := bytebufferpool.Get()\n\t\t\t\tdefer 
bytebufferpool.Put(buf)\n\t\t\t\tnotify.Title = getNotificationTitleByType(notify.Type, notify.OwnerDisplayName)\n\t\t\t\temailData := fiber.Map{\n\n\t\t\t\t\t\"AppName\": *coreConfig.AppConfig.AppName,\n\t\t\t\t\t\"AppURL\": notifyConfig.NotificationConfig.WebURL,\n\t\t\t\t\t\"Title\": notify.Title,\n\t\t\t\t\t\"Avatar\": notify.OwnerAvatar,\n\t\t\t\t\t\"FullName\": notify.OwnerDisplayName,\n\t\t\t\t\t\"ViewLink\": combineURL(notifyConfig.NotificationConfig.WebURL, notify.URL),\n\t\t\t\t\t\"UnsubscribeLink\": combineURL(notifyConfig.NotificationConfig.WebURL, \"settings/notify\"),\n\t\t\t\t}\n\t\t\t\tc.App().Config().Views.Render(buf, \"notify_email\", emailData, c.App().Config().ViewsLayout)\n\t\t\t\terr := sendEmailNotification(notify, buf.String())\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Error(\"Send email notification - %s\", err.Error())\n\t\t\t\t}\n\t\t\t\tlog.Info(\"Notify email sent to %s\", notify.NotifyRecieverEmail)\n\t\t\t}(notification, c)\n\t\t}\n\n\t\tupdateNotifyIds = append(updateNotifyIds, notification.ObjectId)\n\t}\n\n\tif len(updateNotifyIds) > 0 {\n\t\terr = notificationService.UpdateEmailSent(updateNotifyIds)\n\t\tif err != nil {\n\t\t\tlog.Error(\"Update last notifications - %s\", err.Error())\n\t\t\tlog.Error(\"[CheckNotifyEmailHandle.UpdateEmailSent] %s\", getSettingsErr.Error())\n\t\t\treturn c.Status(http.StatusInternalServerError).JSON(utils.Error(\"internal/updateEmailSent\", \"Error happened while updating notification!\"))\n\t\t}\n\t}\n\n\treturn c.SendStatus(http.StatusOK)\n\n}", "title": "" }, { "docid": "edab5e22441cbb64547f72bae74ac6cc", "score": "0.49261367", "text": "func (c *Client) Send(jnl journal.Journal) {\n\tfor _, n := range c.notifiers {\n\t\tlog.Debug().Msgf(\"Sending %s notification...\", n.Name())\n\t\tif err := n.Send(jnl); err != nil {\n\t\t\tlog.Error().Err(err).Msgf(\"%s notification failed\", n.Name())\n\t\t}\n\t}\n}", "title": "" }, { "docid": "e9e90d3e5ae690d484e52b721c42f082", "score": "0.49239942", "text": "func (s Service) Notify(subject, body string) error {\n\treturn s.NotifyWithHeaders(subject, body, nil)\n}", "title": "" }, { "docid": "826e9e5a781c7abcd69391f234e78f3d", "score": "0.490274", "text": "func ShowNotification(header string, body string) {\n\terr := beeep.Notify(header, body, \"assets/information.png\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}", "title": "" }, { "docid": "62682d6b69a742a6771ee5ffdee3ce4b", "score": "0.48969695", "text": "func (c *CMPPlugin) Notify(m *global.EventMessage) error {\n\t/*request_com := global.Component{Id: m.ComponentId}\n\tcom, comerr := request_com.Get(m.ComponentId)\n\tif(comerr != nil) {\n\t\treturn comerr\n\t}\n\trequest_ci := global.CI{Id: com.Inputs.CIID}\n\tci, cierr := request_ci.Get(com.Inputs.CIID)\n\tif(cierr != nil) {\n\t\treturn cierr\n\t}\n\tif(ci.SCM == \"CMP\") {\n\t\tlog.Info(\"CMP is worked\")\n\t} else {\n\t\tlog.Info(\"CMP is skipped\")\n\t}*/\n\treturn nil\n}", "title": "" }, { "docid": "d8a307cbab4ed9fbcb24edaa9ca81b86", "score": "0.48917997", "text": "func Notify(topic string, message Message) {\n\tmessageJSON, err := json.Marshal(message)\n\tif err != nil {\n\t\tlogrus.Error(err)\n\t\treturn\n\t}\n\tif client != nil && client.IsConnected() && client.IsConnectionOpen() {\n\t\tclient.Publish(\"notification/\"+topic, 0, false, messageJSON)\n\t\treturn\n\t}\n\t_, ok := mapChannels[\"notification/\"+topic]\n\tif !ok {\n\t\tmapChannels[\"notification/\"+topic] = make(chan []byte, 100)\n\t}\n\tif client != nil {\n\t\tselect {\n\t\tcase 
mapChannels[\"notification/\"+topic] <- messageJSON:\n\t\tcase <-time.After(3 * time.Second):\n\t\t\t//client = nil //maybe disconnect\n\t\t\t//..it's blocked doing nothing\n\t\t}\n\t}\n\n}", "title": "" }, { "docid": "be9f0180e6ea1a465ae3239848949af0", "score": "0.4885842", "text": "func UpdateUserStock(userId string, stock string, amount float64) error {\n\tglog.Info(\"DB:\\tExecuting STOCK UPDATE for \", userId, \" stock: \", stock, \" amount: \", amount)\n\tstmt, err := DB.Prepare(\"UPDATE stock SET amount= amount + ? where user_id =? and symbol=?\")\n\n\tif err != nil {\n\t\tglog.Error(err, \" \", userId)\n\t\treturn errors.New(\"Cannot create an update stock query\")\n\t}\n\tdefer stmt.Close()\n\n\t_, err = stmt.Exec(amount, userId, stock)\n\n\tif err != nil {\n\t\tglog.Error(err, \" \", userId)\n\t\treturn errors.New(\"Cannot execute an update stock query\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "04bba2fe1b86e7979fa2b3f5a9c9c810", "score": "0.48844627", "text": "func (kn *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error {\n\tstate := evalContext.Rule.State\n\n\tcustomData := triggMetrString\n\tfor _, evt := range evalContext.EvalMatches {\n\t\tcustomData = customData + fmt.Sprintf(\"%s: %v\\n\", evt.Metric, evt.Value)\n\t}\n\n\tkn.log.Info(\"Notifying Kafka\", \"alert_state\", state)\n\n\trecordJSON := simplejson.New()\n\trecords := make([]interface{}, 1)\n\n\tbodyJSON := simplejson.New()\n\t//get alert state in the kafka output issue #11401\n\tbodyJSON.Set(\"alert_state\", state)\n\tbodyJSON.Set(\"description\", evalContext.Rule.Name+\" - \"+evalContext.Rule.Message)\n\tbodyJSON.Set(\"client\", \"Grafana\")\n\tbodyJSON.Set(\"details\", customData)\n\tbodyJSON.Set(\"incident_key\", \"alertId-\"+strconv.FormatInt(evalContext.Rule.ID, 10))\n\n\truleURL, err := evalContext.GetRuleURL()\n\tif err != nil {\n\t\tkn.log.Error(\"Failed get rule link\", \"error\", err)\n\t\treturn err\n\t}\n\tbodyJSON.Set(\"client_url\", ruleURL)\n\n\tif evalContext.ImagePublicURL != \"\" {\n\t\tcontexts := make([]interface{}, 1)\n\t\timageJSON := simplejson.New()\n\t\timageJSON.Set(\"type\", \"image\")\n\t\timageJSON.Set(\"src\", evalContext.ImagePublicURL)\n\t\tcontexts[0] = imageJSON\n\t\tbodyJSON.Set(\"contexts\", contexts)\n\t}\n\n\tvalueJSON := simplejson.New()\n\tvalueJSON.Set(\"value\", bodyJSON)\n\trecords[0] = valueJSON\n\trecordJSON.Set(\"records\", records)\n\tbody, _ := recordJSON.MarshalJSON()\n\n\ttopicURL := kn.Endpoint + \"/topics/\" + kn.Topic\n\n\tcmd := &models.SendWebhookSync{\n\t\tUrl: topicURL,\n\t\tBody: string(body),\n\t\tHttpMethod: \"POST\",\n\t\tHttpHeader: map[string]string{\n\t\t\t\"Content-Type\": \"application/vnd.kafka.json.v2+json\",\n\t\t\t\"Accept\": \"application/vnd.kafka.v2+json\",\n\t\t},\n\t}\n\n\tif err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {\n\t\tkn.log.Error(\"Failed to send notification to Kafka\", \"error\", err, \"body\", string(body))\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "653865efc98ee0577f4508af43343a00", "score": "0.48844516", "text": "func broadcast(meta *sheetWSMetaEntry, rawNtf []byte) {\n\tmeta.userMap.Range(func(k interface{}, v interface{}) bool {\n\t\tuserMeta := v.(*sheetWSUserMetaEntry)\n\t\tuserMeta.notifyChan <- rawNtf\n\t\treturn true\n\t})\n}", "title": "" }, { "docid": "a7483020b73a3aac11f26782f55aad6b", "score": "0.48809054", "text": "func notifySuccess(msg string) {\n\tbeeep.Notify(\"smartlight - Success\", \"smartlight service \"+msg, iconSuccessPath)\n}", "title": "" 
}, { "docid": "159778a324ac2f4ead6f0f96445c1f3b", "score": "0.48708707", "text": "func (s *SlackNotifier) Notify(routingKey string, msg ...string) error {\n\tlog.Println(\"Starting slack notify\")\n\tif s.ClientToken == \"\" {\n\t\treturn errors.New(\"No client token set\")\n\t}\n\tlog.Println(\"Building slack attachment\")\n\tattachments := slack.Attachment{\n\t\tPretext: \"Important Notification\",\n\t\tText: strings.Join(msg, \"\\n\"),\n\t}\n\tlog.Println(\"Configuring slack sending parameters\")\n\tparams := slack.PostMessageParameters{\n\t\tAttachments: []slack.Attachment{attachments},\n\t}\n\tlog.Println(\"The number of channel being messaged are:\", len(s.Channels))\n\tfor _, channel := range s.Channels {\n\t\t_, _, err := s.Client.PostMessage(channel, \"Notification from application\", params)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tlog.Println(\"Notified slack channel:\", channel)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "f6a28dfe477590bade2f867d07bf5ce6", "score": "0.48664358", "text": "func (s *NodeServer) Notify(ctx context.Context, req *rpcpb.NotifyRequest) (*rpcpb.NotifyResponse, error) {\n\tresp := &rpcpb.NotifyResponse{}\n\n\tif err := s.validate(ctx, req.Data, req.Signature, req.NetworkID); err != nil {\n\t\treturn resp, status.Errorf(codes.InvalidArgument, \"input validation failed: %v\", err)\n\t}\n\n\tswitch req.MsgType {\n\tcase rpcpb.NotifyMsgType_TX:\n\t\ttx, err := ultpb.DecodeTx(req.Data)\n\t\tif err != nil {\n\t\t\treturn resp, status.Error(codes.InvalidArgument, \"decode tx failed\")\n\t\t}\n\t\t// Get the txKey.\n\t\ttxKey, err := ultpb.GetTxKey(tx)\n\t\tif err != nil {\n\t\t\treturn resp, status.Errorf(codes.Internal, \"get tx key failed: %v\", err)\n\t\t}\n\t\ttxf := &future.Tx{Tx: tx, TxKey: txKey}\n\t\ttxf.Init()\n\t\ts.txFuture <- txf\n\t\tif err := txf.Error(); err != nil {\n\t\t\treturn resp, status.Errorf(codes.Internal, \"add tx failed: %v\", err)\n\t\t}\n\tcase rpcpb.NotifyMsgType_STATEMENT:\n\t\tstmt, err := ultpb.DecodeStatement(req.Data)\n\t\tif err != nil {\n\t\t\treturn resp, status.Error(codes.InvalidArgument, \"decode statement failed\")\n\t\t}\n\t\tsf := &future.Statement{Stmt: stmt}\n\t\tsf.Init()\n\t\ts.stmtFuture <- sf\n\t\tif err := sf.Error(); err != nil {\n\t\t\treturn resp, status.Errorf(codes.Internal, \"add statement failed: %v\", err)\n\t\t}\n\t}\n\n\treturn resp, nil\n}", "title": "" }, { "docid": "323442ccb8412bc2f32526fe92fbc071", "score": "0.4857169", "text": "func notifyPlayer(msg string, receiver net.Conn) {\n\treceiver.Write([]byte(msg))\n}", "title": "" }, { "docid": "76e760c5ba577f244a94acca6a0f4079", "score": "0.48554364", "text": "func (module *EmailNotifier) Notify(status *protocol.ConsumerGroupStatus, eventID string, startTime time.Time, stateGood bool) {\n\tlogger := module.Log.With(\n\t\tzap.String(\"cluster\", status.Cluster),\n\t\tzap.String(\"group\", status.Group),\n\t\tzap.String(\"id\", eventID),\n\t\tzap.String(\"status\", status.Status.String()),\n\t)\n\n\tvar tmpl *template.Template\n\tif stateGood {\n\t\ttmpl = module.templateClose\n\t} else {\n\t\ttmpl = module.templateOpen\n\t}\n\n\t// Put the from and to lines in without the template. 
Template should set the subject line, followed by a blank line\n\tbytesToSend := bytes.NewBufferString(\"From: \" + module.from + \"\\nTo: \" + module.to + \"\\n\")\n\tmessageBody, err := executeTemplate(tmpl, module.extras, status, eventID, startTime)\n\tif err != nil {\n\t\tlogger.Error(\"failed to assemble\", zap.Error(err))\n\t\treturn\n\t}\n\tbytesToSend.Write(messageBody.Bytes())\n\n\terr = module.sendMailFunc(module.serverWithPort, module.auth, module.from, []string{module.to}, bytesToSend.Bytes())\n\tif err != nil {\n\t\tlogger.Error(\"failed to send\", zap.Error(err))\n\t}\n}", "title": "" }, { "docid": "66055767669b0c216c9d3c8657520911", "score": "0.48546222", "text": "func notification(w http.ResponseWriter, r *http.Request) {\n\treqPayload := &coreapi.ChargeReqWithMap{}\n\terr := json.NewDecoder(r.Body).Decode(reqPayload)\n\tif err != nil {\n\t\t// do something\n\t\treturn\n\t}\n\n\tencode, _ := json.Marshal(reqPayload)\n\tresArray := make(map[string]string)\n\terr = json.Unmarshal(encode, &resArray)\n\n\tresp, e := c.CheckTransaction(resArray[\"order_id\"])\n\tif e != nil {\n\t\thttp.Error(w, e.GetMessage(), http.StatusInternalServerError)\n\t\treturn\n\t} else {\n\t\tif resp != nil {\n\t\t\tif resp.TransactionStatus == \"capture\" {\n\t\t\t\tif resp.FraudStatus == \"challenge\" {\n\t\t\t\t\t// TODO set transaction status on your database to 'challenge' e.g: 'Payment status challenged. Please take action on your Merchant Administration Portal\n\t\t\t\t} else if resp.FraudStatus == \"accept\" {\n\t\t\t\t\t// TODO set transaction status on your database to 'success'\n\t\t\t\t}\n\t\t\t} else if resp.TransactionStatus == \"settlement\" {\n\t\t\t\t// TODO set transaction status on your databaase to 'success'\n\t\t\t} else if resp.TransactionStatus == \"deny\"{\n\t\t\t\t// TODO you can ignore 'deny', because most of the time it allows payment retries\n\t\t\t\t// and later can become success\n\t\t\t} else if resp.TransactionStatus == \"cancel\" || resp.TransactionStatus == \"expire\" {\n\t\t\t\t// TODO set transaction status on your databaase to 'failure'\n\t\t\t} else if resp.TransactionStatus == \"pending\" {\n\t\t\t\t// TODO set transaction status on your databaase to 'pending' / waiting payment\n\t\t\t}\n\t\t}\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Write([]byte(\"ok\"))\n}", "title": "" }, { "docid": "d0e016a57a1827fca37485200ff4970b", "score": "0.48404914", "text": "func (u *User) Notification(key data.UUID) (*Notification, error) {\n\tn := &Notification{}\n\terr := data.NotificationGet(n, key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif n.Username != u.Username {\n\t\treturn nil, data.ErrNotFound\n\t}\n\treturn n, nil\n}", "title": "" }, { "docid": "4fa5a067db9519940a39b9ddccaad06a", "score": "0.48291355", "text": "func (n *Notifier) Notify(id ID, data interface{}) error {\n\tenc, err := json.Marshal(data)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tn.mu.Lock()\n\tdefer n.mu.Unlock()\n\n\tif n.sub == nil {\n\t\tpanic(\"can't Notify before subscription is created\")\n\t} else if n.sub.ID != id {\n\t\tpanic(\"Notify with wrong ID\")\n\t}\n\tif n.activated {\n\t\treturn n.send(n.sub, enc)\n\t}\n\tn.buffer = append(n.buffer, enc)\n\treturn nil\n}", "title": "" }, { "docid": "ca1f5de966251faa9fbbb133ff27df1e", "score": "0.4828597", "text": "func (n *httpNotifier) Notify(result notification.HookResult) error {\n\tpayload := result.Logs(n.PrefixFilter)\n\tif strings.TrimSpace(payload) == \"\" {\n\t\t// Nothing to notify, abort\n\t\treturn 
nil\n\t}\n\n\tnotif := &notifPayload{\n\t\tID: strconv.FormatUint(result.ID(), 10),\n\t\tName: result.Name(),\n\t\tText: payload,\n\t\tError: result.Err(),\n\t}\n\tnotifJSON, err := json.Marshal(notif)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq, err := http.NewRequest(\"POST\", n.URL.String(), bytes.NewBuffer(notifJSON))\n\tif err != nil {\n\t\treturn err\n\t}\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\n\tclient := &http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\tresp.Body.Close()\n\tlogger.Info.Printf(\"job %s#%d notification sent to %s\\n\", result.Name(), result.ID(), n.URL.String())\n\treturn nil\n}", "title": "" }, { "docid": "42e95f1e9fb622d34be06e02d7f5361d", "score": "0.48269248", "text": "func SendNotify(configuration *Settings, domain, currentIP string) error {\n\terr := SendTelegramNotify(configuration, domain, currentIP)\n\tif err != nil {\n\t\tlog.Println(\"Send telegram notification with error:\", err.Error())\n\t}\n\terr = SendMailNotify(configuration, domain, currentIP)\n\tif err != nil {\n\t\tlog.Println(\"Send email notification with error:\", err.Error())\n\t}\n\terr = SendSlackNotify(configuration, domain, currentIP)\n\tif err != nil {\n\t\tlog.Println(\"Send slack notification with error:\", err.Error())\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d0e24d514cd124c2d3a76c9ce9679912", "score": "0.48216513", "text": "func (*eventCenter) Notify(eventType types.EventType, value interface{}) (err error) {\n\treturn nil\n}", "title": "" }, { "docid": "77ac04622e13759f06c497071d9806eb", "score": "0.4818299", "text": "func (i *Item) UpdateStock(quantity int) {\n\ti.quantity = quantity\n\tif i.quantity > 0 {\n\t\tfmt.Printf(\"%s in stock!!! Notifying Subscribed Customers...\\n\", i.name)\n\t\ti.NotifyAll()\n\t}\n}", "title": "" } ]
868b12bead74af5094c624e259bb6c8b
synchronise tries to sync up our local chain with a remote peer.
[ { "docid": "ee23fdb98b39f532cbb3184746301023", "score": "0.6194514", "text": "func (h *clientHandler) synchronise(peer *peer) {\n\t// Short circuit if the peer is nil.\n\tif peer == nil {\n\t\treturn\n\t}\n\t// Make sure the peer's TD is higher than our own.\n\tlatest := h.backend.blockchain.CurrentHeader()\n\tcurrentTd := rawdb.ReadTd(h.backend.chainDb, latest.Hash(), latest.Number.Uint64())\n\tif currentTd != nil && peer.headBlockInfo().Td.Cmp(currentTd) < 0 {\n\t\treturn\n\t}\n\t// Recap the checkpoint.\n\t//\n\t// The light client may be connected to several different versions of the server.\n\t// (1) Old version server which can not provide stable checkpoint in the handshake packet.\n\t// => Use hardcoded checkpoint or empty checkpoint\n\t// (2) New version server but simple checkpoint syncing is not enabled(e.g. mainnet, new testnet or private network)\n\t// => Use hardcoded checkpoint or empty checkpoint\n\t// (3) New version server but the provided stable checkpoint is even lower than the hardcoded one.\n\t// => Use hardcoded checkpoint\n\t// (4) New version server with valid and higher stable checkpoint\n\t// => Use provided checkpoint\n\tvar checkpoint = &peer.checkpoint\n\tvar hardcoded bool\n\tif h.checkpoint != nil && h.checkpoint.SectionIndex >= peer.checkpoint.SectionIndex {\n\t\tcheckpoint = h.checkpoint // Use the hardcoded one.\n\t\thardcoded = true\n\t}\n\t// Determine whether we should run checkpoint syncing or normal light syncing.\n\t//\n\t// Here has four situations that we will disable the checkpoint syncing:\n\t//\n\t// 1. The checkpoint is empty\n\t// 2. The latest head block of the local chain is above the checkpoint.\n\t// 3. The checkpoint is hardcoded(recap with local hardcoded checkpoint)\n\t// 4. For some networks the checkpoint syncing is not activated.\n\tmode := checkpointSync\n\tswitch {\n\tcase checkpoint.Empty():\n\t\tmode = lightSync\n\t\tlog.Debug(\"Disable checkpoint syncing\", \"reason\", \"empty checkpoint\")\n\tcase latest.Number.Uint64() >= (checkpoint.SectionIndex+1)*h.backend.iConfig.ChtSize-1:\n\t\tmode = lightSync\n\t\tlog.Debug(\"Disable checkpoint syncing\", \"reason\", \"local chain beyond the checkpoint\")\n\tcase hardcoded:\n\t\tmode = legacyCheckpointSync\n\t\tlog.Debug(\"Disable checkpoint syncing\", \"reason\", \"checkpoint is hardcoded\")\n\tcase h.backend.oracle == nil || !h.backend.oracle.isRunning():\n\t\tif h.checkpoint == nil {\n\t\t\tmode = lightSync // Downgrade to light sync unfortunately.\n\t\t} else {\n\t\t\tcheckpoint = h.checkpoint\n\t\t\tmode = legacyCheckpointSync\n\t\t}\n\t\tlog.Debug(\"Disable checkpoint syncing\", \"reason\", \"checkpoint syncing is not activated\")\n\t}\n\t// Notify testing framework if syncing has completed(for testing purpose).\n\tdefer func() {\n\t\tif h.syncDone != nil {\n\t\t\th.syncDone()\n\t\t}\n\t}()\n\tstart := time.Now()\n\tif mode == checkpointSync || mode == legacyCheckpointSync {\n\t\t// Validate the advertised checkpoint\n\t\tif mode == checkpointSync {\n\t\t\tif err := h.validateCheckpoint(peer); err != nil {\n\t\t\t\tlog.Debug(\"Failed to validate checkpoint\", \"reason\", err)\n\t\t\t\th.removePeer(peer.id)\n\t\t\t\treturn\n\t\t\t}\n\t\t\th.backend.blockchain.AddTrustedCheckpoint(checkpoint)\n\t\t}\n\t\tlog.Debug(\"Checkpoint syncing start\", \"peer\", peer.id, \"checkpoint\", checkpoint.SectionIndex)\n\n\t\t// Fetch the start point block header.\n\t\t//\n\t\t// For the ethash consensus engine, the start header is the block header\n\t\t// of the checkpoint.\n\t\t//\n\t\t// For 
the clique consensus engine, the start header is the block header\n\t\t// of the latest epoch covered by checkpoint.\n\t\tctx, cancel := context.WithTimeout(context.Background(), time.Second*5)\n\t\tdefer cancel()\n\t\tif !checkpoint.Empty() && !h.backend.blockchain.SyncCheckpoint(ctx, checkpoint) {\n\t\t\tlog.Debug(\"Sync checkpoint failed\")\n\t\t\th.removePeer(peer.id)\n\t\t\treturn\n\t\t}\n\t}\n\t// Fetch the remaining block headers based on the current chain header.\n\tif err := h.downloader.Synchronise(peer.id, peer.Head(), peer.Td(), downloader.LightSync); err != nil {\n\t\tlog.Debug(\"Synchronise failed\", \"reason\", err)\n\t\treturn\n\t}\n\tlog.Debug(\"Synchronise finished\", \"elapsed\", common.PrettyDuration(time.Since(start)))\n}", "title": "" } ]
[ { "docid": "63e15ed43648b10d27dd971841d9a332", "score": "0.66165704", "text": "func (gsp *Gossiper) synchronizeWithPeer(same bool, toAsk []vector.PeerStatus, toSend []vector.PeerStatus, peerAddr string) {\n\tif same {\n\t\tfmt.Printf(\"IN SYNC WITH %s \\n\", peerAddr)\n\t\treturn\n\t}\n\tif len(toSend) > 0 {\n\t\t// we have new messages to send to the peer : start mongering\n\t\t//get the rumor we need to send from storage\n\t\trumorMsg := gsp.RumorStorage.Get(toSend[0].Identifier, toSend[0].NextID)\n\t\tif rumorMsg != nil {\n\t\t\tgsp.rumormonger(rumorMsg, peerAddr)\n\t\t}\n\t} else if len(toAsk) > 0 {\n\t\t// send status for triggering peer mongering\n\t\tgsp.sendStatusPacket(peerAddr)\n\t}\n}", "title": "" }, { "docid": "cc8898604ff42a1f537546db773ef988", "score": "0.62640876", "text": "func (pm *PeerManager) syncPeersToTunnelServer(peers []string) {\n\tif len(peers) == 0 {\n\t\tblog.Errorf(\"PeerManager discovery self peer failed, wait next event to recovery\")\n\t\treturn\n\t}\n\n\tpm.Lock()\n\tdefer pm.Unlock()\n\n\tnewSet := map[string]bool{}\n\tready := false\n\tfor _, peer := range peers {\n\t\tif peer == pm.options.PeerID {\n\t\t\tready = true\n\t\t} else {\n\t\t\tnewSet[peer] = true\n\t\t}\n\t}\n\n\tnewPeers, outDatedPeers, _ := diff(newSet, pm.peers)\n\t// add new peers\n\tfor _, peer := range newPeers {\n\t\tblog.Infof(\"PeerManager add new peer %s\", peer)\n\t\tpm.options.Tunnel.AddPeer(\n\t\t\tpm.protocol+peer+pm.options.PeerConnectURL,\n\t\t\tpeer, pm.options.PeerToken, pm.options.PeerTLS)\n\t}\n\t// remove deleted peers\n\tfor _, peer := range outDatedPeers {\n\t\tblog.Infof(\"PeerManager clean outdated peer %s\", peer)\n\t\tpm.options.Tunnel.RemovePeer(peer)\n\t}\n\n\tpm.peers = newSet\n\tpm.ready = ready\n}", "title": "" }, { "docid": "5bea160fbac2399a31db3f97c2004173", "score": "0.6000294", "text": "func (pm *ProtocolManager) syncer() {\n\t// Start and ensure cleanup of sync mechanisms\n\tfor {\n\t\tselect {\n\t\tcase <-pm.newPeerCh:\n\t\tcase <-pm.noMorePeers:\n\t\t\treturn\n\t\t}\n\t}\n}", "title": "" }, { "docid": "e074235486b075ba03997b39c24187c2", "score": "0.5989023", "text": "func (sm *SyncManager) startSync() {\n\t// Return now if we're already syncing.\n\tif sm.syncPeer != nil {\n\t\treturn\n\t}\n\n\tbest := chain.GetInstance().Tip()\n\tvar bestPeer *peer.Peer\n\tfor peer, state := range sm.peerStates {\n\t\tif !state.syncCandidate {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Remove sync candidate peers that are no longer candidates due\n\t\t// to passing their latest known block. NOTE: The < is\n\t\t// intentional as opposed to <=. While technically the peer\n\t\t// doesn't have a later block when it's equal, it will likely\n\t\t// have one soon so it is a reasonable choice. It also allows\n\t\t// the case where both are at 0 such as during regression test.\n\t\tif peer.LastBlock() < best.Height {\n\t\t\tcontinue\n\t\t}\n\n\t\t// TODO(davec): Use a better algorithm to choose the best peer.\n\t\t// For now, just pick the first available candidate.\n\t\tbestPeer = peer\n\t}\n\n\t// Start syncing from the best peer if one was selected.\n\tif bestPeer != nil {\n\t\tactiveChain := chain.GetInstance()\n\t\tpindexStart := activeChain.GetIndexBestHeader()\n\t\t/**\n\t\t * If possible, start at the block preceding the currently best\n\t\t * known header. This ensures that we always get a non-empty list of\n\t\t * headers back as long as the peer is up-to-date. With a non-empty\n\t\t * response, we can initialise the peer's known best block. 
This\n\t\t * wouldn't be possible if we requested starting at pindexBestHeader\n\t\t * and got back an empty response.\n\t\t */\n\t\tif pindexStart.Prev != nil {\n\t\t\tpindexStart = pindexStart.Prev\n\t\t}\n\t\tlocator := activeChain.GetLocator(pindexStart)\n\t\tlog.Info(\"Syncing to block height %d from peer %v\",\n\t\t\tbestPeer.LastBlock(), bestPeer.Addr())\n\n\t\tbestPeer.PushGetHeadersMsg(*locator, &zeroHash)\n\n\t\tsm.syncPeer = bestPeer\n\t\tstate := sm.peerStates[sm.syncPeer]\n\t\tstate.onStartSync(sm.syncPeer)\n\n\t\tif sm.current() {\n\t\t\tlog.Debug(\"request mempool in startSync\")\n\t\t\tbestPeer.RequestMemPool()\n\t\t}\n\t} else {\n\t\tlog.Warn(\"No sync peer candidates available\")\n\t}\n}", "title": "" }, { "docid": "dc6910712bec87e4305291b69301d155", "score": "0.590688", "text": "func syncto(host string, username string, dirtree *Watcher, state map[string]File, filters, deleted_filters []Sfile, persist persistfunc) map[string]File {\n\tDPrintf(\"syncto : try and connect\")\n\tconn, err := net.Dial(\"tcp\", host)\n\tif !check(err, true) {\n\t\treturn state\n\t}\n\tdefer conn.Close()\n\t//DPrintf(\"syncto : poll dirtree, filters are %v\", filters)\n\tDPrintf(\"syncto : poll dirtree\")\n\tmodified, deleted := dirtree.Poll(filters, deleted_filters)\n\t//DPrintf(\"deleted : %v\", deleted)\n\tDPrintf(\"syncto : calculate deltas\")\n\tversions := delta(modified, deleted, state)\n\t//DPrintf(\"syncto : send version vectors, %v\", versions)\n\n\t//chunks := makechunks(modified)\n\t//DPrintf(\"time-vectors: %v\", sync_version)\n\twants := send_versions(conn, versions)\n\tcount := 0\n\tfor k, v := range wants {\n\t\tif v.Send {\n\t\t\t//send and sync\n\t\t\tDPrintf(\"sending file %v -> %v%%\", k, float32(count)/float32(len(wants)))\n\t\t\t//if send_file_chunks(conn, k, FastRollhash(k)) {\n\t\t\tif send_file_chunks(conn, k, RollhashSha(k)) {\n\t\t\t\t//if send_file_chunks(conn, k, v.Chunks) {\n\t\t\t\t//update the file's synchronization vector on success\n\t\t\t\tfile := versions[k]\n\t\t\t\tversions[k] = file.SyncModify().BackSync(username)\n\t\t\t} else {\n\t\t\t\tDPrintf(\"the file did not send\")\n\t\t\t\tbreak\n\t\t\t}\n\t\t} else if v.Sync {\n\t\t\tDPrintf(\"just sync %v\", k)\n\t\t\t//just sync but dont send\n\t\t\tfile := versions[k]\n\t\t\tversions[k] = file.SyncModify().BackSync(username)\n\t\t}\n\t\tcount += 1\n\t}\n\tDPrintf(\"syncto : done sending files\")\n\treturn versions\n}", "title": "" }, { "docid": "863be3aacdc27f4b575258425bb650f5", "score": "0.58496535", "text": "func (pps *PPNetServer) syncPeerAddr() {\n\tchanged := false\n\tfor i := 0; i < len(pps.recentPeers); i++ {\n\t\tp := pps.network.GetPeerFromAddr(pps.recentPeers[i])\n\t\tif p == nil || (p != nil && p.GetSyncState() != common.ESTABLISH) {\n\t\t\tpps.recentPeers = append(pps.recentPeers[:i], pps.recentPeers[i+1:]...)\n\t\t\tchanged = true\n\t\t\ti--\n\t\t}\n\t}\n\tleft := common.RECENT_LIMIT - len(pps.recentPeers)\n\tif left > 0 {\n\t\tnp := pps.network.GetNp()\n\t\tnp.Lock()\n\t\tvar ip net.IP\n\t\tfor _, p := range np.List {\n\t\t\taddr, _ := p.GetAddr16()\n\t\t\tip = addr[:]\n\t\t\tnodeAddr := ip.To16().String() + \":\" +\n\t\t\t\tstrconv.Itoa(int(p.GetSyncPort()))\n\t\t\tfound := false\n\t\t\tfor i := 0; i < len(pps.recentPeers); i++ {\n\t\t\t\tif nodeAddr == pps.recentPeers[i] {\n\t\t\t\t\tfound = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !found {\n\t\t\t\tpps.recentPeers = append(pps.recentPeers, nodeAddr)\n\t\t\t\tleft--\n\t\t\t\tchanged = true\n\t\t\t\tif left == 0 
{\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tnp.Unlock()\n\t}\n\tif changed {\n\t\tbuf, err := json.Marshal(pps.recentPeers)\n\t\tif err != nil {\n\t\t\t//log.Error(\"package recent peer fail: \", err)\n\t\t\treturn\n\t\t}\n\t\terr = ioutil.WriteFile(common.RECENT_FILE_NAME, buf, os.ModePerm)\n\t\tif err != nil {\n\t\t\t//log.Error(\"write recent peer fail: \", err)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "5dcc73adb3465196b3f6d2ba540d8437", "score": "0.5845922", "text": "func TestSynchronize(t *testing.T) {\n\tif testing.Short() {\n\t\tt.SkipNow()\n\t}\n\tcst1, err := createConsensusSetTester(t.Name() + \"1\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer cst1.Close()\n\tcst2, err := createConsensusSetTester(t.Name() + \"2\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer cst2.Close()\n\n\t// mine on cst2 until it is above cst1\n\tfor cst1.cs.dbBlockHeight() >= cst2.cs.dbBlockHeight() {\n\t\tb, _ := cst2.miner.FindBlock()\n\t\terr = cst2.cs.AcceptBlock(b)\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}\n\n\t// connect gateways, triggering a Synchronize\n\terr = cst1.gateway.Connect(cst2.gateway.Address())\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// blockchains should now match\n\tfor i := 0; i < 50; i++ {\n\t\tif cst1.cs.dbCurrentBlockID() != cst2.cs.dbCurrentBlockID() {\n\t\t\ttime.Sleep(250 * time.Millisecond)\n\t\t}\n\t}\n\tif cst1.cs.dbCurrentBlockID() != cst2.cs.dbCurrentBlockID() {\n\t\tt.Fatal(\"Synchronize failed\")\n\t}\n\n\t// Mine on cst2 until it is more than 'MaxCatchUpBlocks' ahead of cst1.\n\t// NOTE: we have to disconnect prior to this, otherwise cst2 will relay\n\t// blocks to cst1.\n\tcst1.gateway.Disconnect(cst2.gateway.Address())\n\tcst2.gateway.Disconnect(cst1.gateway.Address())\n\tfor cst2.cs.dbBlockHeight() < cst1.cs.dbBlockHeight()+3+MaxCatchUpBlocks {\n\t\t_, err := cst2.miner.AddBlock()\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}\n\t// reconnect\n\terr = cst1.gateway.Connect(cst2.gateway.Address())\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// block heights should now match\n\tfor i := 0; i < 50; i++ {\n\t\tif cst1.cs.dbBlockHeight() != cst2.cs.dbBlockHeight() {\n\t\t\ttime.Sleep(250 * time.Millisecond)\n\t\t}\n\t}\n\tif cst1.cs.dbBlockHeight() != cst2.cs.dbBlockHeight() {\n\t\tt.Fatal(\"synchronize failed\")\n\t}\n\n\t// extend cst2 with a \"bad\" (old) block, and synchronize. 
cst1 should\n\t// reject the bad block.\n\tcst2.cs.mu.Lock()\n\tid, err := cst2.cs.dbGetPath(0)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tcst2.cs.dbPushPath(id)\n\tcst2.cs.mu.Unlock()\n\n\t// Sleep for a few seconds to allow the network call between the two time\n\t// to occur.\n\ttime.Sleep(5 * time.Second)\n\tif cst1.cs.dbBlockHeight() == cst2.cs.dbBlockHeight() {\n\t\tt.Fatal(\"cst1 did not reject bad block\")\n\t}\n}", "title": "" }, { "docid": "488d6eb00b487ae3ae86b5dbd6af4e41", "score": "0.5819755", "text": "func TestIntegrationRelaySynchronize(t *testing.T) {\n\tif testing.Short() {\n\t\tt.SkipNow()\n\t}\n\tcst1, err := blankConsensusSetTester(t.Name()+\"1\", modules.ProdDependencies)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer cst1.Close()\n\tcst2, err := blankConsensusSetTester(t.Name()+\"2\", modules.ProdDependencies)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer cst2.Close()\n\tcst3, err := blankConsensusSetTester(t.Name()+\"3\", modules.ProdDependencies)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer cst3.Close()\n\n\t// Connect them like so: cst1 <-> cst2 <-> cst3\n\terr = cst1.gateway.Connect(cst2.gateway.Address())\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\terr = cst2.gateway.Connect(cst3.gateway.Address())\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Make sure cst1 is not connected to cst3.\n\tcst1.gateway.Disconnect(cst3.gateway.Address())\n\tcst3.gateway.Disconnect(cst1.gateway.Address())\n\n\t// Spin until the connection calls have completed.\n\tfor i := 0; i < 100; i++ {\n\t\ttime.Sleep(150 * time.Millisecond)\n\t\tif len(cst1.gateway.Peers()) >= 1 && len(cst3.gateway.Peers()) >= 1 {\n\t\t\tbreak\n\t\t}\n\t}\n\tif len(cst1.gateway.Peers()) < 1 || len(cst3.gateway.Peers()) < 1 {\n\t\tt.Fatal(\"Peer connection has failed.\")\n\t}\n\n\t// Mine a block on cst1, expecting the block to propagate from cst1 to\n\t// cst2, and then to cst3.\n\tb1, err := cst1.miner.AddBlock()\n\tif err != nil {\n\t\tt.Log(b1.ID())\n\t\tt.Log(cst1.cs.CurrentBlock().ID())\n\t\tt.Log(cst2.cs.CurrentBlock().ID())\n\t\tt.Fatal(err)\n\t}\n\n\t// Spin until the block has propagated to cst2.\n\tfor i := 0; i < 100; i++ {\n\t\ttime.Sleep(150 * time.Millisecond)\n\t\tif cst2.cs.CurrentBlock().ID() == b1.ID() {\n\t\t\tbreak\n\t\t}\n\t}\n\tif cst2.cs.CurrentBlock().ID() != b1.ID() {\n\t\tt.Fatal(\"Block propagation has failed\")\n\t}\n\t// Spin until the block has propagated to cst3.\n\tfor i := 0; i < 100; i++ {\n\t\ttime.Sleep(150 * time.Millisecond)\n\t\tif cst3.cs.CurrentBlock().ID() == b1.ID() {\n\t\t\tbreak\n\t\t}\n\t}\n\tif cst3.cs.CurrentBlock().ID() != b1.ID() {\n\t\tt.Fatal(\"Block propagation has failed\")\n\t}\n\n\t// Mine a block on cst2.\n\tb2, err := cst2.miner.AddBlock()\n\tif err != nil {\n\t\tt.Log(b1.ID())\n\t\tt.Log(b2.ID())\n\t\tt.Log(cst2.cs.CurrentBlock().ID())\n\t\tt.Log(cst3.cs.CurrentBlock().ID())\n\t\tt.Fatal(err)\n\t}\n\t// Spin until the block has propagated to cst1.\n\tfor i := 0; i < 100; i++ {\n\t\ttime.Sleep(150 * time.Millisecond)\n\t\tif cst1.cs.CurrentBlock().ID() == b2.ID() {\n\t\t\tbreak\n\t\t}\n\t}\n\tif cst1.cs.CurrentBlock().ID() != b2.ID() {\n\t\tt.Fatal(\"block propagation has failed\")\n\t}\n\t// Spin until the block has propagated to cst3.\n\tfor i := 0; i < 100; i++ {\n\t\ttime.Sleep(150 * time.Millisecond)\n\t\tif cst3.cs.CurrentBlock().ID() == b2.ID() {\n\t\t\tbreak\n\t\t}\n\t}\n\tif cst3.cs.CurrentBlock().ID() != b2.ID() {\n\t\tt.Fatal(\"block propagation has failed\")\n\t}\n\n\t// Mine a block on cst3.\n\tb3, err := 
cst3.miner.AddBlock()\n\tif err != nil {\n\t\tt.Log(b1.ID())\n\t\tt.Log(b2.ID())\n\t\tt.Log(b3.ID())\n\t\tt.Log(cst1.cs.CurrentBlock().ID())\n\t\tt.Log(cst2.cs.CurrentBlock().ID())\n\t\tt.Log(cst3.cs.CurrentBlock().ID())\n\t\tt.Fatal(err)\n\t}\n\t// Spin until the block has propagated to cst1.\n\tfor i := 0; i < 100; i++ {\n\t\ttime.Sleep(150 * time.Millisecond)\n\t\tif cst1.cs.CurrentBlock().ID() == b3.ID() {\n\t\t\tbreak\n\t\t}\n\t}\n\tif cst1.cs.CurrentBlock().ID() != b3.ID() {\n\t\tt.Fatal(\"block propagation has failed\")\n\t}\n\t// Spin until the block has propagated to cst2.\n\tfor i := 0; i < 100; i++ {\n\t\ttime.Sleep(150 * time.Millisecond)\n\t\tif cst2.cs.CurrentBlock().ID() == b3.ID() {\n\t\t\tbreak\n\t\t}\n\t}\n\tif cst2.cs.CurrentBlock().ID() != b3.ID() {\n\t\tt.Fatal(\"block propagation has failed\")\n\t}\n\n\t// Check that cst1 and cst3 are not peers, if they are peers then this test\n\t// is invalid because it has failed to be certain that blocks can make\n\t// multiple hops.\n\tif len(cst1.gateway.Peers()) != 1 || cst1.gateway.Peers()[0].NetAddress == cst3.gateway.Address() {\n\t\tt.Log(\"Test is invalid, cst1 and cst3 have connected to each other\")\n\t}\n\tif len(cst3.gateway.Peers()) != 1 || cst3.gateway.Peers()[0].NetAddress == cst1.gateway.Address() {\n\t\tt.Log(\"Test is invalid, cst3 and cst1 have connected to each other\")\n\t}\n}", "title": "" }, { "docid": "f31c7a19384c56a49eb6e97a26021b58", "score": "0.5810985", "text": "func (s *Sync) syncFromConn() error {\n\tfor {\n\t\tselect {\n\t\tcase _, _ = <-s.stop:\n\t\t\treturn nil\n\t\tdefault:\n\t\t\tif s.curPeer == nil {\n\t\t\t\treturn errors.New(\"no current peer\")\n\t\t\t}\n\t\t\tlocalHeight := s.chain.LastHeight()\n\n\t\t\t// Get the block of the remote node from the next block height,\n\t\t\t// If the error is that the peer has stopped, delete the peer.\n\t\t\t// If the storage fails locally, the remote block verification\n\t\t\t// is performed, the verification proves that the local block\n\t\t\t// is wrong, and the local chain is rolled back to the valid block.\n\n\t\t\tblocks, err := s.request.GetBlocks(s.curPeer.Conn, localHeight+1, s.curPeer.Speed)\n\t\t\tif err != nil {\n\t\t\t\tif err == request.Err_PeerClosed {\n\t\t\t\t\ts.reducePeerSpeed()\n\t\t\t\t}\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err := s.insert(blocks); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8eba803ee4032f5760e3c770cc13d029", "score": "0.57929015", "text": "func (b *blockManager) startSync(peers *list.List) {\n\t// Return now if we're already syncing.\n\tif b.syncPeer != nil {\n\t\treturn\n\t}\n\n\t_, bestHeight, err := b.cfg.BlockHeaders.ChainTip()\n\tif err != nil {\n\t\tlog.Errorf(\"Failed to get hash and height for the \"+\n\t\t\t\"latest block: %s\", err)\n\t\treturn\n\t}\n\n\tvar bestPeer *ServerPeer\n\tvar enext *list.Element\n\tfor e := peers.Front(); e != nil; e = enext {\n\t\tenext = e.Next()\n\t\tsp := e.Value.(*ServerPeer)\n\n\t\t// Remove sync candidate peers that are no longer candidates\n\t\t// due to passing their latest known block.\n\t\t//\n\t\t// NOTE: The < is intentional as opposed to <=. While\n\t\t// techcnically the peer doesn't have a later block when it's\n\t\t// equal, it will likely have one soon so it is a reasonable\n\t\t// choice. 
It also allows the case where both are at 0 such as\n\t\t// during regression test.\n\t\tif sp.LastBlock() < int32(bestHeight) {\n\t\t\tpeers.Remove(e)\n\t\t\tcontinue\n\t\t}\n\n\t\t// TODO: Use a better algorithm to choose the best peer.\n\t\t// For now, just pick the candidate with the highest last block.\n\t\tif bestPeer == nil || sp.LastBlock() > bestPeer.LastBlock() {\n\t\t\tbestPeer = sp\n\t\t}\n\t}\n\n\t// Start syncing from the best peer if one was selected.\n\tif bestPeer != nil {\n\t\tlocator, err := b.cfg.BlockHeaders.LatestBlockLocator()\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Failed to get block locator for the \"+\n\t\t\t\t\"latest block: %s\", err)\n\t\t\treturn\n\t\t}\n\n\t\tlog.Infof(\"Syncing to block height %d from peer %s\",\n\t\t\tbestPeer.LastBlock(), bestPeer.Addr())\n\n\t\t// Now that we know we have a new sync peer, we'll lock it in\n\t\t// within the proper attribute.\n\t\tb.syncPeerMutex.Lock()\n\t\tb.syncPeer = bestPeer\n\t\tb.syncPeerMutex.Unlock()\n\n\t\t// By default will use the zero hash as our stop hash to query\n\t\t// for all the headers beyond our view of the network based on\n\t\t// our latest block locator.\n\t\tstopHash := &zeroHash\n\n\t\t// If we're still within the range of the set checkpoints, then\n\t\t// we'll use the next checkpoint to guide the set of headers we\n\t\t// fetch, setting our stop hash to the next checkpoint hash.\n\t\tif b.nextCheckpoint != nil && int32(bestHeight) < b.nextCheckpoint.Height {\n\t\t\tlog.Infof(\"Downloading headers for blocks %d to \"+\n\t\t\t\t\"%d from peer %s\", bestHeight+1,\n\t\t\t\tb.nextCheckpoint.Height, bestPeer.Addr())\n\n\t\t\tstopHash = b.nextCheckpoint.Hash\n\t\t} else {\n\t\t\tlog.Infof(\"Fetching set of headers from tip \"+\n\t\t\t\t\"(height=%v) from peer %s\", bestHeight,\n\t\t\t\tbestPeer.Addr())\n\t\t}\n\n\t\t// With our stop hash selected, we'll kick off the sync from\n\t\t// this peer with an initial GetHeaders message.\n\t\t_ = b.SyncPeer().PushGetHeadersMsg(locator, stopHash)\n\t} else {\n\t\tlog.Warnf(\"No sync peer candidates available\")\n\t}\n}", "title": "" }, { "docid": "5891796a1d18152b1d6fed30bee32077", "score": "0.5736843", "text": "func (w *RemoteClusterTunnelManager) Sync(ctx context.Context) error {\n\t// Fetch desired reverse tunnels and convert them to a set of\n\t// remoteClusterKeys.\n\twantTunnels, err := w.cfg.AccessPoint.GetReverseTunnels(ctx)\n\tif err != nil {\n\t\treturn trace.Wrap(err)\n\t}\n\twantClusters := make(map[remoteClusterKey]bool, len(wantTunnels))\n\tfor _, tun := range wantTunnels {\n\t\tfor _, addr := range tun.GetDialAddrs() {\n\t\t\twantClusters[remoteClusterKey{cluster: tun.GetClusterName(), addr: addr}] = true\n\t\t}\n\t}\n\n\tw.mu.Lock()\n\tdefer w.mu.Unlock()\n\n\t// Delete pools that are no longer needed.\n\tfor k, pool := range w.pools {\n\t\tif wantClusters[k] {\n\t\t\tcontinue\n\t\t}\n\t\tpool.Stop()\n\t\ttrustedClustersStats.DeleteLabelValues(pool.Cluster)\n\t\tdelete(w.pools, k)\n\t}\n\n\t// Start pools that were added since last sync.\n\tvar errs []error\n\tfor k := range wantClusters {\n\t\tif _, ok := w.pools[k]; ok {\n\t\t\tcontinue\n\t\t}\n\n\t\ttrustedClustersStats.WithLabelValues(k.cluster).Set(0)\n\t\tpool, err := w.newAgentPool(ctx, w.cfg, k.cluster, k.addr)\n\t\tif err != nil {\n\t\t\terrs = append(errs, trace.Wrap(err))\n\t\t\tcontinue\n\t\t}\n\t\tw.pools[k] = pool\n\t}\n\treturn trace.NewAggregate(errs...)\n}", "title": "" }, { "docid": "946020e76a540f7c7cc70bba374b7187", "score": "0.5718564", "text": "func (ps PeerStore) Sync(_ 
map[model.PeerHash]model.PeerStats) error {\n\tpanic(\"implement me\")\n}", "title": "" }, { "docid": "f6307c528afca99c9fd8d94dc6227e7e", "score": "0.5598978", "text": "func (c *Chain) Sync() error {\n\treturn c.trustedNode.Sync()\n}", "title": "" }, { "docid": "4ea11c5a08285e6b18b218c6d4c095ca", "score": "0.55989707", "text": "func (lumberjackSink) Sync() error { return nil }", "title": "" }, { "docid": "cd07ba3b4797c58603bc65fd856d4db1", "score": "0.5587094", "text": "func (s *Syncer) Sync(src, dst SyncPlugin) {\n\ts.syncLocal(src, dst)\n}", "title": "" }, { "docid": "2c2a7393bf8ce0053e169ecff24281d9", "score": "0.55859923", "text": "func (this *NetServer) startSyncAccept(listener net.Listener) {\n\tfor {\n\t\tconn, err := listener.Accept()\n\n\t\tif err != nil {\n\t\t\tthis.log.Error(\"[p2p] error accepting \", err.Error())\n\t\t\treturn\n\t\t}\n\n\t\tthis.log.Debug(\"[p2p] remote sync node connect with \",\n\t\t\tconn.RemoteAddr(), conn.LocalAddr())\n\t\tif !this.AddrValid(conn.RemoteAddr().String()) {\n\t\t\tthis.log.Warnf(\"[p2p] remote %s not in reserved list, close it \", conn.RemoteAddr())\n\t\t\tconn.Close()\n\t\t\tcontinue\n\t\t}\n\n\t\tif this.IsAddrInInConnRecord(conn.RemoteAddr().String()) {\n\t\t\tconn.Close()\n\t\t\tcontinue\n\t\t}\n\n\t\tsyncAddrCount := uint(this.GetInConnRecordLen())\n\t\tif syncAddrCount >= this.ctx.Config().P2P.MaxConnInBound {\n\t\t\tthis.log.Warnf(\"[p2p] SyncAccept: total connections(%d) reach the max limit(%d), conn closed\",\n\t\t\t\tsyncAddrCount, this.ctx.Config().P2P.MaxConnInBound)\n\t\t\tconn.Close()\n\t\t\tcontinue\n\t\t}\n\n\t\tremoteIp, err := common.ParseIPAddr(conn.RemoteAddr().String())\n\t\tif err != nil {\n\t\t\tthis.log.Warn(\"[p2p] parse ip error \", err.Error())\n\t\t\tconn.Close()\n\t\t\tcontinue\n\t\t}\n\t\tconnNum := this.GetIpCountInInConnRecord(remoteIp)\n\t\tif connNum >= this.ctx.Config().P2P.MaxConnInBoundForSingleIP {\n\t\t\tthis.log.Warnf(\"[p2p] SyncAccept: connections(%d) with ip(%s) has reach the max limit(%d), \"+\n\t\t\t\t\"conn closed\", connNum, remoteIp, this.ctx.Config().P2P.MaxConnInBoundForSingleIP)\n\t\t\tconn.Close()\n\t\t\tcontinue\n\t\t}\n\n\t\tremotePeer := peer.NewPeer(this.log)\n\t\taddr := conn.RemoteAddr().String()\n\t\tthis.AddInConnRecord(addr)\n\n\t\tthis.AddPeerSyncAddress(addr, remotePeer)\n\n\t\tremotePeer.SyncLink.SetAddr(addr)\n\t\tremotePeer.SyncLink.SetConn(conn)\n\t\tremotePeer.AttachSyncChan(this.SyncChan)\n\t\tgo remotePeer.SyncLink.Rx(this.NetworkMagic)\n\t\tgo remotePeer.SyncLink.Tx(this.NetworkMagic)\n\t}\n}", "title": "" }, { "docid": "09f9addc1164f4ee9d06d0b1f8b61094", "score": "0.551744", "text": "func Synchronise(ch SyncChannels, id int) {\n\tvar (\n\t\tregisteredOrders [NumFloors][NumButtons - 1]AckList\n\t\televList [NumElevators]Elev\n\t\tsendMsg Message\n\t\trecentlyDied [NumElevators]bool\n\t\tsomeUpdate bool\n\t\toffline bool\n\t)\n\n\ttimeout := make(chan bool)\n\tgo func() { time.Sleep(1 * time.Second); timeout <- true }()\n\n\tselect {\n\tcase initMsg := <-ch.IncomingMsg:\n\t\televList = initMsg.Elevator\n\t\tregisteredOrders = initMsg.RegisteredOrders\n\t\tsomeUpdate = true\n\tcase <-timeout:\n\t\toffline = true\n\t}\n\n\tlostID := -1\n\treassignTimer := time.NewTimer(5 * time.Second)\n\tbroadcastTicker := time.NewTicker(100 * time.Millisecond)\n\tsingleModeTicker := time.NewTicker(100 * time.Millisecond)\n\treassignTimer.Stop()\n\tsingleModeTicker.Stop()\n\n\tfor {\n\t\tif offline {\n\t\t\tif elevList[id].Online {\n\t\t\t\toffline = false\n\t\t\t\treInitTimer := 
time.NewTimer(1 * time.Second)\n\t\t\tREINIT:\n\t\t\t\tfor {\n\t\t\t\t\tselect {\n\t\t\t\t\tcase reInitMsg := <-ch.IncomingMsg:\n\t\t\t\t\t\tif reInitMsg.Elevator != elevList && reInitMsg.ID != id {\n\t\t\t\t\t\t\ttmpElevator := elevList[id]\n\t\t\t\t\t\t\televList = reInitMsg.Elevator\n\t\t\t\t\t\t\televList[id] = tmpElevator\n\t\t\t\t\t\t\tsomeUpdate = true\n\t\t\t\t\t\t\treInitTimer.Stop()\n\t\t\t\t\t\t\tbreak REINIT\n\t\t\t\t\t\t}\n\t\t\t\t\tcase <-reInitTimer.C:\n\t\t\t\t\t\tbreak REINIT\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif lostID != -1 {\n\t\t\tfmt.Println(\"ELEVATOR\", lostID, \"DIED\")\n\t\t\trecentlyDied[lostID] = true\n\t\t\tlostID = -1\n\t\t}\n\n\t\tselect {\n\t\tcase newElev := <-ch.UpdateSync:\n\t\t\toldQueue := elevList[id].Queue\n\t\t\tif newElev.State == Undefined {\n\t\t\t\tch.PeerTxEnable <- false\n\t\t\t} else if newElev.State != Undefined && elevList[id].State == Undefined {\n\t\t\t\tch.PeerTxEnable <- true\n\t\t\t}\n\n\t\t\televList[id] = newElev\n\t\t\televList[id].Queue = oldQueue\n\t\t\tsomeUpdate = true\n\n\t\tcase newOrder := <-ch.OrderUpdate:\n\t\t\tif newOrder.Finished {\n\t\t\t\televList[id].Queue[newOrder.Floor] = [NumButtons]bool{}\n\t\t\t\tsomeUpdate = true\n\t\t\t\tif newOrder.Btn != BtnInside {\n\t\t\t\t\tregisteredOrders[newOrder.Floor][BtnUp].ImplicitAcks[id] = Finished\n\t\t\t\t\tregisteredOrders[newOrder.Floor][BtnDown].ImplicitAcks[id] = Finished\n\t\t\t\t\tfmt.Println(\"We Finished order\", newOrder.Btn, \"at floor\", newOrder.Floor+1)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tif newOrder.Btn == BtnInside {\n\t\t\t\t\televList[id].Queue[newOrder.Floor][newOrder.Btn] = true\n\t\t\t\t\tsomeUpdate = true\n\t\t\t\t} else {\n\t\t\t\t\tregisteredOrders[newOrder.Floor][newOrder.Btn].DesignatedElevator = newOrder.DesignatedElevator\n\t\t\t\t\tregisteredOrders[newOrder.Floor][newOrder.Btn].ImplicitAcks[id] = Acked\n\t\t\t\t\tfmt.Println(\"We acknowledged a new order\", newOrder.Btn, \"at floor\", newOrder.Floor+1)\n\t\t\t\t\tfmt.Println(\"\\tdesignated to\", registeredOrders[newOrder.Floor][newOrder.Btn].DesignatedElevator)\n\t\t\t\t}\n\t\t\t}\n\n\t\tcase msg := <-ch.IncomingMsg:\n\t\t\tif msg.ID == id || !elevList[msg.ID].Online || !elevList[id].Online {\n\t\t\t\tcontinue\n\t\t\t} else {\n\t\t\t\tif msg.Elevator != elevList {\n\t\t\t\t\ttmpElevator := elevList[id]\n\t\t\t\t\televList = msg.Elevator\n\t\t\t\t\televList[id] = tmpElevator\n\t\t\t\t\tsomeUpdate = true\n\t\t\t\t}\n\t\t\t\tfor elevator := 0; elevator < NumElevators; elevator++ {\n\t\t\t\t\tif elevator == id || !elevList[msg.ID].Online || !elevList[id].Online {\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\t\t\t\t\tfor floor := 0; floor < NumFloors; floor++ {\n\t\t\t\t\t\tfor btn := BtnUp; btn < BtnInside; btn++ {\n\t\t\t\t\t\t\tswitch msg.RegisteredOrders[floor][btn].ImplicitAcks[elevator] {\n\t\t\t\t\t\t\tcase NotAcked:\n\t\t\t\t\t\t\t\tif registeredOrders[floor][btn].ImplicitAcks[id] == Finished {\n\t\t\t\t\t\t\t\t\tregisteredOrders = copyAckList(msg, registeredOrders, elevator, floor, id, btn)\n\t\t\t\t\t\t\t\t} else if registeredOrders[floor][btn].ImplicitAcks[elevator] != NotAcked {\n\t\t\t\t\t\t\t\t\tregisteredOrders[floor][btn].ImplicitAcks[elevator] = NotAcked\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tcase Acked:\n\t\t\t\t\t\t\t\tif registeredOrders[floor][btn].ImplicitAcks[id] == NotAcked {\n\t\t\t\t\t\t\t\t\tfmt.Println(\"Order \", btn, \"from \", msg.ID, \"in floor\", floor+1, \"has been acked!\")\n\t\t\t\t\t\t\t\t\tregisteredOrders = copyAckList(msg, registeredOrders, elevator, floor, id, 
btn)\n\t\t\t\t\t\t\t\t} else if registeredOrders[floor][btn].ImplicitAcks[elevator] != Acked {\n\t\t\t\t\t\t\t\t\tregisteredOrders[floor][btn].ImplicitAcks[elevator] = Acked\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tif checkAllAckStatus(elevList, registeredOrders[floor][btn].ImplicitAcks, Acked) &&\n\t\t\t\t\t\t\t\t\t!elevList[id].Queue[floor][btn] &&\n\t\t\t\t\t\t\t\t\tregisteredOrders[floor][btn].DesignatedElevator == id {\n\t\t\t\t\t\t\t\t\tfmt.Println(\"We've been assigned a new order!\")\n\t\t\t\t\t\t\t\t\televList[id].Queue[floor][btn] = true\n\t\t\t\t\t\t\t\t\tsomeUpdate = true\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tcase Finished:\n\t\t\t\t\t\t\t\tif registeredOrders[floor][btn].ImplicitAcks[id] == Acked {\n\t\t\t\t\t\t\t\t\tregisteredOrders = copyAckList(msg, registeredOrders, elevator, floor, id, btn)\n\t\t\t\t\t\t\t\t} else if registeredOrders[floor][btn].ImplicitAcks[elevator] != Finished {\n\t\t\t\t\t\t\t\t\tregisteredOrders[floor][btn].ImplicitAcks[elevator] = Finished\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tif checkAllAckStatus(elevList, registeredOrders[floor][btn].ImplicitAcks, Finished) {\n\t\t\t\t\t\t\t\t\tregisteredOrders[floor][btn].ImplicitAcks[id] = NotAcked\n\t\t\t\t\t\t\t\t\tif registeredOrders[floor][btn].DesignatedElevator == id {\n\t\t\t\t\t\t\t\t\t\televList[id].Queue[floor][btn] = false\n\t\t\t\t\t\t\t\t\t\tsomeUpdate = true\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif someUpdate {\n\t\t\t\t\tch.UpdateQueue <- elevList\n\t\t\t\t\tsomeUpdate = false\n\t\t\t\t}\n\t\t\t}\n\n\t\tcase <-singleModeTicker.C:\n\t\t\tfor floor := 0; floor < NumFloors; floor++ {\n\t\t\t\tfor btn := BtnUp; btn < BtnInside; btn++ {\n\t\t\t\t\tif registeredOrders[floor][btn].ImplicitAcks[id] == Acked &&\n\t\t\t\t\t\t!elevList[id].Queue[floor][btn] {\n\t\t\t\t\t\tfmt.Println(\"We've been assigned a new order!\")\n\t\t\t\t\t\televList[id].Queue[floor][btn] = true\n\t\t\t\t\t\tsomeUpdate = true\n\t\t\t\t\t}\n\t\t\t\t\tif registeredOrders[floor][btn].ImplicitAcks[id] == Finished {\n\t\t\t\t\t\tregisteredOrders[floor][btn].ImplicitAcks[id] = NotAcked\n\t\t\t\t\t}\n\n\t\t\t\t}\n\t\t\t}\n\t\t\tif someUpdate {\n\t\t\t\tch.UpdateQueue <- elevList\n\t\t\t\tsomeUpdate = false\n\t\t\t}\n\n\t\tcase <-broadcastTicker.C:\n\t\t\tif !offline {\n\t\t\t\tsendMsg.RegisteredOrders = registeredOrders\n\t\t\t\tsendMsg.Elevator = elevList\n\t\t\t\tsendMsg.ID = id\n\t\t\t\tch.OutgoingMsg <- sendMsg\n\t\t\t}\n\n\t\tcase p := <-ch.PeerUpdate:\n\t\t\tfmt.Printf(\"Peer update:\\n\")\n\t\t\tfmt.Printf(\" Peers: %q\\n\", p.Peers)\n\t\t\tfmt.Printf(\" New: %q\\n\", p.New)\n\t\t\tfmt.Printf(\" Lost: %q\\n\", p.Lost)\n\t\t\tif len(p.Peers) == 0 {\n\t\t\t\toffline = true\n\t\t\t\tsingleModeTicker.Stop()\n\t\t\t} else if len(p.Peers) == 1 {\n\t\t\t\tsingleModeTicker = time.NewTicker(100 * time.Millisecond)\n\t\t\t} else {\n\t\t\t\tsingleModeTicker.Stop()\n\t\t\t}\n\n\t\t\tif len(p.New) > 0 {\n\t\t\t\tnewID, _ := strconv.Atoi(p.New)\n\t\t\t\televList[newID].Online = true\n\t\t\t} else if len(p.Lost) > 0 {\n\t\t\t\tlostID, _ = strconv.Atoi(p.Lost[0])\n\t\t\t\televList[lostID].Online = false\n\t\t\t\tif elevList[lostID].Queue != [NumFloors][NumButtons]bool{} && !recentlyDied[lostID] {\n\t\t\t\t\treassignTimer.Reset(1 * time.Second)\n\t\t\t\t}\n\t\t\t}\n\t\t\tvar onlineElevators [NumElevators]bool\n\t\t\tfor elevator := 0; elevator < NumElevators; elevator++ {\n\t\t\t\tonlineElevators[elevator] = elevList[elevator].Online\n\t\t\t}\n\t\t\tfmt.Println(\"Online elevators changed: 
\", onlineElevators)\n\t\t\ttmpList := onlineElevators\n\n\t\t\tgo func() { ch.OnlineElevators <- tmpList }()\n\n\t\tcase <-reassignTimer.C:\n\t\t\tfor elevator := 0; elevator < NumElevators; elevator++ {\n\t\t\t\tif !recentlyDied[elevator] {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\trecentlyDied[elevator] = false\n\t\t\t\tfor floor := 0; floor < NumFloors; floor++ {\n\t\t\t\t\tfor btn := BtnUp; btn < BtnInside; btn++ {\n\t\t\t\t\t\tif elevList[elevator].Queue[floor][btn] {\n\t\t\t\t\t\t\televList[id].Queue[floor][btn] = true\n\t\t\t\t\t\t\televList[elevator].Queue[floor][btn] = false\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tch.UpdateQueue <- elevList\n\t\t\tsomeUpdate = false\n\t\t}\n\t}\n}", "title": "" }, { "docid": "6de9f1132a6f1e3dc2341153851cbd3b", "score": "0.5468216", "text": "func (client *commitMsgClient) mainSync() error {\n\treq := &types.ReqNil{}\n\treply, err := client.paraClient.grpcClient.IsSync(context.Background(), req)\n\tif err != nil {\n\t\tplog.Error(\"Paracross main is syncing\", \"err\", err.Error())\n\t\treturn err\n\t}\n\tif !reply.IsOk {\n\t\tplog.Error(\"Paracross main reply not ok\")\n\t\treturn err\n\t}\n\n\tplog.Info(\"Paracross main sync succ\")\n\treturn nil\n\n}", "title": "" }, { "docid": "592d3dddddaa66a09141159707e65216", "score": "0.5455332", "text": "func (s *Server) converge() error {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\n\tif err := utils.CreateBridge(defaultBridgeName); err != nil && err != types.ErrBridgeExists {\n\t\treturn err\n\t}\n\n\t// TODO: check peer tunnels\n\t// TODO: max peer tunnels\n\tpeers := make([]*PeerInfo, 0, len(s.peers))\n\tfor _, p := range s.peers {\n\t\tpeers = append(peers, p)\n\t}\n\tlocalPeers := s.getLocalPeers(peers)\n\tlocalPeerInfo := map[string]*PeerInfo{}\n\tfor _, p := range localPeers {\n\t\tlocalPeerInfo[p.Name] = p\n\t}\n\n\tfor _, info := range localPeers {\n\t\tname := info.Name\n\t\taddress := info.Address\n\t\tlogrus.WithFields(logrus.Fields{\n\t\t\t\"name\": name,\n\t\t\t\"addr\": info,\n\t\t}).Debug(\"checking peer tunnel\")\n\n\t\tport, err := s.getTunnelPort(address)\n\t\tif err != nil {\n\t\t\tlogrus.WithFields(logrus.Fields{\n\t\t\t\t\"name\": name,\n\t\t\t\t\"address\": address,\n\t\t\t}).Errorf(\"error getting tunnel port: %s\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\thost, _, err := getHostPort(address)\n\t\tif err != nil {\n\t\t\tlogrus.WithFields(logrus.Fields{\n\t\t\t\t\"name\": name,\n\t\t\t\t\"address\": address,\n\t\t\t}).Errorf(\"error getting host address for peer: %s\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\t// create tunnel\n\t\tip := net.ParseIP(host)\n\t\tiface := fmt.Sprintf(\"ol-%s-%d\", defaultBridgeName, port)\n\t\tttl := 60\n\n\t\t// TODO: allow custom vxlan id\n\t\tif err := utils.CreateVxlan(iface, 1024, ip, port, ttl); err != nil && err != types.ErrTunnelExists {\n\t\t\tlogrus.WithFields(logrus.Fields{\n\t\t\t\t\"name\": name,\n\t\t\t\t\"address\": address,\n\t\t\t}).Errorf(\"error creating vxlan %s: %s\", iface, err)\n\t\t\tcontinue\n\t\t}\n\n\t\t// update tunnels\n\t\ts.tunnels[name] = iface\n\n\t\t// connect to bridge\n\t\tif err := utils.ConnectToBridge(iface, defaultBridgeName); err != nil {\n\t\t\tlogrus.WithFields(logrus.Fields{\n\t\t\t\t\"name\": name,\n\t\t\t\t\"address\": address,\n\t\t\t}).Errorf(\"error connecting iface %s to bridge %s: %s\", iface, defaultBridgeName, err)\n\t\t\tcontinue\n\t\t}\n\t}\n\n\t// remove tunnels that are no longer needed (peer is gone)\n\tfor name, tunnel := range s.tunnels {\n\t\tlogrus.WithFields(logrus.Fields{\n\t\t\t\"name\": 
name,\n\t\t\t\"tunnel\": tunnel,\n\t\t}).Debug(\"checking current tunnel for removal\")\n\t\tif _, ok := localPeerInfo[name]; !ok {\n\t\t\tlogrus.WithFields(logrus.Fields{\n\t\t\t\t\"name\": name,\n\t\t\t}).Info(\"removing tunnel\")\n\t\t\tif err := utils.DeleteVxlan(tunnel); err != nil {\n\t\t\t\tlogrus.Errorf(\"error removing tunnel: %s\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tdelete(s.tunnels, name)\n\t\t}\n\n\t}\n\n\tlogrus.WithFields(logrus.Fields{\n\t\t\"peers\": localPeerInfo,\n\t\t\"tunnels\": s.tunnels,\n\t\t\"ips\": s.ips,\n\t}).Debug(\"converged\")\n\n\treturn nil\n}", "title": "" }, { "docid": "b7f10cd2f23701612474c57d32e4f76f", "score": "0.5448026", "text": "func Sync() error {\n\treturn nil\n}", "title": "" }, { "docid": "bb53d671d87d4dcebf2a6b52fa84fe62", "score": "0.5423835", "text": "func (_IPancakePair *IPancakePairTransactor) Sync(opts *bind.TransactOpts) (*types.Transaction, error) {\n\treturn _IPancakePair.contract.Transact(opts, \"sync\")\n}", "title": "" }, { "docid": "7bbb0982811906c44e385868d49538a7", "score": "0.54010075", "text": "func sendTransactionsAndSyncUpPeers(n *networkProcesses, orderer *nwo.Orderer, basePeer *nwo.Peer, channelName string, peersToSyncUp ...*nwo.Peer) {\n\tBy(\"creating transactions\")\n\trunTransactions(n.network, orderer, basePeer, \"mycc\", channelName)\n\tbasePeerLedgerHeight := nwo.GetLedgerHeight(n.network, basePeer, channelName)\n\n\tBy(\"stopping orderer\")\n\tn.ordererProcess.Signal(syscall.SIGTERM)\n\tEventually(n.ordererProcess.Wait(), n.network.EventuallyTimeout).Should(Receive())\n\tn.ordererProcess = nil\n\n\tBy(\"starting the peers contained in the peersToSyncUp list\")\n\tstartPeers(n, true, peersToSyncUp...)\n\n\tBy(\"ensuring the peers are synced up\")\n\tassertPeersLedgerHeight(n.network, peersToSyncUp, basePeerLedgerHeight, channelName)\n\n\tBy(\"restarting orderer\")\n\tn.ordererRunner = n.network.OrdererRunner(orderer)\n\tn.ordererProcess = ifrit.Invoke(n.ordererRunner)\n\tEventually(n.ordererProcess.Ready(), n.network.EventuallyTimeout).Should(BeClosed())\n}", "title": "" }, { "docid": "da165b220566a44f700df61f49b415b1", "score": "0.5387584", "text": "func (ni *NormalImpl) SyncLocationAssignToStruct() {\n\tconn, err := grpc.Dial(*AssignHost, grpc.WithInsecure())\n\tif err != nil {\n\t\tglog.Info(err)\n\t}\n\tdefer conn.Close()\n\tc := Assigneer.NewAssigneerClient(conn)\n\tret, err := c.SyncLocation(context.Background(), &Assigneer.SyncLocationReq{\n\t\tKeeperID: 0,\n\t})\n\tif err != nil {\n\t\tglog.Info(err)\n\t}\n\tvar NewKeeperIDs []int\n\tfor i := 0; i < len(ret.KeeperIDs); i++ {\n\t\tNewKeeperIDs = append(NewKeeperIDs, int(ret.KeeperIDs[i]))\n\t\tni.hosts[ret.KeeperIDs[i]] = ret.Hosts[i]\n\t}\n\tglog.Infof(\"keeperIds : %v, Hosts : %v\", ret.KeeperIDs, ret.Hosts)\n\tni.assign.SetKeeperIDs(NewKeeperIDs)\n}", "title": "" }, { "docid": "17edf34aa809aa6f23af5498bfe10b25", "score": "0.53741354", "text": "func syncfrom(from net.Conn, dirtree *Watcher, state map[string]File, filters, deleted_filters []Sfile, persist persistfunc) (map[string]File, []Sfile, []Sfile) {\n\tdefer from.Close()\n\t//DPrintf(\"syncfrom : poll dirtree, filters are %v\", filters)\n\tDPrintf(\"syncfrom : poll dirtree\")\n\tmodified, deleted := dirtree.Poll(filters, deleted_filters)\n\t//DPrintf(\"syncfrom : calculate deltas, current state is %v\", state)\n\tDPrintf(\"syncfrom : calculate deltas\")\n\tversions := delta(modified, deleted, state)\n\tDPrintf(\"syncfrom : received their versions\")\n\tproposed_versions, them_id := 
receive_versions(from)\n\t//\tDPrintf(\"syncfrom : resolve differences: \\n\\tus %v\\n\\tthem %v\", versions, proposed_versions)\n\tfiles_wanted, resolutions := resolve_tvpair_with_delete(proposed_versions, versions, resolution_complain)\n\t//DPrintf(\"syncfrom : request files %v\", want)\n\tgetfiles(from, files_wanted)\n\tDPrintf(\"syncfrom : done\")\n\n\tfilters = make([]Sfile, 0)\n\tdeleted_filters = make([]Sfile, 0)\n\t//receive new files and merged files\n\tfor {\n\t\tnewfile, good, rename := receive_file_chunks(from)\n\t\tif !good {\n\t\t\tif newfile != \"\" {\n\t\t\t\tos.Remove(newfile)\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\t\tmerged := false\n\t\t//DPrintf(\"%v\", resolutions)\n\t\tval, exists := resolutions[newfile]\n\t\tif exists && val.Resolution == MERGE {\n\t\t\tmerged = merge(newfile, newfile+\"~\")\n\t\t\tif !merged {\n\t\t\t\tbreak\n\t\t\t}\n\t\t} else {\n\t\t\trename()\n\t\t}\n\n\t\tDPrintf(\"got file %v\", newfile)\n\t\tfile := proposed_versions[newfile]\n\t\tnfo, err := os.Stat(newfile)\n\t\tif err == nil {\n\t\t\t//fix up time in the new file\n\t\t\tfile.Time = nfo.ModTime()\n\n\t\t\t//update the synchronization vector in the file\n\n\t\t\t//stick it in the map\n\t\t\tif merged {\n\t\t\t\tversions[newfile] = file.BackSync(them_id).SyncModify().Modify()\n\t\t\t} else {\n\t\t\t\tversions[newfile] = file.BackSync(them_id).SyncModify()\n\t\t\t}\n\t\t\tfilters = append(filters, Sfile{file.Path, file.Time, false})\n\t\t} else if os.IsNotExist(err) {\n\t\t\t//fix up time in the new file\n\t\t\t//file.Time = nfo.ModTime()\n\n\t\t\t//update the synchronization vector in the file\n\n\t\t\t//stick it in the map\n\t\t\toldtime := versions[newfile].Time\n\t\t\tversions[newfile] = file.BackSync(them_id).SyncModify()\n\t\t\tdeleted_filters = append(deleted_filters, Sfile{file.Path, oldtime, false})\n\t\t\tDPrintf(\"deleted a file %v\", file.Path)\n\t\t} else {\n\t\t\tcheck(err, false)\n\t\t}\n\t}\n\n\tfor _, c := range resolutions {\n\t\t//\t\tif c.Resolution == MERGE {\n\t\t//\t\t\t//update version vector for merges\n\t\t//\t\t\tmerged := merge(c.Filename)\n\t\t//\t\t\tif !merged {\n\t\t//\t\t\t\t//dont merge if something went wrong\n\t\t//\t\t\t\tcontinue\n\t\t//\t\t\t}\n\t\t//\t\t\tmodTime, good := getmodtime(c.Filename)\n\t\t//\t\t\tif !good {\n\t\t//\t\t\t\tcontinue\n\t\t//\t\t\t}\n\t\t//\t\t\tfile := versions[c.Filename]\n\t\t//\t\t\tfile.Time = modTime\n\t\t//\t\t\tfile = file.Modify()\n\t\t//\t\t\tversions[c.Filename] = file\n\t\t//\t\t\tfilters = append(filters, Sfile{file.Path, file.Time, false})\n\t\t//\t\t} else if c.Resolution == KEEP_OURS {\n\t\tif c.Resolution == KEEP_OURS {\n\t\t\t//update sync vector for conflicts we keep\n\t\t\tfile := versions[c.Filename]\n\t\t\tversions[c.Filename] = file.BackSync(them_id).SyncModify()\n\t\t}\n\n\t}\n\n\t//Cleanup(\".\")\n\treturn versions, filters, deleted_filters\n}", "title": "" }, { "docid": "fa31a016bcebc1b0fd659670a14c9f6d", "score": "0.53645676", "text": "func (controller *MainController) RPCSync(dbMap *gorp.DbMap, skipVoteBitsSync bool) error {\n\tmultisigScripts, err := models.GetAllCurrentMultiSigScripts(dbMap)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = walletSvrsSync(controller.rpcServers, multisigScripts)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif !skipVoteBitsSync {\n\t\t// TODO: Wait for wallets to sync, or schedule the vote bits sync somehow.\n\t\t// For now, just skip full vote bits sync in favor of on-demand user's vote\n\t\t// bits sync if the wallets are busy at this point.\n\n\t\t// Allow sync to get going 
before attempting vote bits sync.\n\t\ttime.Sleep(2 * time.Second)\n\n\t\t// Look for that -4 message from wallet that says: \"the wallet is\n\t\t// currently syncing to the best block, please try again later\"\n\t\twsm := controller.rpcServers\n\t\terr = wsm.CheckWalletsReady()\n\t\tif err != nil /*strings.Contains(err.Error(), \"try again later\")*/ {\n\t\t\t// If importscript is running, it will take a while.\n\t\t\tlog.Errorf(\"Wallets are syncing. Unable to initiate votebits sync: %v\",\n\t\t\t\terr)\n\t\t} else {\n\t\t\t// Sync vote bits for all tickets owned by the wallet\n\t\t\terr = wsm.SyncVoteBits()\n\t\t\tif err != nil {\n\t\t\t\tlog.Error(err)\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "18c57530dceeeccbbf31a70ac33deeb2", "score": "0.53643477", "text": "func Sync(config *config.Sync) error {\n\tfor _, server := range config.DesiredState.Servers {\n\t\tfor _, desiredApp := range config.DesiredState.Apps {\n\t\t\terr := func() error {\n\t\t\t\tknownApp := findApp(config.KnownState, desiredApp.Name)\n\t\t\t\tif reflect.DeepEqual(knownApp, desiredApp) {\n\t\t\t\t\tfmt.Printf(\"Skipping app %s on server %s because it's already on the desired state.\",\n\t\t\t\t\t\tknownApp.Name, server)\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\tprettyPrintSync(knownApp, desiredApp, server)\n\t\t\t\taddress := fmt.Sprintf(\"%s:%s\", server.IP, \"22\")\n\t\t\t\tclient, err := sshclient.DialWithPasswd(address, config.User, config.Password)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tdefer client.Close()\n\n\t\t\t\treturn synchronize(client, knownApp, desiredApp)\n\t\t\t}()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "8e7961cec7a2c3eb2d156b3e8e2abb28", "score": "0.53593236", "text": "func (d *Gitclient) Sync() {\n\td.Clone()\n\td.Pull()\n\td.Checkout(d.Ref)\n}", "title": "" }, { "docid": "d6992d9b8ce62fe28192934a50025362", "score": "0.5339564", "text": "func (s *Syncer) Sync(ctx context.Context) (err error) {\n\tvar diff Diff\n\n\tctx, save := s.observe(ctx, \"Syncer.Sync\", \"\")\n\tdefer save(&diff, &err)\n\tdefer s.setOrResetLastSyncErr(&err)\n\n\tif s.FailFullSync {\n\t\treturn errors.New(\"Syncer is not enabled\")\n\t}\n\n\tvar streamingInserter func(*Repo)\n\tif s.DisableStreaming {\n\t\tstreamingInserter = func(*Repo) {} //noop\n\t} else {\n\t\tstreamingInserter, err = s.makeNewRepoInserter(ctx)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"syncer.sync.streaming\")\n\t\t}\n\t}\n\n\tvar sourced Repos\n\tif sourced, err = s.sourced(ctx, streamingInserter); err != nil {\n\t\treturn errors.Wrap(err, \"syncer.sync.sourced\")\n\t}\n\n\tstore := s.Store\n\tif tr, ok := s.Store.(Transactor); ok {\n\t\tvar txs TxStore\n\t\tif txs, err = tr.Transact(ctx); err != nil {\n\t\t\treturn errors.Wrap(err, \"syncer.sync.transact\")\n\t\t}\n\t\tdefer txs.Done(&err)\n\t\tstore = txs\n\t}\n\n\tvar stored Repos\n\tif stored, err = store.ListRepos(ctx, StoreListReposArgs{}); err != nil {\n\t\treturn errors.Wrap(err, \"syncer.sync.store.list-repos\")\n\t}\n\n\tdiff = NewDiff(sourced, stored)\n\tupserts := s.upserts(diff)\n\n\tif err = store.UpsertRepos(ctx, upserts...); err != nil {\n\t\treturn errors.Wrap(err, \"syncer.sync.store.upsert-repos\")\n\t}\n\n\tif s.Synced != nil {\n\t\ts.Synced <- diff.Repos()\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "42fd02db6b132a587c31888f0df1e570", "score": "0.5337705", "text": "func (_IPancakePair *IPancakePairTransactorSession) 
Sync() (*types.Transaction, error) {\n\treturn _IPancakePair.Contract.Sync(&_IPancakePair.TransactOpts)\n}", "title": "" }, { "docid": "772c6ff1909d1740bafcb96a79480869", "score": "0.5319351", "text": "func (_IPancakePair *IPancakePairSession) Sync() (*types.Transaction, error) {\n\treturn _IPancakePair.Contract.Sync(&_IPancakePair.TransactOpts)\n}", "title": "" }, { "docid": "9fd29a402276d01d75db322158cc5a8a", "score": "0.5315081", "text": "func (c *Client) Sync(ctx context.Context) error {\n\tmresp, err := c.MemberList(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar eps []string\n\tfor _, m := range mresp.Members {\n\t\tif len(m.Name) != 0 && !m.IsLearner {\n\t\t\teps = append(eps, m.ClientURLs...)\n\t\t}\n\t}\n\tc.SetEndpoints(eps...)\n\treturn nil\n}", "title": "" }, { "docid": "09fa7d13126997eafc6ee2950b50a602", "score": "0.5313885", "text": "func (r *ChannelRouter) syncGraphWithChain() error {\n\t// First, we'll need to check to see if we're already in sync with the\n\t// latest state of the UTXO set.\n\tbestHash, bestHeight, err := r.cfg.Chain.GetBestBlock()\n\tif err != nil {\n\t\treturn err\n\t}\n\tpruneHash, pruneHeight, err := r.cfg.Graph.PruneTip()\n\tif err != nil {\n\t\tswitch {\n\t\t// If the graph has never been pruned, or hasn't fully been\n\t\t// created yet, then we don't treat this as an explicit error.\n\t\tcase err == channeldb.ErrGraphNeverPruned:\n\t\tcase err == channeldb.ErrGraphNotFound:\n\t\tdefault:\n\t\t\treturn err\n\t\t}\n\t}\n\n\tlog.Infof(\"Prune tip for Channel Graph: height=%v, hash=%v\", pruneHeight,\n\t\tpruneHash)\n\n\tswitch {\n\n\t// If the graph has never been pruned, then we can exit early as this\n\t// entails it's being created for the first time and hasn't seen any\n\t// block or created channels.\n\tcase pruneHeight == 0 || pruneHash == nil:\n\t\treturn nil\n\n\t// If the block hashes and heights match exactly, then we don't need to\n\t// prune the channel graph as we're already fully in sync.\n\tcase bestHash.IsEqual(pruneHash) && uint32(bestHeight) == pruneHeight:\n\t\treturn nil\n\t}\n\n\tlog.Infof(\"Syncing channel graph from height=%v (hash=%v) to height=%v \"+\n\t\t\"(hash=%v)\", pruneHeight, pruneHash, bestHeight, bestHash)\n\n\t// If we're not yet caught up, then we'll walk forward in the chain in\n\t// the chain pruning the channel graph with each new block in the chain\n\t// that hasn't yet been consumed by the channel graph.\n\tvar numChansClosed uint32\n\tfor nextHeight := pruneHeight + 1; nextHeight <= uint32(bestHeight); nextHeight++ {\n\t\t// Using the next height, fetch the next block to use in our\n\t\t// incremental graph pruning routine.\n\t\tnextHash, err := r.cfg.Chain.GetBlockHash(int64(nextHeight))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tnextBlock, err := r.cfg.Chain.GetBlock(nextHash)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// We're only interested in all prior outputs that've been\n\t\t// spent in the block, so collate all the referenced previous\n\t\t// outpoints within each tx and input.\n\t\tvar spentOutputs []*wire.OutPoint\n\t\tfor _, tx := range nextBlock.Transactions {\n\t\t\tfor _, txIn := range tx.TxIn {\n\t\t\t\tspentOutputs = append(spentOutputs,\n\t\t\t\t\t&txIn.PreviousOutPoint)\n\t\t\t}\n\t\t}\n\n\t\t// With the spent outputs gathered, attempt to prune the\n\t\t// channel graph, also passing in the hash+height of the block\n\t\t// being pruned so the prune tip can be updated.\n\t\tnumClosed, err := r.cfg.Graph.PruneGraph(spentOutputs, nextHash,\n\t\t\tnextHeight)\n\t\tif err != 
nil {\n\t\t\treturn err\n\t\t}\n\n\t\tlog.Infof(\"Block %v (height=%v) closed %v channels\",\n\t\t\tnextHash, nextHeight, numClosed)\n\n\t\tnumChansClosed += numClosed\n\t}\n\n\tlog.Infof(\"Graph pruning complete: %v channels we're closed since \"+\n\t\t\"height %v\", numChansClosed, pruneHeight)\n\n\treturn nil\n}", "title": "" }, { "docid": "32a8ce5f47c0121761f5dbdc24205e6a", "score": "0.5313316", "text": "func Sync(TwoPSet twopset.TwoPSet) (twopset.TwoPSet, error) {\n\t// Obtain addresses of peer nodes in the cluster\n\tpeers := GetPeerList()\n\n\t// Return the local TwoPSet back if no peers\n\t// are present along with an error\n\tif len(peers) == 0 {\n\t\treturn TwoPSet, errors.New(\"nil peers present\")\n\t}\n\n\t// Iterate over the peer list and send a /twopset/values GET request\n\t// to each peer to obtain its TwoPSet\n\tfor _, peer := range peers {\n\t\tpeerTwoPSet, err := SendListRequest(peer)\n\t\tif err != nil {\n\t\t\tlog.WithFields(log.Fields{\"error\": err, \"peer\": peer}).Error(\"failed sending twopset values request\")\n\t\t\tcontinue\n\t\t}\n\n\t\t// Merge the peer's TwoPSet with our local TwoPSet\n\t\tTwoPSet = twopset.Merge(TwoPSet, peerTwoPSet)\n\t}\n\n\t// DEBUG log in the case of success\n\t// indicating the new TwoPSet\n\tlog.WithFields(log.Fields{\n\t\t\"set\": TwoPSet,\n\t}).Debug(\"successful twopset sync\")\n\n\t// Return the synced new TwoPSet\n\treturn TwoPSet, nil\n}", "title": "" }, { "docid": "6dc02ff9324a744bf38676b211ef8d1d", "score": "0.52769536", "text": "func Sync(cfg *Config, iface string, logger logrus.FieldLogger) error {\n\tlog := logger.WithField(\"iface\", iface)\n\n\tlink, err := SyncLink(cfg, iface, log)\n\tif err != nil {\n\t\tlog.WithError(err).Errorln(\"cannot sync wireguard link\")\n\t\treturn err\n\t}\n\tlog.Info(\"synced link\")\n\n\tif err := SyncWireguardDevice(cfg, link, log); err != nil {\n\t\tlog.WithError(err).Errorln(\"cannot sync wireguard link\")\n\t\treturn err\n\t}\n\tlog.Info(\"synced link\")\n\n\tif err := SyncAddress(cfg, link, log); err != nil {\n\t\tlog.WithError(err).Errorln(\"cannot sync addresses\")\n\t\treturn err\n\t}\n\tlog.Info(\"synced addresss\")\n\n\tvar managedRoutes []net.IPNet\n\tfor _, peer := range cfg.Peers {\n\t\tfor _, rt := range peer.AllowedIPs {\n\t\t\tmanagedRoutes = append(managedRoutes, rt)\n\t\t}\n\t}\n\tif err := SyncRoutes(cfg, link, managedRoutes, log); err != nil {\n\t\tlog.WithError(err).Errorln(\"cannot sync routes\")\n\t\treturn err\n\t}\n\tlog.Info(\"synced routed\")\n\tlog.Info(\"Successfully synced device\")\n\treturn nil\n\n}", "title": "" }, { "docid": "6872673ea5d152ceeda7e02c1c31b496", "score": "0.526449", "text": "func (rm *ResponseManager) synchronize() {\n\tsync := make(chan error)\n\t_ = rm.sendSyncMessage(&synchronizeMessage{sync}, sync)\n}", "title": "" }, { "docid": "966244323229294248a9734cc4c224cb", "score": "0.52609503", "text": "func (c *chainStore) RunSync(upTo uint64, peers []net.Peer) {\n\tif len(peers) == 0 {\n\t\tpeers = toPeers(c.crypto.GetGroup().Nodes)\n\t}\n\n\tc.syncm.SendSyncRequest(upTo, peers)\n}", "title": "" }, { "docid": "3d65fa2d82b40207f42539cb572c9afc", "score": "0.52326226", "text": "func (_Main *MainTransactorSession) Sync() (*types.Transaction, error) {\n\treturn _Main.Contract.Sync(&_Main.TransactOpts)\n}", "title": "" }, { "docid": "5302286313c303646b40af2a6dafd19a", "score": "0.5188751", "text": "func (_Main *MainSession) Sync() (*types.Transaction, error) {\n\treturn _Main.Contract.Sync(&_Main.TransactOpts)\n}", "title": "" }, { "docid": 
"1d0ef0a6d714c1b88552175de017f009", "score": "0.51850355", "text": "func Sync() {}", "title": "" }, { "docid": "a578d537d402ee3c41c9615cc2ea563f", "score": "0.5182588", "text": "func (_Main *MainTransactor) Sync(opts *bind.TransactOpts) (*types.Transaction, error) {\n\treturn _Main.contract.Transact(opts, \"sync\")\n}", "title": "" }, { "docid": "d4cd630316d25bc948c9c2717eda0b96", "score": "0.51585805", "text": "func RelayLocal(localConn, remoteConn net.Conn, createConn func([16]byte) net.Conn) {\n\tlrExit := localToRemote(localConn, remoteConn)\n\tvar dataKey [16]byte\n\tn, err := remoteConn.Read(dataKey[:])\n\tif err != nil || n != 16 {\n\t\treturn\n\t}\n\n\tdataBlocks, continuousData, exitJoinBlock, joinBlockfinish := blockJoiner()\n\tdefer func() { exitJoinBlock <- true }()\n\n\texitThreadMan := threadManager(createConn, dataBlocks, dataKey)\n\trecvExit, exitRecv := bufferFromRemote(remoteConn, dataBlocks)\n\tsendExit, exitSend := dataBlockToConn(localConn, continuousData)\n\n\tleave := func() {\n\t\tremoteConn.SetDeadline(time.Now())\n\t\tlocalConn.SetDeadline(time.Now())\n\t\texitRecv <- true\n\t\texitSend <- true\n\t\texitThreadMan <- true\n\t}\n\n\tselect {\n\tcase <-lrExit:\n\t\tleave()\n\t\treturn\n\tcase <-sendExit:\n\t\tleave()\n\t\treturn\n\tcase <-recvExit:\n\t\t// Wait for data process finished or leave\n\t\texitThreadMan <- true\n\t\texitJoinBlock <- false\n\t\texitSend <- false\n\t\t<-joinBlockfinish\n\t\t<-sendExit\n\t}\n\treturn\n}", "title": "" }, { "docid": "a77b8ce19746619531c5038f72ea7904", "score": "0.5149655", "text": "func setupSyncManager(c *config.SyncManagerConfig, exchangeManager iExchangeManager, remoteConfig *config.RemoteControlConfig, websocketRoutineManagerEnabled bool) (*syncManager, error) {\n\tif c == nil {\n\t\treturn nil, fmt.Errorf(\"%T %w\", c, common.ErrNilPointer)\n\t}\n\n\tif !c.SynchronizeOrderbook && !c.SynchronizeTicker && !c.SynchronizeTrades {\n\t\treturn nil, errNoSyncItemsEnabled\n\t}\n\tif exchangeManager == nil {\n\t\treturn nil, errNilExchangeManager\n\t}\n\tif remoteConfig == nil {\n\t\treturn nil, errNilConfig\n\t}\n\n\tif c.NumWorkers <= 0 {\n\t\tc.NumWorkers = config.DefaultSyncerWorkers\n\t}\n\n\tif c.TimeoutREST <= time.Duration(0) {\n\t\tc.TimeoutREST = config.DefaultSyncerTimeoutREST\n\t}\n\n\tif c.TimeoutWebsocket <= time.Duration(0) {\n\t\tc.TimeoutWebsocket = config.DefaultSyncerTimeoutWebsocket\n\t}\n\n\tif c.FiatDisplayCurrency.IsEmpty() {\n\t\treturn nil, fmt.Errorf(\"FiatDisplayCurrency %w\", currency.ErrCurrencyCodeEmpty)\n\t}\n\n\tif !c.FiatDisplayCurrency.IsFiatCurrency() {\n\t\treturn nil, fmt.Errorf(\"%s %w\", c.FiatDisplayCurrency, currency.ErrFiatDisplayCurrencyIsNotFiat)\n\t}\n\n\tif c.PairFormatDisplay == nil {\n\t\treturn nil, fmt.Errorf(\"%T %w\", c.PairFormatDisplay, common.ErrNilPointer)\n\t}\n\n\ts := &syncManager{\n\t\tconfig: *c,\n\t\tremoteConfig: remoteConfig,\n\t\texchangeManager: exchangeManager,\n\t\twebsocketRoutineManagerEnabled: websocketRoutineManagerEnabled,\n\t\tfiatDisplayCurrency: c.FiatDisplayCurrency,\n\t\tformat: *c.PairFormatDisplay,\n\t\ttickerBatchLastRequested: make(map[string]time.Time),\n\t\tcurrencyPairs: make(map[currencyPairKey]*currencyPairSyncAgent),\n\t}\n\n\tlog.Debugf(log.SyncMgr,\n\t\t\"Exchange currency pair syncer config: continuous: %v ticker: %v\"+\n\t\t\t\" orderbook: %v trades: %v workers: %v verbose: %v timeout REST: %v\"+\n\t\t\t\" timeout Websocket: %v\",\n\t\ts.config.SynchronizeContinuously, s.config.SynchronizeTicker, 
s.config.SynchronizeOrderbook,\n\t\ts.config.SynchronizeTrades, s.config.NumWorkers, s.config.Verbose, s.config.TimeoutREST,\n\t\ts.config.TimeoutWebsocket)\n\ts.inService.Add(1)\n\treturn s, nil\n}", "title": "" }, { "docid": "262d80682355ba107b670b72517cce57", "score": "0.5135934", "text": "func (s *Service) merge(final *FinalStatement, m *merge) (*FinalStatement,\n\tonet.ClientError) {\n\tif m.distrib {\n\t\t// Used not to start merge process 2 times, when one is on run.\n\t\tlog.Lvl2(s.ServerIdentity(), \"Not enter merge\")\n\t\treturn nil, onet.NewClientErrorCode(ErrorMergeInProgress, \"Merge Process in in progress\")\n\t}\n\tlog.Lvl2(\"Merge \", s.ServerIdentity())\n\tm.distrib = true\n\t// Flag indicating that there were connection with other nodes\n\tsyncData, ok := s.syncs[string(final.Desc.Hash())]\n\tif !ok {\n\t\treturn nil, onet.NewClientErrorCode(ErrorMerge, \"Wrong Hash\")\n\t}\n\tfor _, party := range final.Desc.Parties {\n\t\tpopDesc := PopDesc{\n\t\t\tName: final.Desc.Name,\n\t\t\tDateTime: final.Desc.DateTime,\n\t\t\tLocation: party.Location,\n\t\t\tRoster: party.Roster,\n\t\t\tParties: final.Desc.Parties,\n\t\t}\n\t\thash := popDesc.Hash()\n\t\tif _, ok := m.statementsMap[string(hash)]; ok {\n\t\t\t// that's unlikely due to running in cycle\n\t\t\tcontinue\n\t\t}\n\t\tmc := &mergeConfig{Final: final, ID: hash}\n\t\tfor _, si := range party.Roster.List {\n\t\t\tlog.Lvlf2(\"Sending from %s to %s\", s.ServerIdentity(), si)\n\t\t\terr := s.SendRaw(si, mc)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, onet.NewClientErrorCode(ErrorInternal, err.Error())\n\t\t\t}\n\t\t\tvar mcr *mergeConfigReply\n\t\t\tselect {\n\t\t\tcase mcr = <-syncData.mcChannel:\n\t\t\t\tbreak\n\t\t\tcase <-time.After(timeout):\n\t\t\t\treturn nil, onet.NewClientErrorCode(ErrorTimeout,\n\t\t\t\t\t\"timeout on waiting response MergeConfig\")\n\t\t\t}\n\t\t\tif mcr == nil {\n\t\t\t\treturn nil, onet.NewClientErrorCode(ErrorMerge,\n\t\t\t\t\t\"Error during merging\")\n\t\t\t}\n\t\t\tif mcr.PopStatus == PopStatusOK {\n\t\t\t\tm.statementsMap[string(hash)] = mcr.Final\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif _, ok = m.statementsMap[string(hash)]; !ok {\n\t\t\treturn nil, onet.NewClientErrorCode(ErrorMerge,\n\t\t\t\t\"merge with party failed\")\n\t\t}\n\t}\n\n\tnewFinal := &FinalStatement{}\n\t*newFinal = *final\n\tnewFinal.Desc = &PopDesc{}\n\t*newFinal.Desc = *final.Desc\n\n\t// Unite the lists\n\tlocs := make([]string, 0)\n\tRoster := &onet.Roster{}\n\tna := make([]abstract.Point, 0)\n\tfor _, f := range m.statementsMap {\n\t\t// although there must not be any intersection\n\t\t// in attendies list it's better to check it\n\t\t// not simply extend the list\n\t\tna = unionAttendies(na, f.Attendees)\n\t\tRoster = unionRoster(Roster, f.Desc.Roster)\n\t\tlocs = append(locs, f.Desc.Location)\n\t}\n\tsortAll(locs, Roster.List, na)\n\tnewFinal.Desc.Location = strings.Join(locs, DELIMETER)\n\tnewFinal.Desc.Roster = Roster\n\tnewFinal.Attendees = na\n\tnewFinal.Merged = true\n\treturn newFinal, nil\n}", "title": "" }, { "docid": "734377a0c7dd624f4840c5a695a18303", "score": "0.51353526", "text": "func fakeSyncer(syncCh <-chan *syncRequest, done <-chan struct{}) {\n\tfor {\n\t\tselect {\n\t\tcase <-syncCh:\n\t\tcase <-done:\n\t\t\treturn\n\t\t}\n\t}\n}", "title": "" }, { "docid": "ba9ad20a85aa8970ef067d2f61aefe5b", "score": "0.5133718", "text": "func (r *Reactor) Sync(stateProvider StateProvider, discoveryTime time.Duration) (sm.State, *types.Commit, error) {\n\tr.mtx.Lock()\n\tif r.syncer != nil 
{\n\t\tr.mtx.Unlock()\n\t\treturn sm.State{}, nil, errors.New(\"a state sync is already in progress\")\n\t}\n\tr.syncer = newSyncer(r.cfg, r.Logger, r.conn, r.connQuery, stateProvider, r.tempDir)\n\tr.mtx.Unlock()\n\n\thook := func() {\n\t\tr.Logger.Debug(\"Requesting snapshots from known peers\")\n\t\t// Request snapshots from all currently connected peers\n\n\t\tr.Switch.BroadcastEnvelope(p2p.Envelope{\n\t\t\tChannelID: SnapshotChannel,\n\t\t\tMessage: &ssproto.SnapshotsRequest{},\n\t\t})\n\t}\n\n\thook()\n\n\tstate, commit, err := r.syncer.SyncAny(discoveryTime, hook)\n\n\tr.mtx.Lock()\n\tr.syncer = nil\n\tr.mtx.Unlock()\n\treturn state, commit, err\n}", "title": "" }, { "docid": "89f3f76ea882683942ba85d57d279979", "score": "0.5125775", "text": "func (s *Syncer) synchronize(ctx context.Context) bool {\n\tctx = log.WithNewSessionID(ctx)\n\tlogger := s.logger.WithContext(ctx)\n\n\tif s.isClosed() {\n\t\tlogger.Warning(\"attempting to sync while shutting down\")\n\t\treturn false\n\t}\n\n\tif s.ticker.GetCurrentLayer().Uint32() == 0 {\n\t\treturn false\n\t}\n\n\t// at most one synchronize process can run at any time\n\tif !s.setSyncerBusy() {\n\t\tlogger.Info(\"sync is already running, giving up\")\n\t\treturn false\n\t}\n\n\t// no need to worry about race condition for s.run. only one instance of synchronize can run at a time\n\ts.run++\n\tlogger.With().Info(fmt.Sprintf(\"starting sync run #%v\", s.run),\n\t\tlog.String(\"sync_state\", s.getSyncState().String()),\n\t\tlog.FieldNamed(\"current\", s.ticker.GetCurrentLayer()),\n\t\tlog.FieldNamed(\"latest\", s.mesh.LatestLayer()),\n\t\tlog.FieldNamed(\"processed\", s.mesh.ProcessedLayer()))\n\n\ts.setStateBeforeSync(ctx)\n\tvar (\n\t\tvQueue chan types.LayerID\n\t\tvDone chan struct{}\n\t\t// number of attempts to start a validation goroutine\n\t\tattempt = 0\n\t\t// whether the data sync succeed. validation failure is not checked by design.\n\t\tsuccess = true\n\t\t// the last layer that is synced in this run\n\t\tlastSynced = s.mesh.ProcessedLayer()\n\t)\n\n\tattemptFunc := func() bool {\n\t\tif missing := s.mesh.MissingLayer(); (missing != types.LayerID{}) {\n\t\t\tlogger.With().Debug(\"fetching data for missing layer\", missing)\n\t\t\tif err := s.syncLayer(ctx, missing); err != nil {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tvQueue <- missing\n\t\t}\n\t\t// using ProcessedLayer() instead of LatestLayer() so we can validate layers on a best-efforts basis\n\t\t// and retry in the next sync run if validation fails.\n\t\t// our clock starts ticking from 1, so it is safe to skip layer 0\n\t\t// always sync to currentLayer-1 to reduce race with gossip and hare/tortoise\n\t\tfor layerID := s.mesh.ProcessedLayer().Add(1); layerID.Before(s.ticker.GetCurrentLayer()); layerID = layerID.Add(1) {\n\t\t\tif layerID.After(lastSynced) {\n\t\t\t\tlastSynced = layerID\n\t\t\t\tif err := s.syncLayer(ctx, layerID); err != nil {\n\t\t\t\t\treturn false\n\t\t\t\t}\n\t\t\t}\n\t\t\tvQueue <- layerID\n\t\t\tlogger.With().Debug(\"finished data sync\", layerID)\n\t\t}\n\t\tlogger.With().Debug(\"data is synced, waiting for validation\",\n\t\t\tlog.Int(\"attempt\", attempt),\n\t\t\tlog.FieldNamed(\"current\", s.ticker.GetCurrentLayer()),\n\t\t\tlog.FieldNamed(\"latest\", s.mesh.LatestLayer()),\n\t\t\tlog.FieldNamed(\"processed\", s.mesh.ProcessedLayer()))\n\t\treturn true\n\t}\n\n\t// check if we are on target. 
if not, do the sync loop again\n\tfor success && (!s.stateOnTarget() || s.mesh.MissingLayer() != types.LayerID{}) {\n\t\tattempt++\n\t\tif attempt > maxAttemptWithinRun {\n\t\t\tlogger.Info(\"all data synced but unable to advance processed layer after max attempts\")\n\t\t\tbreak\n\t\t}\n\t\tvQueue, vDone = s.startValidating(ctx, s.run, attempt)\n\t\tsuccess = attemptFunc()\n\t\tclose(vQueue)\n\t\tselect {\n\t\tcase <-vDone:\n\t\tcase <-s.shutdownCtx.Done():\n\t\t\treturn false\n\t\t}\n\t}\n\n\ts.setStateAfterSync(ctx, success)\n\tlogger.With().Info(fmt.Sprintf(\"finished sync run #%v\", s.run),\n\t\tlog.Bool(\"success\", success),\n\t\tlog.Int(\"attempt\", attempt),\n\t\tlog.String(\"sync_state\", s.getSyncState().String()),\n\t\tlog.FieldNamed(\"current\", s.ticker.GetCurrentLayer()),\n\t\tlog.FieldNamed(\"latest\", s.mesh.LatestLayer()),\n\t\tlog.FieldNamed(\"processed\", s.mesh.ProcessedLayer()))\n\ts.setSyncerIdle()\n\treturn success\n}", "title": "" }, { "docid": "5d3f8710308b3ab314d4a7344b024d23", "score": "0.51250166", "text": "func EnsureBranchSynchronized(branch, remote string) (err error) {\n\t// Check whether the remote counterpart actually exists.\n\texists, err := RemoteBranchExists(branch, remote)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !exists {\n\t\treturn nil\n\t}\n\n\t// Get the data needed.\n\tvar (\n\t\tlocalRef = fmt.Sprintf(\"refs/heads/%v\", branch)\n\t\tremoteRef = fmt.Sprintf(\"refs/remotes/%v/%v\", remote, branch)\n\t)\n\tlocalHexsha, err := Hexsha(localRef)\n\tif err != nil {\n\t\treturn err\n\t}\n\tremoteHexsha, err := Hexsha(remoteRef)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif localHexsha == remoteHexsha {\n\t\t// The branch is up to date, we are done here.\n\t\treturn nil\n\t}\n\n\t// Check whether the local branch can be fast-forwarded.\n\tremoteBranch := fmt.Sprintf(\"%v/%v\", remote, branch)\n\t_, err = Run(\"merge-base\", \"--is-ancestor\", branch, remoteBranch)\n\tif err != nil {\n\t\t// --is-ancestor returns exit status 0 on true, 1 on false, some other on error.\n\t\t// We cannot check the value in a platform-independent way, but we count on the fact that\n\t\t// stderr will be non-empty on error.\n\t\tex, ok := err.(errs.Err)\n\t\tif !ok || len(ex.Hint()) != 0 {\n\t\t\t// In case err is not implementing errs.Err or len(stderr) != 0, we return the error.\n\t\t\treturn err\n\t\t}\n\t\t// Otherwise the error means that --is-ancestor returned false,\n\t\t// so we cannot fast-forward and we have to return an error.\n\t\treturn &ErrRefNotInSync{branch}\n\t}\n\n\t// Perform a fast-forward merge.\n\t// Ask the user before doing so.\n\tfmt.Println()\n\tfmt.Printf(\"Branch '%v' is behind '%v', and can be fast-forwarded.\\n\", branch, remoteBranch)\n\tproceed, err := prompt.Confirm(\"Shall we perform the merge? 
It's all safe!\", true)\n\tfmt.Println()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !proceed {\n\t\treturn &ErrRefNotInSync{branch}\n\t}\n\n\t// Make sure the right branch is checked out.\n\tcurrentBranch, err := gitutil.CurrentBranch()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif branch != currentBranch {\n\t\t// Checkout the branch to be merged.\n\t\ttask := fmt.Sprintf(\"Checkout branch '%v'\", branch)\n\t\tif err := Checkout(branch); err != nil {\n\t\t\treturn errs.NewError(task, err)\n\t\t}\n\t\tdefer func() {\n\t\t\t// Checkout the original branch on return.\n\t\t\ttask := fmt.Sprintf(\"Checkout branch '%v'\", currentBranch)\n\t\t\tif ex := Checkout(currentBranch); ex != nil {\n\t\t\t\tif err == nil {\n\t\t\t\t\terr = ex\n\t\t\t\t} else {\n\t\t\t\t\terrs.LogError(task, err)\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\t}\n\n\t// Merge. Use --ff-only, just to be sure.\n\t// But we have already checked that this will be a fast-forward merge.\n\t_, err = Run(\"merge\", \"--ff-only\", remoteBranch)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlog.Log(fmt.Sprintf(\"Branch '%v' fast-forwarded onto '%v'\", branch, remoteBranch))\n\treturn nil\n}", "title": "" }, { "docid": "1af68a32b7aae8519e5a7d45e41baf46", "score": "0.5118321", "text": "func runTinzenite(t *core.Tinzenite) {\n\t// do this here so that it is guaranteed to be set\n\tt.RegisterPeerValidation(func(address string, wantsTrust bool) {\n\t\tvar allow bool\n\t\tif wantsTrust {\n\t\t\tquestion := shared.CreateYesNo(\"Add peer \" + address[:8] + \" as TRUSTED peer?\")\n\t\t\tallow = question.Ask() > 0\n\t\t} else {\n\t\t\tquestion := shared.CreateYesNo(\"Add peer \" + address[:8] + \" as ENCRYPTED peer?\")\n\t\t\tallow = question.Ask() > 0\n\t\t}\n\t\tif !allow {\n\t\t\tlog.Println(\"Tin: will not add peer, as requested.\")\n\t\t\treturn\n\t\t}\n\t\t// allow peer\n\t\terr := t.AllowPeer(address)\n\t\tif err != nil {\n\t\t\tlog.Println(\"Tinzenite: failed to allow peer:\", err)\n\t\t}\n\t\tlog.Println(\"Tin: will allow peer, as requested.\")\n\t})\n\t// print important info\n\taddress, _ := t.Address()\n\tfmt.Printf(\"Running peer <%s>.\\nID: %s\\n\", t.Name(), address)\n\t// build ticks only once instead of every time\n\t// FIXME: for now using prime numbers to keep them from all ticking at the same time\n\ttickUpdate := time.Tick(time.Duration(7) * time.Second)\n\ttickRemote := time.Tick(time.Duration(29) * time.Second)\n\ttickEncrypted := time.Tick(time.Duration(53) * time.Second)\n\t// prepare quitting via ctrl-c\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, os.Interrupt)\n\t// loop until close\n\tfor {\n\t\tselect {\n\t\tcase <-tickUpdate:\n\t\t\terr := t.SyncLocal()\n\t\t\tif err != nil {\n\t\t\t\tlogMain(\"SyncLocal error:\", err.Error())\n\t\t\t}\n\t\tcase <-tickRemote:\n\t\t\terr := t.SyncRemote()\n\t\t\tif err != nil {\n\t\t\t\tlogMain(\"SyncRemote error:\", err.Error())\n\t\t\t}\n\t\tcase <-tickEncrypted:\n\t\t\terr := t.SyncEncrypted()\n\t\t\tif err != nil {\n\t\t\t\tlogMain(\"SyncEncrypted error:\", err.Error())\n\t\t\t}\n\t\tcase <-c:\n\t\t\t// on interrupt close tinzenite\n\t\t\tt.Close()\n\t\t\treturn\n\t\t} // select\n\t} // for\n}", "title": "" }, { "docid": "f9ebceaf56f31a3cec7d92e018f0517e", "score": "0.5105363", "text": "func (s *service) syncBlockToStateStorage(commitedBlockPair *protocol.BlockPairContainer) error {\n\t_, err := s.stateStorage.CommitStateDiff(&services.CommitStateDiffInput{\n\t\tResultsBlockHeader: commitedBlockPair.ResultsBlock.Header,\n\t\tContractStateDiffs: 
commitedBlockPair.ResultsBlock.ContractStateDiffs,\n\t})\n\treturn err\n}", "title": "" }, { "docid": "ada71ff85b3662dcadf509187815a61c", "score": "0.5104634", "text": "func (s *service) syncBlockToStateStorage(committedBlockPair *protocol.BlockPairContainer) error {\n\t_, err := s.stateStorage.CommitStateDiff(&services.CommitStateDiffInput{\n\t\tResultsBlockHeader: committedBlockPair.ResultsBlock.Header,\n\t\tContractStateDiffs: committedBlockPair.ResultsBlock.ContractStateDiffs,\n\t})\n\treturn err\n}", "title": "" }, { "docid": "71441f613e5aa2632ea6c1bc81f36001", "score": "0.51015455", "text": "func (r *Reactor) Sync(stateProvider StateProvider, discoveryTime time.Duration) (sm.State, *types.Commit, error) {\n\tr.mtx.Lock()\n\tif r.syncer != nil {\n\t\tr.mtx.Unlock()\n\t\treturn sm.State{}, nil, errors.New(\"a state sync is already in progress\")\n\t}\n\n\tr.syncer = newSyncer(r.Logger, r.conn, r.connQuery, stateProvider, r.snapshotCh.Out(), r.chunkCh.Out(), r.tempDir)\n\tr.mtx.Unlock()\n\n\t// request snapshots from all currently connected peers\n\tr.Logger.Debug(\"requesting snapshots from known peers\")\n\tr.snapshotCh.Out() <- p2p.Envelope{\n\t\tBroadcast: true,\n\t\tMessage: &ssproto.SnapshotsRequest{},\n\t}\n\n\tstate, commit, err := r.syncer.SyncAny(discoveryTime)\n\n\tr.mtx.Lock()\n\tr.syncer = nil\n\tr.mtx.Unlock()\n\n\treturn state, commit, err\n}", "title": "" }, { "docid": "fa522d5813114dfbdc168a584c47afbf", "score": "0.51006913", "text": "func syncRepo(repo, dest, branch, rev string, depth int) error {\n\ttarget := path.Join(volMount, dest)\n\tgitRepoPath := path.Join(target, \".git\")\n\t_, err := os.Stat(gitRepoPath)\n\tswitch {\n\tcase os.IsNotExist(err):\n\t\terr = initRepo(repo, target, branch, rev, depth)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\tcase err != nil:\n\t\treturn fmt.Errorf(\"error checking if repo exist %q: %v\", gitRepoPath, err)\n\tdefault:\n\t\tneedUpdate, err := gitRemoteChanged(target, branch)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif !needUpdate {\n\t\t\tlog.Printf(\"No change\")\n\t\t\treturn nil\n\t\t}\n\t}\n\n\treturn addWorktreeAndSwap(dest, branch, rev)\n}", "title": "" }, { "docid": "303372e4e4cddc13b3e9abf29f7dcb1f", "score": "0.50985926", "text": "func (r *Replica) sync() {\n\tif !r.Durable {\n\t\treturn\n\t}\n\n\tr.StableStore.Sync()\n}", "title": "" }, { "docid": "2fb2f00d5a9e7bc4d37005cc5bcc6b4d", "score": "0.50931525", "text": "func do_sync(syncinfo Event) bool {\n\tconn, err := net.Dial(\"tcp\", syncinfo.Host)\n\tif !check(err, true) {\n\t\treturn false\n\t}\n\n\tdefer conn.Close()\n\tDPrintf(\"opened connection to %v\", syncinfo.Host)\n\tenc := gob.NewEncoder(conn) // Will write to network.\n\tdec := gob.NewDecoder(conn) // Will read from network.\n\tDPrintf(\"sending file list\")\n\tif !check(enc.Encode(syncinfo.Files), true) {\n\t\treturn false\n\t}\n\tvar reply SyncReplyMsg\n\tDPrintf(\"waiting for reply\")\n\tif !check(dec.Decode(&reply), true) {\n\t\treturn false\n\t}\n\tDPrintf(\"checking their reply\")\n\tfor k, v := range reply.Files {\n\t\tif v {\n\t\t\tDPrintf(\"%v wants file %v\", syncinfo.Host, k)\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "a9a56dd7c3421ad6249a69d927f40edf", "score": "0.50919646", "text": "func (c *pipelineClient) Sync() {\n\tc.waitGroup.Wait()\n}", "title": "" }, { "docid": "207a5e1ecd47179e5ea1f94bff79a191", "score": "0.50749844", "text": "func (b *blockManager) SyncPeer() *ServerPeer {\n\tb.syncPeerMutex.Lock()\n\tdefer b.syncPeerMutex.Unlock()\n\n\treturn 
b.syncPeer\n}", "title": "" }, { "docid": "ac34ce64b819cfdb899e70cba5fd29e9", "score": "0.50701505", "text": "func (r *epaxosReplica) sync() {\n\tif !r.durable {\n\t\treturn\n\t}\n\t//TODO write to stable store\n}", "title": "" }, { "docid": "ccbc030c941857976fbf35d839cb7a52", "score": "0.5064452", "text": "func SyncRemoteRepo(hubData model.CloningInput) error {\n\thubPath := defaultPath + hubData.ProjectID + \"/\" + hubData.HubName\n\terr := os.RemoveAll(hubPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tupdateHub := model.CreateRemoteChaosHub{\n\t\tHubName: hubData.HubName,\n\t\tRepoURL: hubData.RepoURL,\n\t\tProjectID: hubData.ProjectID,\n\t}\n\tlog.Info(\"downloading remote hub\")\n\terr = DownloadRemoteHub(updateHub)\n\tif err != nil {\n\t\treturn err\n\t}\n\tlog.Info(\"remote hub \", hubData.HubName, \"downloaded \")\n\treturn nil\n}", "title": "" }, { "docid": "1fbfe5c098ddcff6247f68b299d3100a", "score": "0.5043176", "text": "func (s *Server) syncLeader() {\n\tfor isLeader := range s.raft.LeaderCh() {\n\t\tif !isLeader {\n\t\t\tcontinue\n\t\t}\n\n\t\t// although we are listening over serf events\n\t\t// doing it here as well makes sure we are in sync\n\t\tfor _, member := range s.serf.Members() {\n\t\t\tvar err error\n\t\t\tswitch member.Status {\n\t\t\tcase serf.StatusAlive:\n\t\t\t\terr = s.HandleAddPeer(member)\n\t\t\tcase serf.StatusLeft, serf.StatusFailed:\n\t\t\t\terr = s.HandleLeavePeer(member)\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\ts.l.Error(\"leader failed to sync member\", \"error\", err.Error(),\n\t\t\t\t\t\"member name\", member.Name, \"member status\", member.Status)\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "eb59e336b551f343fbba5a2fff20d346", "score": "0.50179625", "text": "func (nm *NetworkManager) SyncNet(wnet Network) error {\n\treq := wnet.Driver\n\tif req == \"\" {\n\t\treq = \"KEYHOLE\"\n\t}\n\n\tdrvr, ok := nm.driver[req]\n\tif !ok {\n\t\tlog.Printf(\"Failure to activate peer on '%s': missing driver '%s'\", wnet.ID, req)\n\t\treturn ErrInternalError\n\t}\n\tident := wnet.ID\n\tif wnet.Interface != \"\" {\n\t\tident = wnet.Interface\n\t}\n\treturn drvr.Configure(ident, wnet.NetState)\n}", "title": "" }, { "docid": "47e280601d175ea0d255aa9776713b70", "score": "0.5010442", "text": "func (s *instanceConnection) Sync(log *logging.Logger) error {\n\tif _, err := s.Run(log, \"sudo sync\", \"\", false); err != nil {\n\t\treturn maskAny(err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "10ad3913e68b0d6d7ee7648d30b83c5b", "score": "0.4980669", "text": "func (w *SyncWorker) ResyncIndex(onNewBlock func(hash string)) error {\n\tremote, err := w.chain.GetBestBlockHash()\n\tif err != nil {\n\t\treturn err\n\t}\n\tlocalBestHeight, local, err := w.db.GetBestBlock()\n\tif err != nil {\n\t\tlocal = \"\"\n\t}\n\n\t// If the locally indexed block is the same as the best block on the\n\t// network, we're done.\n\tif local == remote {\n\t\tglog.Infof(\"resync: synced on %d %s\", localBestHeight, local)\n\t\treturn nil\n\t}\n\n\tvar header *bchain.BlockHeader\n\tif local != \"\" {\n\t\t// Is local tip on the best chain?\n\t\theader, err = w.chain.GetBlockHeader(local)\n\t\tforked := false\n\t\tif err != nil {\n\t\t\tif e, ok := err.(*bchain.RPCError); ok && e.Message == \"Block not found\" {\n\t\t\t\tforked = true\n\t\t\t} else {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\tif header.Confirmations < 0 {\n\t\t\t\tforked = true\n\t\t\t}\n\t\t}\n\n\t\tif forked {\n\t\t\t// find and disconnect forked blocks and then synchronize again\n\t\t\tglog.Info(\"resync: local 
is forked\")\n\t\t\tvar height uint32\n\t\t\tfor height = localBestHeight - 1; height >= 0; height-- {\n\t\t\t\tlocal, err = w.db.GetBlockHash(height)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tremote, err = w.chain.GetBlockHash(height)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tif local == remote {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\terr = w.db.DisconnectBlocks(height+1, localBestHeight)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\treturn w.ResyncIndex(onNewBlock)\n\t\t}\n\t}\n\n\tvar hash string\n\tif header != nil {\n\t\tglog.Info(\"resync: local is behind\")\n\t\thash = header.Next\n\t\tw.startHeight = localBestHeight\n\t} else {\n\t\t// If the local block is missing, we're indexing from the genesis block\n\t\t// or from the start block specified by flags\n\t\tglog.Info(\"resync: genesis from block \", w.startHeight)\n\t\thash, err = w.chain.GetBlockHash(w.startHeight)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// if parallel operation is enabled and the number of blocks to be connected is large,\n\t// use parallel routine to load majority of blocks\n\tif w.syncWorkers > 1 {\n\t\tchainBestHeight, err := w.chain.GetBestBlockHeight()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif chainBestHeight-w.startHeight > uint32(w.syncChunk) {\n\t\t\tglog.Infof(\"resync: parallel sync of blocks %d-%d, using %d workers\", w.startHeight, chainBestHeight, w.syncWorkers)\n\t\t\terr = w.connectBlocksParallel(w.startHeight, chainBestHeight)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\t// after parallel load finish the sync using standard way,\n\t\t\t// new blocks may have been created in the meantime\n\t\t\treturn w.ResyncIndex(onNewBlock)\n\t\t}\n\t}\n\n\treturn w.connectBlocks(hash, onNewBlock)\n}", "title": "" }, { "docid": "0a847807277f6abd19cc0c3f112d8302", "score": "0.497777", "text": "func Peer2PeerSimSend(client pb.WorkerClient, message []*pb.SimMessageStruct, partitionId int, srcId int) {\n\tfor len(message) > tools.RPCSendSize {\n\t\tslice := message[0:tools.RPCSendSize]\n\t\tmessage = message[tools.RPCSendSize:]\n\t\t_, err := client.SimSend(context.Background(), &pb.SimMessageRequest{Pair: slice})\n\t\tif err != nil {\n\t\t\tlog.Printf(\"%v send to %v error\", srcId, partitionId)\n\t\t\tlog.Fatal(err)\n\t\t}\n\t}\n\tif len(message) != 0 {\n\t\t_, err := client.SimSend(context.Background(), &pb.SimMessageRequest{Pair: message})\n\t\tif err != nil {\n\t\t\tlog.Printf(\"%v send to %v error\", srcId, partitionId)\n\t\t\tlog.Fatal(err)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "d16643ffee5e598acdcb36ee51843e71", "score": "0.49711895", "text": "func Sync(dst, src string) error {\n\treturn NewSyncer().Sync(dst, src)\n}", "title": "" }, { "docid": "6ee0f5dee97321bd62124e8f89485697", "score": "0.49700063", "text": "func (w *spvWallet) connect(ctx context.Context, wg *sync.WaitGroup) error {\n\tif err := logNeutrino(w.netDir); err != nil {\n\t\treturn fmt.Errorf(\"error initializing btcwallet+neutrino logging: %v\", err)\n\t}\n\n\terr := w.startWallet()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ttxNotes := w.wallet.txNotifications()\n\n\t// Nanny for the caches checkpoints and txBlocks caches.\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tdefer w.stop()\n\t\tdefer txNotes.Done()\n\n\t\tticker := time.NewTicker(time.Minute * 20)\n\t\tdefer ticker.Stop()\n\t\texpiration := time.Hour * 2\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-ticker.C:\n\t\t\t\tw.txBlocksMtx.Lock()\n\t\t\t\tfor txHash, 
entry := range w.txBlocks {\n\t\t\t\t\tif time.Since(entry.lastAccess) > expiration {\n\t\t\t\t\t\tdelete(w.txBlocks, txHash)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tw.txBlocksMtx.Unlock()\n\n\t\t\t\tw.checkpointMtx.Lock()\n\t\t\t\tfor outPt, check := range w.checkpoints {\n\t\t\t\t\tif time.Since(check.lastAccess) > expiration {\n\t\t\t\t\t\tdelete(w.checkpoints, outPt)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tw.checkpointMtx.Unlock()\n\n\t\t\tcase note := <-txNotes.C:\n\t\t\t\tif len(note.AttachedBlocks) > 0 {\n\t\t\t\t\tlastBlock := note.AttachedBlocks[len(note.AttachedBlocks)-1]\n\t\t\t\t\tsyncTarget := atomic.LoadInt32(&w.syncTarget)\n\n\t\t\t\t\tfor ib := range note.AttachedBlocks {\n\t\t\t\t\t\tfor _, nt := range note.AttachedBlocks[ib].Transactions {\n\t\t\t\t\t\t\tw.log.Debugf(\"Block %d contains wallet transaction %v\", note.AttachedBlocks[ib].Height, nt.Hash)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif syncTarget == 0 || (lastBlock.Height < syncTarget && lastBlock.Height%10_000 != 0) {\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\n\t\t\t\t\tselect {\n\t\t\t\t\tcase w.tipChan <- &block{\n\t\t\t\t\t\thash: *lastBlock.Hash,\n\t\t\t\t\t\theight: int64(lastBlock.Height),\n\t\t\t\t\t}:\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tw.log.Warnf(\"tip report channel was blocking\")\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn nil\n}", "title": "" }, { "docid": "38a288ae2d856a2e819e953a6135d78a", "score": "0.4969672", "text": "func Sync(args ...string) error {\n\tpeer := args[0]\n\tcmdName := args[1]\n\tcmdArgs := pack(strings.Fields(args[2]))\n\n\tclient, err := GetRedisConn(peer)\n\tif err != nil {\n\t\treturn err\n\t}\n\tresp, err := client.Do(cmdName, cmdArgs...)\n\tif err != nil {\n\t\treturn err\n\t}\n\tlogger.Println(\"RESP\", resp)\n\treturn nil\n}", "title": "" }, { "docid": "edeecfafd14a2f94bc084e363a144e80", "score": "0.4965574", "text": "func (e *PublicAPI) Syncing() (interface{}, error) {\n\te.logger.Debugln(\"eth_syncing\")\n\n\tstatus, err := e.clientCtx.Client.Status(e.ctx)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tif !status.SyncInfo.CatchingUp {\n\t\treturn false, nil\n\t}\n\n\treturn map[string]interface{}{\n\t\t// \"startingBlock\": nil, // NA\n\t\t\"currentBlock\": hexutil.Uint64(status.SyncInfo.LatestBlockHeight),\n\t\t// \"highestBlock\": nil, // NA\n\t\t// \"pulledStates\": nil, // NA\n\t\t// \"knownStates\": nil, // NA\n\t}, nil\n}", "title": "" }, { "docid": "e4a0dc178f5d3e7390f0f5ce816751f0", "score": "0.49640006", "text": "func ClientSync(client RPCClient) {\n\t//panic(\"todo\")\n\tindexPath := client.BaseDir + \"/index.txt\"\n\tlocalIndexMap := make(map[string]FileMetaData)\n\tfileDeleteMap := make(map[string]bool)\n\n\t//create index.txt if not exist\n\tCreateIndex(client)\n\tGetIndexMap(&localIndexMap, &fileDeleteMap, indexPath) // read index.txt to map\n\n\t//scan local file, check for any file modification or new file \n\tfileNameUpdate := ScanCheckLocalIndex(localIndexMap, fileDeleteMap ,client)\n\tlog.Println(\"fileNameUpdate: \",fileNameUpdate)\n\n\n\tserverFileInfoMap := new(map[string]FileMetaData) //new return a pointer\n\tsucc := new(bool)\n\tclient.GetFileInfoMap(succ, serverFileInfoMap) // rpc call, get index map from server\n\tPrintMetaMap(*serverFileInfoMap)\n\n\t// Compare local index with remote server index\n\tfor file_name, file_meta_data := range *serverFileInfoMap {\n\t\tif _, ok := localIndexMap[file_name]; !ok { //if file does not exist in local index, server has new file :)\n\t\t\t//download file block 
from server\n\t\t\thashlist := file_meta_data.BlockHashList\n\t\t\tvar blockList []Block\n\t\t\tDownloadBlock(hashlist, &blockList, client)\n\t\t\tlog.Println(\"blockList: \",blockList)\n\t\t\tJoinBlockAndDownloadFile(blockList, file_name, client)\n\t\t\t\n\t\t\t\n\t\t}\n\t}\n\n\t// client upload new file to server, if fail, download the file from server and update indexMap\n\tfor name,_ := range fileNameUpdate{\n\t\tvar latestVersion = new(int)\n\t\tlocal_fmData := localIndexMap[name]\n\t\terr := client.UpdateFile(&local_fmData, latestVersion)\n\t\tif err != nil {\n\t\t\t// if error, meaning version mismatch. Download the file from server\n\t\t\tfile_meta_data := (*serverFileInfoMap)[name]\n\t\t\thashlist := file_meta_data.BlockHashList\n\t\t\tvar blockList []Block\n\t\t\tDownloadBlock(hashlist, &blockList,client)\n\t\t\tJoinBlockAndDownloadFile(blockList, name, client)\n\t\t\tlocalIndexMap[name] = FileMetaData{name,*latestVersion,hashlist}\n\n\t\t}else{\n\t\t\t// upload to block of file to server\n\t\t\tblockList := GetFileBlock(name, client)\n\t\t\tfor _, block := range blockList{\n\t\t\t\tlog.Println(\"block data: \",block.BlockData)\n\t\t\t\terr = client.PutBlock(block, succ)\n\t\t\t\tPrintError(err, \"Put Block\")\n\t\t}\n\t\t}\n\n\t}\n\tclient.GetFileInfoMap(succ, serverFileInfoMap)\n\tPrintMetaMap(*serverFileInfoMap)\n\n\t//Update the hashlist of corresponding file, rewrite index.txt\n\tUpdateIndexFile(indexPath, localIndexMap, fileNameUpdate)\n\t\n\t\n\n}", "title": "" }, { "docid": "19704d519a641cebe35b17d0de8d43d6", "score": "0.49637875", "text": "func (i *infraClient) syncConfiguration(ctx context.Context) error {\n\ti.initPorts()\n\trouterEntities := make([]RouterEntity, 0)\n\tbrokerEntities := make(map[Host][]BrokerEntity, 0)\n\n\tfor _, endpoint := range i.endpoints {\n\t\tif endpoint.Status.Phase == v1.MessagingEndpointActive && endpoint.Status.Host != \"\" {\n\t\t\tfor _, address := range i.addresses {\n\t\t\t\tif endpoint.Namespace == address.Namespace {\n\t\t\t\t\tresultRouter, err := i.buildRouterAddressEntities(endpoint, address)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\n\t\t\t\t\tresultBroker, err := i.buildBrokerAddressEntities(endpoint, address)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t\tfor broker, entities := range resultBroker {\n\t\t\t\t\t\tif brokerEntities[broker] == nil {\n\t\t\t\t\t\t\tbrokerEntities[broker] = entities\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tbrokerEntities[broker] = append(brokerEntities[broker], entities...)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\trouterEntities = append(routerEntities, resultRouter...)\n\t\t\t\t}\n\t\t\t}\n\t\t\te, err := i.buildRouterEndpointEntities(endpoint)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\trouterEntities = append(routerEntities, e...)\n\t\t}\n\t}\n\n\treturn i.syncEntities(ctx, routerEntities, brokerEntities)\n}", "title": "" }, { "docid": "670b903857b9eb88b282d132b46f8f12", "score": "0.49532634", "text": "func (client ManagementStationClient) synchronizeMirrors(ctx context.Context, request common.OCIRequest, binaryReqBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (common.OCIResponse, error) {\n\n\thttpRequest, err := request.HTTPRequest(http.MethodPost, \"/managementStations/{managementStationId}/actions/synchronizeMirrors\", binaryReqBody, extraHeaders)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar response SynchronizeMirrorsResponse\n\tvar httpResponse *http.Response\n\thttpResponse, err = client.Call(ctx, 
&httpRequest)\n\tdefer common.CloseBodyIfValid(httpResponse)\n\tresponse.RawResponse = httpResponse\n\tif err != nil {\n\t\tapiReferenceLink := \"https://docs.oracle.com/iaas/api/#/en/osmh/20220901/ManagementStation/SynchronizeMirrors\"\n\t\terr = common.PostProcessServiceError(err, \"ManagementStation\", \"SynchronizeMirrors\", apiReferenceLink)\n\t\treturn response, err\n\t}\n\n\terr = common.UnmarshalResponse(httpResponse, &response)\n\treturn response, err\n}", "title": "" }, { "docid": "0418bbdef33b6e82fea6222cae4e274a", "score": "0.49438316", "text": "func (c *BGPController) syncPeers(l log.Logger) error {\n\tvar (\n\t\terrs int\n\t\tneedUpdateAds bool\n\t)\n\tfor _, p := range c.Peers {\n\t\t// First, determine if the peering should be active for this\n\t\t// node.\n\t\tshouldRun := false\n\t\tfor _, ns := range p.cfg.NodeSelectors {\n\t\t\tif ns.Matches(c.nodeLabels) {\n\t\t\t\tshouldRun = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\t// Now, compare current state to intended state, and correct.\n\t\tif p.BGP != nil && !shouldRun {\n\t\t\t// Oops, session is running but shouldn't be. Shut it down.\n\t\t\tl.Log(\"event\", \"peerRemoved\", \"peer\", p.cfg.Addr, \"reason\", \"filteredByNodeSelector\", \"msg\", \"peer deconfigured, closing BGP session\")\n\t\t\tif err := p.BGP.Close(); err != nil {\n\t\t\t\tl.Log(\"op\", \"syncPeers\", \"error\", err, \"peer\", p.cfg.Addr, \"msg\", \"failed to shut down BGP session\")\n\t\t\t}\n\t\t\tp.BGP = nil\n\t\t} else if p.BGP == nil && shouldRun {\n\t\t\t// Session doesn't exist, but should be running. Create\n\t\t\t// it.\n\t\t\tl.Log(\"event\", \"peerAdded\", \"peer\", p.cfg.Addr, \"msg\", \"peer configured, starting BGP session\")\n\t\t\tvar routerID net.IP\n\t\t\tif p.cfg.RouterID != nil {\n\t\t\t\trouterID = p.cfg.RouterID\n\t\t\t}\n\t\t\ts, err := newBGP(c.Logger, net.JoinHostPort(p.cfg.Addr.String(), strconv.Itoa(int(p.cfg.Port))), p.cfg.MyASN, routerID, p.cfg.ASN, p.cfg.HoldTime, p.cfg.Password, c.MyNode)\n\t\t\tif err != nil {\n\t\t\t\tl.Log(\"op\", \"syncPeers\", \"error\", err, \"peer\", p.cfg.Addr, \"msg\", \"failed to create BGP session\")\n\t\t\t\terrs++\n\t\t\t} else {\n\t\t\t\tp.BGP = s\n\t\t\t\tneedUpdateAds = true\n\t\t\t}\n\t\t}\n\t}\n\tif needUpdateAds {\n\t\t// Some new sessions came up, resync advertisement state.\n\t\tif err := c.UpdateAds(); err != nil {\n\t\t\tl.Log(\"op\", \"updateAds\", \"error\", err, \"msg\", \"failed to update BGP advertisements\")\n\t\t\treturn err\n\t\t}\n\t}\n\tif errs > 0 {\n\t\treturn fmt.Errorf(\"%d BGP sessions failed to start\", errs)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "5f331ca8e5fca6094440f263ef57377f", "score": "0.49396986", "text": "func (d *SyncServerDatabase) IncrementalSync(userID string, fromPos, toPos types.StreamPosition, numRecentEventsPerRoom int) (res *types.Response, returnErr error) {\n\treturnErr = runTransaction(d.db, func(txn *sql.Tx) error {\n\t\t// Work out which rooms to return in the response. 
This is done by getting not only the currently\n\t\t// joined rooms, but also which rooms have membership transitions for this user between the 2 stream positions.\n\t\t// This works out what the 'state' key should be for each room as well as which membership block\n\t\t// to put the room into.\n\t\tdeltas, err := d.getStateDeltas(txn, fromPos, toPos, userID)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tres = types.NewResponse(toPos)\n\t\tfor _, delta := range deltas {\n\t\t\tendPos := toPos\n\t\t\tif delta.membershipPos > 0 && delta.membership == \"leave\" {\n\t\t\t\t// make sure we don't leak recent events after the leave event.\n\t\t\t\t// TODO: History visibility makes this somewhat complex to handle correctly. For example:\n\t\t\t\t// TODO: This doesn't work for join -> leave in a single /sync request (see events prior to join).\n\t\t\t\t// TODO: This will fail on join -> leave -> sensitive msg -> join -> leave\n\t\t\t\t// in a single /sync request\n\t\t\t\t// This is all \"okay\" assuming history_visibility == \"shared\" which it is by default.\n\t\t\t\tendPos = delta.membershipPos\n\t\t\t}\n\t\t\trecentStreamEvents, err := d.events.RecentEventsInRoom(txn, delta.roomID, fromPos, endPos, numRecentEventsPerRoom)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\trecentEvents := streamEventsToEvents(recentStreamEvents)\n\t\t\tdelta.stateEvents = removeDuplicates(delta.stateEvents, recentEvents) // roll back\n\n\t\t\tswitch delta.membership {\n\t\t\tcase \"join\":\n\t\t\t\tjr := types.NewJoinResponse()\n\t\t\t\tjr.Timeline.Events = gomatrixserverlib.ToClientEvents(recentEvents, gomatrixserverlib.FormatSync)\n\t\t\t\tjr.Timeline.Limited = false // TODO: if len(events) >= numRecents + 1 and then set limited:true\n\t\t\t\tjr.State.Events = gomatrixserverlib.ToClientEvents(delta.stateEvents, gomatrixserverlib.FormatSync)\n\t\t\t\tres.Rooms.Join[delta.roomID] = *jr\n\t\t\tcase \"leave\":\n\t\t\t\tfallthrough // transitions to leave are the same as ban\n\t\t\tcase \"ban\":\n\t\t\t\t// TODO: recentEvents may contain events that this user is not allowed to see because they are\n\t\t\t\t// no longer in the room.\n\t\t\t\tlr := types.NewLeaveResponse()\n\t\t\t\tlr.Timeline.Events = gomatrixserverlib.ToClientEvents(recentEvents, gomatrixserverlib.FormatSync)\n\t\t\t\tlr.Timeline.Limited = false // TODO: if len(events) >= numRecents + 1 and then set limited:true\n\t\t\t\tlr.State.Events = gomatrixserverlib.ToClientEvents(delta.stateEvents, gomatrixserverlib.FormatSync)\n\t\t\t\tres.Rooms.Leave[delta.roomID] = *lr\n\t\t\t}\n\t\t}\n\n\t\t// TODO: This should be done in getStateDeltas\n\t\treturn d.addInvitesToResponse(txn, userID, res)\n\t})\n\treturn\n}", "title": "" }, { "docid": "86217c26aee8c34f57898ec06948575e", "score": "0.49327782", "text": "func (s *remoteCNIserver) resync() error {\n\ts.Lock()\n\tdefer s.Unlock()\n\n\terr := s.configureVswitchConnectivity()\n\tif err != nil {\n\t\ts.Logger.Error(err)\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "0a7bb239c92c0cea8f15d3b2d649daea", "score": "0.49311775", "text": "func Sync(src string, dest string, recurse bool, filters ...Filter) error {\n\tsrc, dest, err := resolveAbs(src, dest)\n\n\tif err == nil {\n\t\terr = copy(src, dest, NewFilterListFilter(filters...), recurse, 0)\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "c1ae40fd55ff280ef5f65d15e2497503", "score": "0.49277395", "text": "func (sm *SyncManager) updateSyncPeer(dcSyncPeer bool) {\n\tlog.Infof(\"Updating sync peer, no progress for: 
%v\",\n\t\ttime.Since(sm.lastProgressTime))\n\n\t// First, disconnect the current sync peer if requested.\n\tif dcSyncPeer {\n\t\tsm.syncPeer.Disconnect()\n\t}\n\n\t// Reset any header state before we choose our next active sync peer.\n\tif sm.headersFirstMode {\n\t\tbest := sm.chain.BestSnapshot()\n\t\tsm.resetHeaderState(&best.Hash, best.Height)\n\t}\n\n\tsm.syncPeer = nil\n\tsm.startSync()\n}", "title": "" }, { "docid": "8fb0c997933a0bd069cdc7318ab9b212", "score": "0.49230474", "text": "func (ws writeSyncer) Sync() error {\n\treturn nil\n}", "title": "" }, { "docid": "8fb0c997933a0bd069cdc7318ab9b212", "score": "0.49230474", "text": "func (ws writeSyncer) Sync() error {\n\treturn nil\n}", "title": "" }, { "docid": "32af72f1f4071147034e30bf4211da02", "score": "0.4913339", "text": "func (srv *Server) consensusSynchronizeHandler(w http.ResponseWriter, req *http.Request) {\n\tpeers := srv.gateway.Peers()\n\tif len(peers) == 0 {\n\t\twriteError(w, \"No peers available for syncing\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\tgo srv.cs.Synchronize(peers[0])\n\n\twriteSuccess(w)\n}", "title": "" }, { "docid": "d882b9bc8ca8e360d8b6a47de018a839", "score": "0.49125588", "text": "func (pm *ProtocolManager) syncTransactions(p *peer, txids []common.Hash) {\n\tif len(txids) == 0 {\n\t\treturn\n\t}\n\tselect {\n\tcase pm.txsyncCh <- &txsync{p, txids}:\n\tcase <-pm.quitSync:\n\t}\n}", "title": "" }, { "docid": "f88a1253161b5a72d216ad68128e8aff", "score": "0.4910906", "text": "func (s *Sync) createSyncStream() {\n\tfor {\n\t\tselect {\n\t\tcase _, _ = <-s.stop:\n\t\t\treturn\n\t\tdefault:\n\t\t\ts.findSyncPeer()\n\t\t\treturn\n\t\t}\n\t}\n}", "title": "" }, { "docid": "3204242c496ef5e2bdbf6f1fb2cd3cf6", "score": "0.49095753", "text": "func Sync(ping_message PingMessage, pong_message *PongMessage) bool {\n\n\n\t// send the RPC request, wait for the reply.\n\treturn call(\"Master.Sync\", &ping_message, pong_message)\n\n}", "title": "" }, { "docid": "5bb4956f4f2d4b5392c7abf4fb7e9bb3", "score": "0.49086785", "text": "func (r *ChannelRouter) syncChannelGraph(syncReq *syncRequest) error {\n\ttargetNode := syncReq.node\n\n\t// TODO(roasbeef): need to also store sig data in db\n\t// * will be nice when we switch to pairing sigs would only need one ^_^\n\n\t// We'll collate all the gathered routing messages into a single slice\n\t// containing all the messages to be sent to the target peer.\n\tvar announceMessages []lnwire.Message\n\n\t// First run through all the vertexes in the graph, retrieving the data\n\t// for the announcement we originally retrieved.\n\tvar numNodes uint32\n\tif err := r.cfg.Graph.ForEachNode(func(node *channeldb.LightningNode) error {\n\t\talias, err := lnwire.NewAlias(node.Alias)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tann := &lnwire.NodeAnnouncement{\n\t\t\tSignature: r.fakeSig,\n\t\t\tTimestamp: uint32(node.LastUpdate.Unix()),\n\t\t\tAddress: node.Address,\n\t\t\tNodeID: node.PubKey,\n\t\t\tAlias: alias,\n\t\t}\n\t\tannounceMessages = append(announceMessages, ann)\n\n\t\tnumNodes++\n\n\t\treturn nil\n\t}); err != nil {\n\t\treturn err\n\t}\n\n\t// With the vertexes gathered, we'll no retrieve the initial\n\t// announcement, as well as the latest channel update announcement for\n\t// both of the directed edges that make up the channel.\n\t// TODO(roasbeef): multi-sig keys should also be stored in DB\n\tvar numEdges uint32\n\tif err := r.cfg.Graph.ForEachChannel(func(e1, e2 *channeldb.ChannelEdge) error {\n\t\t// First we'll need to obtain the channel ID for the 
channel\n\t\t// advertisement. As an edge may not be advertised, we'll grab\n\t\t// the channel ID from the edge that was.\n\t\tvar chanID lnwire.ChannelID\n\t\tswitch {\n\t\tcase e1 != nil:\n\t\t\tchanID = lnwire.NewChanIDFromInt(e1.ChannelID)\n\t\tcase e2 != nil:\n\t\t\tchanID = lnwire.NewChanIDFromInt(e2.ChannelID)\n\t\tcase e1 == nil && e2 == nil:\n\t\t\treturn nil\n\t\tdefault:\n\t\t\tchanID = lnwire.NewChanIDFromInt(e1.ChannelID)\n\t\t}\n\n\t\tchanAnn := &lnwire.ChannelAnnouncement{\n\t\t\tFirstNodeSig: r.fakeSig,\n\t\t\tSecondNodeSig: r.fakeSig,\n\t\t\tChannelID: chanID,\n\t\t\tFirstBitcoinSig: r.fakeSig,\n\t\t\tSecondBitcoinSig: r.fakeSig,\n\t\t}\n\n\t\t// If the edge was advertised, then we'll use the node's\n\t\t// identity within the announcement we send to the sync node.\n\t\t// Otherwise, we'll fill in a dummy key.\n\t\t//\n\t\t// TODO(roasbeef): both else clauses need to be removed\n\t\t// once we fully validate, andrew's PR will reconcile\n\t\t// this\n\t\tif e1 != nil {\n\t\t\tchanAnn.FirstNodeID = e1.Node.PubKey\n\t\t\tchanAnn.FirstBitcoinKey = e1.Node.PubKey\n\t\t} else {\n\t\t\tchanAnn.FirstNodeID = e2.Node.PubKey\n\t\t\tchanAnn.FirstBitcoinKey = e2.Node.PubKey\n\t\t}\n\t\tif e2 != nil {\n\t\t\tchanAnn.SecondNodeID = e2.Node.PubKey\n\t\t\tchanAnn.SecondBitcoinKey = e2.Node.PubKey\n\t\t} else {\n\t\t\tchanAnn.SecondNodeID = e1.Node.PubKey\n\t\t\tchanAnn.SecondBitcoinKey = e1.Node.PubKey\n\t\t}\n\n\t\t// We'll unconditionally queue the channel's existence proof as\n\t\t// it will need to be processed before either of the channel\n\t\t// update announcements.\n\t\tannounceMessages = append(announceMessages, chanAnn)\n\n\t\t// Since it's up to a node's policy as to whether they\n\t\t// advertise the edge in dire direction, we don't create an\n\t\t// advertisement if the edge is nil.\n\t\tif e1 != nil {\n\t\t\tannounceMessages = append(announceMessages, &lnwire.ChannelUpdateAnnouncement{\n\t\t\t\tSignature: r.fakeSig,\n\t\t\t\tChannelID: chanID,\n\t\t\t\tTimestamp: uint32(e1.LastUpdate.Unix()),\n\t\t\t\tFlags: 0,\n\t\t\t\tExpiry: e1.Expiry,\n\t\t\t\tHtlcMinimumMstat: uint32(e1.MinHTLC),\n\t\t\t\tFeeBaseMstat: uint32(e1.FeeBaseMSat),\n\t\t\t\tFeeProportionalMillionths: uint32(e1.FeeProportionalMillionths),\n\t\t\t})\n\t\t}\n\t\tif e2 != nil {\n\t\t\tannounceMessages = append(announceMessages, &lnwire.ChannelUpdateAnnouncement{\n\t\t\t\tSignature: r.fakeSig,\n\t\t\t\tChannelID: chanID,\n\t\t\t\tTimestamp: uint32(e2.LastUpdate.Unix()),\n\t\t\t\tFlags: 1,\n\t\t\t\tExpiry: e2.Expiry,\n\t\t\t\tHtlcMinimumMstat: uint32(e2.MinHTLC),\n\t\t\t\tFeeBaseMstat: uint32(e2.FeeBaseMSat),\n\t\t\t\tFeeProportionalMillionths: uint32(e2.FeeProportionalMillionths),\n\t\t\t})\n\t\t}\n\n\t\tnumEdges++\n\t\treturn nil\n\t}); err != nil && err != channeldb.ErrGraphNoEdgesFound {\n\t\tlog.Errorf(\"unable to sync edges w/ peer: %v\", err)\n\t\treturn err\n\t}\n\n\tlog.Infof(\"Syncing channel graph state with %x, sending %v \"+\n\t\t\"nodes and %v edges\", targetNode.SerializeCompressed(),\n\t\tnumNodes, numEdges)\n\n\t// With all the announcement messages gathered, send them all in a\n\t// single batch to the target peer.\n\treturn r.cfg.SendMessages(targetNode, announceMessages...)\n}", "title": "" }, { "docid": "12a2b48071e1900ffb6809c138abfafb", "score": "0.49070638", "text": "func SyncSuerChat() {\n\tchatlog.Sync()\n}", "title": "" }, { "docid": "713586842198c570a0fbc941d760a912", "score": "0.4899303", "text": "func (v *VCHub) Sync() {\n\tv.Log.Debugf(\"VCHub Sync starting\")\n\t// Bring useg to 
VCHub struct\n\tv.Wg.Add(1)\n\tgo func() {\n\t\tdefer v.Wg.Done()\n\t\tv.sync()\n\t}()\n}", "title": "" }, { "docid": "08271229c35adf29c3033d584b9b3a9a", "score": "0.48977372", "text": "func SynchronizationLoop(dbo *database.AnchorDatabaseOverlay) error {\n\tfmt.Printf(\"SynchronizationLoop\\n\")\n\ti := 0\n\tfor {\n\t\t//Iterate until we are fully in synch with all of the networks\n\t\t//Repeat iteration until there is nothing left to synch\n\t\t//to make sure all of the networks are in synch at the same time\n\t\t//(nothing has drifted apart while we were busy with other systems)\n\t\tfmt.Printf(\"Loop %v\\n\", i)\n\t\tblockCount, err := factom.SynchronizeFactomData(dbo)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Printf(\"blockCount - %v\\n\", blockCount)\n\t\t/*\n\t\t\ttxCount, err := ethereum.SynchronizeEthereumData(dbo)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Printf(\"txCount - %v\\n\", txCount)\n\t\t*/\n\t\tbtcCount, err := bitcoin.SynchronizeBitcoinData(dbo)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Printf(\"btcCount - %v\\n\", btcCount)\n\n\t\t//if (blockCount + txCount + btcCount) == 0 {\n\t\tif (blockCount + btcCount) == 0 {\n\t\t\t//if (blockCount + txCount) == 0 {\n\t\t\tbreak\n\t\t}\n\t\ti++\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "429e26552a192b92c47ad707999ca188", "score": "0.48960233", "text": "func (sm *SyncManager) isSyncCandidate(peer *peerpkg.Peer) bool {\n\t// Typically a peer is not a candidate for sync if it's not a full node,\n\t// however regression test is special in that the regression tool is\n\t// not a full node and still needs to be considered a sync candidate.\n\tif sm.chainParams == &chaincfg.RegressionNetParams {\n\t\t// The peer is not a candidate if it's not coming from localhost\n\t\t// or the hostname can't be determined for some reason.\n\t\thost, _, err := net.SplitHostPort(peer.Addr())\n\t\tif err != nil {\n\t\t\treturn false\n\t\t}\n\n\t\tif host != \"127.0.0.1\" && host != \"localhost\" {\n\t\t\treturn false\n\t\t}\n\t} else {\n\t\t// The peer is not a candidate for sync if it's not a full\n\t\t// node. 
Additionally, if the segwit soft-fork package has\n\t\t// activated, then the peer must also be upgraded.\n\t\tsegwitActive, err := sm.chain.IsDeploymentActive(chaincfg.DeploymentSegwit)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Unable to query for segwit \"+\n\t\t\t\t\"soft-fork state: %v\", err)\n\t\t}\n\t\tnodeServices := peer.Services()\n\t\tif nodeServices&wire.SFNodeNetwork != wire.SFNodeNetwork ||\n\t\t\t(segwitActive && !peer.IsWitnessEnabled()) {\n\t\t\treturn false\n\t\t}\n\t}\n\n\t// Candidate if all checks passed.\n\treturn true\n}", "title": "" }, { "docid": "1aaf7d396b4833b4458c24cde5ee62e1", "score": "0.4895221", "text": "func TestRPCSendBlockSendsOnlyNecessaryBlocks(t *testing.T) {\n\tif testing.Short() {\n\t\tt.SkipNow()\n\t}\n\n\t// Create the \"remote\" peer.\n\tcst, err := blankConsensusSetTester(t.Name()+\"- remote\", modules.ProdDependencies)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer cst.Close()\n\t// Create the \"local\" peer.\n\t//\n\t// We create this peer manually (not using blankConsensusSetTester) so that we\n\t// can connect it to the remote peer before calling consensus.New so as to\n\t// prevent SendBlocks from triggering on Connect.\n\ttestdir := build.TempDir(modules.ConsensusDir, t.Name()+\" - local\")\n\tg, err := gateway.New(\"localhost:0\", false, filepath.Join(testdir, modules.GatewayDir), false)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer g.Close()\n\terr = g.Connect(cst.cs.gateway.Address())\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tcs, err := New(g, false, filepath.Join(testdir, modules.ConsensusDir), false)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer cs.Close()\n\n\t// Add a few initial blocks to both consensus sets. These are the blocks we\n\t// want to make sure SendBlocks is not sending unnecessarily as both parties\n\t// already have them.\n\tknownBlocks := make(map[types.BlockID]struct{})\n\tfor i := 0; i < 20; i++ {\n\t\tb, err := cst.miner.FindBlock()\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\t_, err = cst.cs.managedAcceptBlocks([]types.Block{b})\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\t_, err = cs.managedAcceptBlocks([]types.Block{b})\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\tknownBlocks[b.ID()] = struct{}{}\n\t}\n\n\t// Add a few blocks to only the remote peer and store which blocks we add.\n\taddedBlocks := make(map[types.BlockID]struct{})\n\tfor i := 0; i < 20; i++ {\n\t\tb, err := cst.miner.FindBlock()\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\t_, err = cst.cs.managedAcceptBlocks([]types.Block{b})\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\taddedBlocks[b.ID()] = struct{}{}\n\t}\n\n\terr = cs.gateway.RPC(cst.cs.gateway.Address(), modules.SendBlocksCmd, func(conn modules.PeerConn) error {\n\t\t// Get blockIDs to send.\n\t\tvar history [32]types.BlockID\n\t\tcs.mu.RLock()\n\t\terr := cs.db.View(func(tx *bolt.Tx) error {\n\t\t\thistory = blockHistory(tx)\n\t\t\treturn nil\n\t\t})\n\t\tcs.mu.RUnlock()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Send the block ids.\n\t\tif err := encoding.WriteObject(conn, history); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tmoreAvailable := true\n\t\tfor moreAvailable {\n\t\t\t// Read a slice of blocks from the wire.\n\t\t\tvar newBlocks []types.Block\n\t\t\tif err := encoding.ReadObject(conn, &newBlocks, uint64(MaxCatchUpBlocks)*types.BlockSizeLimit); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err := encoding.ReadObject(conn, &moreAvailable, 1); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t// Check if 
the block needed to be sent.\n\t\t\tfor _, newB := range newBlocks {\n\t\t\t\t_, ok := knownBlocks[newB.ID()]\n\t\t\t\tif ok {\n\t\t\t\t\tt.Error(\"SendBlocks sent an unnecessary block that the caller already had\")\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\t_, ok = addedBlocks[newB.ID()]\n\t\t\t\tif !ok {\n\t\t\t\t\tt.Error(\"SendBlocks sent an unnecessary block that the caller did not have\")\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n}", "title": "" }, { "docid": "39dc11ff7e6bd3cce9674d4dd79b54f5", "score": "0.48865598", "text": "func (s *Sync) findSyncPeer() {\n\tt := time.NewTicker(time.Microsecond)\n\tdefer t.Stop()\n\n\tfor {\n\t\tselect {\n\t\tcase _, _ = <-s.stop:\n\t\t\treturn\n\t\tcase _ = <-t.C:\n\t\t\ts.curPeer = s.peers.RandomPeer()\n\t\t\tif s.curPeer == nil {\n\t\t\t\t//log.Warn(\"No available peers were found, wait...\")\n\t\t\t} else {\n\t\t\t\t//log.Info(\"Find an available peer\", \"peer\", bm.syncPeer)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "9e66dfc314cdf9e91152b026f87d8348", "score": "0.48834673", "text": "func (rs *routeSyncer) syncLocalRouteTable() {\n\trs.mutex.Lock()\n\tdefer rs.mutex.Unlock()\n\tklog.V(2).Infof(\"Running local route table synchronization\")\n\tfor dst, route := range rs.routeTableStateMap {\n\t\tklog.V(3).Infof(\"Syncing route: %s\", dst)\n\t\terr := rs.routeReplacer(route)\n\t\tif err != nil {\n\t\t\tklog.Errorf(\"Route could not be replaced due to : \" + err.Error())\n\t\t}\n\t}\n}", "title": "" }, { "docid": "974c5d3701b5bb8deae4c435daac80a2", "score": "0.48830545", "text": "func (c *Client) Sync(ci *cid.Cid, local bool) (api.GlobalPinInfo, error) {\n\tvar gpi api.GlobalPinInfoSerial\n\terr := c.do(\"POST\", fmt.Sprintf(\"/pins/%s/sync?local=%t\", ci.String(), local), nil, &gpi)\n\treturn gpi.ToGlobalPinInfo(), err\n}", "title": "" }, { "docid": "9e6bb4396716351ceb022b4e264caf4f", "score": "0.4881875", "text": "func (db *DB) Sync() error {\n\n\t// If there is no file associated\n\t// with this database then ignore\n\t// this method call.\n\n\tif db.file.pntr == nil {\n\t\treturn ErrDbMemoryOnly\n\t}\n\n\t// If the database is currently\n\t// already syncing, then ignore\n\t// the sync this time around.\n\n\tif db.wait.sync {\n\t\treturn ErrDbAlreadySyncing\n\t}\n\n\t// Mark that the database is now\n\t// syncing so that other calls\n\t// to sync will be ignored.\n\n\tdb.wait.sync = true\n\n\t// Ensure that when this method\n\t// is finished we mark that the\n\t// database is not syncing.\n\n\tdefer func() {\n\t\tdb.wait.sync = false\n\t}()\n\n\t// Obtain a lock on the buffer to\n\t// prevent changes while we flush\n\t// the buffer to the sender.\n\n\tdb.buff.lock.Lock()\n\tdefer db.buff.lock.Unlock()\n\n\t// Obtain a lock on the sender to\n\t// prevent changes while we flush\n\t// the sender to the file.\n\n\tdb.send.lock.Lock()\n\tdefer db.send.lock.Unlock()\n\n\t// Obtain a lock on the file to\n\t// prevent other threads from\n\t// syncing to the file.\n\n\tdb.file.lock.Lock()\n\tdefer db.file.lock.Unlock()\n\n\t// Flush the buffer to the file\n\t// and ensure that the file is\n\t// synced to storage in the OS.\n\n\tif _, err := db.buff.pntr.WriteTo(db.send.pntr); err != nil {\n\t\treturn err\n\t}\n\n\tif err := db.send.pntr.Flush(); err != nil {\n\t\treturn err\n\t}\n\n\tif err := db.file.pntr.Sync(); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n\n}", "title": "" }, { "docid": "dbde985e8175bbe4923f498901c04420", "score": "0.4879467", "text": "func 
RunSyncSet(logger *log.Logger, source Source, destination Destination, config Config) error {\n\tsourcePeople, err := source.ListUsers(GetSourceAttributes(config.AttributeMap))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif len(sourcePeople) == 0 {\n\t\treturn errors.New(\"no people found in source\")\n\t}\n\tlogger.Printf(\" Found %v people in source\", len(sourcePeople))\n\n\t// remap source people to destination attributes for comparison\n\tsourcePeople, err = RemapToDestinationAttributes(logger, sourcePeople, config.AttributeMap)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdestinationPeople, err := destination.ListUsers(GetDestinationAttributes(config.AttributeMap))\n\tif err != nil {\n\t\treturn err\n\t}\n\tlogger.Printf(\" Found %v people in destination\", len(destinationPeople))\n\n\tchangeSet := GenerateChangeSet(logger, sourcePeople, destinationPeople, config)\n\n\tlogger.Printf(\"ChangeSet Plans: Create %d, Update %d, Delete %d\\n\",\n\t\tlen(changeSet.Create), len(changeSet.Update), len(changeSet.Delete))\n\n\t// If in DryRun mode only print out ChangeSet plans and return mocked change results based on plans\n\tif config.Runtime.DryRunMode {\n\t\tlogger.Println(\"Dry run mode enabled. Change set details follow:\")\n\t\tprintChangeSet(logger, changeSet)\n\t\treturn nil\n\t}\n\n\t// Create a channel to pass activity logs for printing\n\teventLog := make(chan EventLogItem, 50)\n\tgo processEventLog(logger, config.Alert, eventLog)\n\n\tresults := destination.ApplyChangeSet(changeSet, eventLog)\n\n\tlogger.Printf(\"Sync results: %v users added, %v users updated, %v users removed\\n\",\n\t\tresults.Created, results.Updated, results.Deleted)\n\n\tfor i := 0; i < 100; i++ {\n\t\ttime.Sleep(time.Millisecond * 10)\n\t\tif len(eventLog) == 0 {\n\t\t\tbreak\n\t\t}\n\t}\n\tclose(eventLog)\n\n\treturn nil\n}", "title": "" }, { "docid": "3f26036864f9f0c804081d73cee41c7e", "score": "0.4878966", "text": "func SyncFile(localPath string, remote string, timeout int) error {\n\tfileInfo, err := os.Stat(localPath)\n\tif err != nil {\n\t\tlog.Errorf(\"Failed to get size of source file: %s, err: %s\", localPath, err)\n\t\treturn err\n\t}\n\tfileSize := fileInfo.Size()\n\tdirectIO := (fileSize%Blocks == 0)\n\tlog.Infof(\"source file size: %d, setting up directIo: %v\", fileSize, directIO)\n\n\tvar fileIo FileIoProcessor\n\tif directIO {\n\t\tfileIo, err = NewDirectFileIoProcessor(localPath, os.O_RDONLY, 0)\n\t} else {\n\t\tfileIo, err = NewBufferedFileIoProcessor(localPath, os.O_RDONLY, 0)\n\t}\n\tif err != nil {\n\t\tlog.Error(\"Failed to open local source file:\", localPath)\n\t\treturn err\n\t}\n\tdefer fileIo.Close()\n\n\tclient := &syncClient{remote, timeout, localPath, fileSize, fileIo}\n\n\tdefer client.closeServer() // kill the server no matter success or not, best effort\n\n\terr = client.syncFileContent(fileIo, fileSize)\n\tif err != nil {\n\t\tlog.Errorf(\"syncFileContent failed: %s\", err)\n\t\treturn err\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "65f81677b82187b77d45b3befd93222b", "score": "0.4875809", "text": "func (b *appendFSM) onRecvPipelineSync() {\n\tb.mustState(stateRecvPipelineSync)\n\n\tvar rollToRegisters *pb.LabelSet\n\tb.rollToOffset, rollToRegisters, b.readThroughRev = b.pln.gatherSync()\n\n\tif b.err = b.pln.recvErr(); b.err == nil {\n\t\tb.err = b.pln.sendErr()\n\t}\n\taddTrace(b.ctx, \"gatherSync() => %d, %v, %d, err: %v\",\n\t\tb.rollToOffset, rollToRegisters, b.readThroughRev, b.err)\n\n\tif b.err != nil {\n\t\tgo b.pln.shutdown(true)\n\t\tb.pln = nil\n\t\tb.err 
= errors.WithMessage(b.err, \"gatherSync\")\n\t\tb.state = stateError\n\t\treturn\n\t}\n\n\tif b.rollToOffset != 0 {\n\t\t// Peer has a larger offset, or an equal offset with an incompatible\n\t\t// Fragment. Try again, proposing Spools roll forward to |rollToOffset|.\n\t\t// This time all peers should agree on the new Fragment.\n\t\tb.state = stateSendPipelineSync\n\n\t\tif rollToRegisters != nil {\n\t\t\tb.registers.Assign(rollToRegisters) // Take peer registers.\n\t\t}\n\t} else if b.readThroughRev != 0 {\n\t\t// Peer has a non-equivalent Route at a later Etcd revision.\n\t\tgo b.pln.shutdown(false)\n\t\tb.pln = nil\n\t\tb.state = stateResolve\n\t} else {\n\t\tb.state = stateUpdateAssignments\n\t}\n\treturn\n}", "title": "" }, { "docid": "5aba62148dc0103ea9090e3cbc730667", "score": "0.48743895", "text": "func (rsync *MongoRsync) RsyncIntents() error {\n\t// start up the progress bar manager\n\trsync.progressManager = progress.NewProgressBarManager(log.Writer(0), progressBarWaitTime)\n\trsync.progressManager.Start()\n\tdefer rsync.progressManager.Stop()\n\n\tlog.Logf(log.DebugLow, \"syncing up to %v collections in parallel\", rsync.OutputOptions.NumParallelCollections)\n\n\tif rsync.OutputOptions.NumParallelCollections > 0 {\n\t\tresultChan := make(chan error)\n\n\t\t// start a goroutine for each job thread\n\t\tfor i := 0; i < rsync.OutputOptions.NumParallelCollections; i++ {\n\t\t\tgo func(id int) {\n\t\t\t\tlog.Logf(log.DebugHigh, \"starting sync routine with id=%v\", id)\n\t\t\t\t// var ioBuf []byte\n\t\t\t\tfor {\n\t\t\t\t\tintent := rsync.manager.Pop()\n\t\t\t\t\tif intent == nil {\n\t\t\t\t\t\t// log.Logf(log.DebugHigh, \"ending rsync routine with id=%v, no more work to do\", id)\n\t\t\t\t\t\tresultChan <- nil // done\n\t\t\t\t\t\treturn\n\t\t\t\t\t}\n\t\t\t\t\t// log.Logf(log.DebugHigh, \"format......test.....%v.%v\", intent.DB, intent.C)\n\n\t\t\t\t\toplogFlag, err := rsync.RsyncIntent(intent)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tresultChan <- fmt.Errorf(\"%v: %v\", intent.Namespace(), err)\n\t\t\t\t\t\treturn\n\t\t\t\t\t}\n\t\t\t\t\tif !oplogFlag {\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\t\t\t\t\trsync.manager.Finish(intent)\n\t\t\t\t}\n\t\t\t}(i)\n\t\t}\n\n\t\t// wait until all goroutines are done or one of them errors out\n\t\tfor i := 0; i < rsync.OutputOptions.NumParallelCollections; i++ {\n\t\t\tif err := <-resultChan; err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\n\t// single-threaded\n\tfor {\n\t\tintent := rsync.manager.Pop()\n\t\tif intent == nil {\n\t\t\treturn nil\n\t\t}\n\t\toplogFlag, err := rsync.RsyncIntent(intent)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"%v: %v\", intent.Namespace(), err)\n\t\t}\n\t\tif !oplogFlag {\n\t\t\t// log.Logf(log.DebugHigh, \"format......test.....%v.%v\", intent.DB, intent.C)\n\t\t\tcontinue\n\t\t}\n\t\trsync.manager.Finish(intent)\n\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "de027e683bea45d7316ac9403f7ea58a", "score": "0.4873309", "text": "func (sim *SISimulation) Transmit(t int) {\n\tc := make(chan TransmissionEvent)\n\td := make(chan TransmissionPackage)\n\tvar wg sync.WaitGroup\n\t// Get hosts that are infected and determine pathogen pop size\n\t// Only hosts with an infected status can transmit\n\tvar infectedHosts []Host\n\tvar pathogenPopSizes []int\n\tfor hostID, host := range sim.HostMap() {\n\t\tif sim.HostStatus(hostID) == InfectedStatusCode {\n\t\t\tinfectedHosts = append(infectedHosts, host)\n\t\t\tpathogenPopSizes = append(pathogenPopSizes, host.PathogenPopSize())\n\t\t}\n\t}\n\t// 
Iterate using pre-assembled list of infected hosts\n\tfor i, host := range infectedHosts {\n\t\t// Iterate over host's neighbors and create a new goroutine\n\t\t// that determines whether pathogens transmit or not\n\t\thostID := host.ID()\n\t\tcount := pathogenPopSizes[i]\n\t\tnumMigrants := host.GetTransmissionModel().TransmissionSize()\n\t\ttransmissionProb := host.GetTransmissionModel().TransmissionProb()\n\t\tfor _, neighbor := range sim.HostNeighbors(hostID) {\n\t\t\tstatus := sim.HostStatus(neighbor.ID())\n\t\t\t// Overrides default transmission prob set in the config file\n\t\t\tif t := sim.HostConnection(hostID, neighbor.ID()); t > 0 {\n\t\t\t\ttransmissionProb = t\n\t\t\t}\n\t\t\tfor _, infectableStatus := range sim.InfectableStatuses() {\n\t\t\t\tif status == infectableStatus {\n\t\t\t\t\twg.Add(1)\n\t\t\t\t\tgo TransmitPathogens(sim.InstanceID(), t, host, neighbor, numMigrants, transmissionProb, count, c, d, &wg)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tgo func() {\n\t\twg.Wait()\n\t\tclose(c)\n\t\tclose(d)\n\t}()\n\t// Add the new pathogen to the destination host\n\t// and record\n\tvar wg2 sync.WaitGroup\n\twg2.Add(2)\n\tgo func() {\n\t\tfor t := range c {\n\t\t\tt.destination.AddPathogens(t.pathogen)\n\t\t}\n\t\twg2.Done()\n\t}()\n\tgo func() {\n\t\tif sim.logTransmission {\n\t\t\tsim.WriteTransmission(d)\n\t\t} else {\n\t\t\tfor range d {\n\t\t\t}\n\t\t}\n\t\twg2.Done()\n\t}()\n\twg2.Wait()\n}", "title": "" }, { "docid": "4d6864155e1edd38c982d38ba222b217", "score": "0.48596257", "text": "func (c *chainStore) RunSync(ctx context.Context) {\n\tl, err := c.Store.Last()\n\tif err != nil {\n\t\tc.l.Error(\"run_sync\", \"load\", \"last_beacon\", err)\n\t\treturn\n\t}\n\tcurrRound := c.ticker.CurrentRound()\n\toutCh, err := syncChain(ctx, c.l, c.safe, l, currRound, c.client)\n\tif err != nil {\n\t\tc.l.Error(\"error_sync\", err)\n\t\treturn\n\t}\n\tfor newB := range outCh {\n\t\tc.newBeaconCh <- newB\n\t}\n\treturn\n}", "title": "" }, { "docid": "c2f0291a55237abe089a26af6aadb954", "score": "0.48592412", "text": "func syncDest(localRoot string, localPath string, destRoot string, flatten bool) (string, error) {\n\trel, err := filepath.Rel(localRoot, localPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\t// On Windows, rel will be separated by \\, which is not correct inside the VM\n\trel = filepath.ToSlash(rel)\n\n\t// If flatten is set, dump everything into the same destination directory\n\tif flatten {\n\t\treturn path.Join(destRoot, filepath.Base(localPath)), nil\n\t}\n\treturn path.Join(destRoot, rel), nil\n}", "title": "" }, { "docid": "db0b96c9abfda11e37d02a93ae41886f", "score": "0.48546845", "text": "func (ts TorrentStore) Sync(_ map[model.InfoHash]model.TorrentStats) error {\n\tpanic(\"implement me\")\n}", "title": "" }, { "docid": "5cc873725e4611ed28160e42c590a1d9", "score": "0.48390627", "text": "func (scs *Service) ForceSyncForRemote(rc *model.RemoteCluster) {\n\tif rcs := scs.server.GetRemoteClusterService(); rcs == nil {\n\t\treturn\n\t}\n\n\t// fetch all channels shared with this remote.\n\topts := model.SharedChannelRemoteFilterOpts{\n\t\tRemoteId: rc.RemoteId,\n\t}\n\tscrs, err := scs.server.GetStore().SharedChannel().GetRemotes(opts)\n\tif err != nil {\n\t\tscs.server.GetLogger().Log(mlog.LvlSharedChannelServiceError, \"Failed to fetch shared channel remotes\",\n\t\t\tmlog.String(\"remote\", rc.DisplayName),\n\t\t\tmlog.String(\"remoteId\", rc.RemoteId),\n\t\t\tmlog.Err(err),\n\t\t)\n\t\treturn\n\t}\n\n\tfor _, scr := range scrs {\n\t\ttask := 
newSyncTask(scr.ChannelId, rc.RemoteId, nil)\n\t\ttask.schedule = time.Now().Add(NotifyMinimumDelay)\n\t\tscs.addTask(task)\n\t}\n}", "title": "" } ]
ae97ef27e9b0a818af4e0919cf2a8e2a
NewPipeline creates a pipeline from connection options. The telemetry policy, when enabled, will use the specified module and version info.
[ { "docid": "d2dad3762f0be4044af087d0b567b0e5", "score": "0.7316883", "text": "func NewPipeline(module, version string, cred azcore.TokenCredential, options *arm.ClientOptions) pipeline.Pipeline {\n\tif options == nil {\n\t\toptions = &arm.ClientOptions{}\n\t}\n\tep := options.Host\n\tif len(ep) == 0 {\n\t\tep = arm.AzurePublicCloud\n\t}\n\tperCallPolicies := []azpolicy.Policy{}\n\tif !options.DisableRPRegistration {\n\t\tregRPOpts := armpolicy.RegistrationOptions{ClientOptions: options.ClientOptions}\n\t\tperCallPolicies = append(perCallPolicies, NewRPRegistrationPolicy(string(ep), cred, &regRPOpts))\n\t}\n\tperRetryPolicies := []azpolicy.Policy{\n\t\tNewBearerTokenPolicy(cred, &armpolicy.BearerTokenOptions{\n\t\t\tScopes: []string{shared.EndpointToScope(string(ep))},\n\t\t\tAuxiliaryTenants: options.AuxiliaryTenants,\n\t\t}),\n\t}\n\treturn azruntime.NewPipeline(module, version, perCallPolicies, perRetryPolicies, &options.ClientOptions)\n}", "title": "" } ]
[ { "docid": "eb3318061325c68afcb164c2ab4d9855", "score": "0.712985", "text": "func NewPipeline(ctx *pulumi.Context,\n\tname string, args *PipelineArgs, opts ...pulumi.ResourceOption) (*Pipeline, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required arguments\")\n\t}\n\n\tif args.BootstrapConfiguration == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'BootstrapConfiguration'\")\n\t}\n\tif args.PipelineType == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'PipelineType'\")\n\t}\n\tif args.ResourceGroupName == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'ResourceGroupName'\")\n\t}\n\taliases := pulumi.Aliases([]pulumi.Alias{\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:devops/v20200713preview:Pipeline\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:devops:Pipeline\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:devops:Pipeline\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:devops/v20190701preview:Pipeline\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:devops/v20190701preview:Pipeline\"),\n\t\t},\n\t})\n\topts = append(opts, aliases)\n\tvar resource Pipeline\n\terr := ctx.RegisterResource(\"azure-native:devops/v20200713preview:Pipeline\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "title": "" }, { "docid": "3c1ca03d3a662d0d217a955fe05f9d3b", "score": "0.6798041", "text": "func New(opt ...Option) (p *Pipeline, err error) {\n\toptions := Options{}\n\tfor _, v := range opt {\n\t\tv(&options)\n\t}\n\tp = &Pipeline{\n\t\tOptions: options,\n\t\trunMutex: sync.Mutex{},\n\t\tstatus: STATUS_STOP,\n\t}\n\terr = p.init()\n\treturn\n}", "title": "" }, { "docid": "8ba15ae30c741a4287ff2157beab0860", "score": "0.66115224", "text": "func New() *Pipeline {\n\treturn &Pipeline{\n\t\tconfig: DefaultConfig(),\n\t}\n}", "title": "" }, { "docid": "bf06d02d7f9a1aa20bb1d6b14fea2aec", "score": "0.66097796", "text": "func New(ruleGetter ChannelRuleGetter) (*Pipeline, error) {\n\tp := &Pipeline{\n\t\truleGetter: ruleGetter,\n\t}\n\n\tif os.Getenv(\"GF_LIVE_PIPELINE_TRACE\") != \"\" {\n\t\t// Traces for development only at the moment.\n\t\t// Start local Jaeger and then run Grafana with GF_LIVE_PIPELINE_TRACE:\n\t\t// docker run --rm -it --name jaeger -e COLLECTOR_ZIPKIN_HOST_PORT=:9411 -p 5775:5775/udp -p 6831:6831/udp -p 6832:6832/udp -p 5778:5778 -p 16686:16686 -p 14268:14268 -p 14250:14250 -p 9411:9411 jaegertracing/all-in-one:1.26\n\t\t// Then visit http://localhost:16686/ where Jaeger UI is served.\n\t\ttp, err := tracerProvider(\"http://localhost:14268/api/traces\")\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\ttracer := tp.Tracer(\"gf.live.pipeline\")\n\t\tp.tracer = tracer\n\t}\n\n\tif os.Getenv(\"GF_LIVE_PIPELINE_DEV\") != \"\" {\n\t\tgo postTestData() // TODO: temporary for development, remove before merge.\n\t}\n\n\treturn p, nil\n}", "title": "" }, { "docid": "db162a7a8fea5fee07ff1a9e6159d216", "score": "0.6529816", "text": "func New() *Pipeline {\n\treturn &Pipeline{}\n}", "title": "" }, { "docid": "f4e363955ba774c25ef26808b726a518", "score": "0.6472153", "text": "func NewPipeline() *Pipeline {\n\treturn &Pipeline{\n\t\tmake(chan struct{}),\n\t\tsync.WaitGroup{},\n\t\tsync.Mutex{},\n\t\tnil,\n\t}\n}", "title": "" }, { "docid": "0debd670bfe53393308e5f8a3b1cbc45", "score": "0.63875866", "text": "func newBuildPipeline(t gaia.PipelineType) BuildPipeline {\n\tvar bP 
BuildPipeline\n\n\t// Create build pipeline for given pipeline type\n\tswitch t {\n\tcase gaia.PTypeGolang:\n\t\tbP = &BuildPipelineGolang{\n\t\t\tType: t,\n\t\t}\n\tcase gaia.PTypeJava:\n\t\tbP = &BuildPipelineJava{\n\t\t\tType: t,\n\t\t}\n\tcase gaia.PTypePython:\n\t\tbP = &BuildPipelinePython{\n\t\t\tType: t,\n\t\t}\n\tcase gaia.PTypeCpp:\n\t\tbP = &BuildPipelineCpp{\n\t\t\tType: t,\n\t\t}\n\tcase gaia.PTypeRuby:\n\t\tbP = &BuildPipelineRuby{\n\t\t\tType: t,\n\t\t}\n\tcase gaia.PTypeNodeJS:\n\t\tbP = &BuildPipelineNodeJS{\n\t\t\tType: t,\n\t\t}\n\t}\n\n\treturn bP\n}", "title": "" }, { "docid": "afba0c56d0d4854050bec14fcb55aef2", "score": "0.6310497", "text": "func NewPipeline() Pipeline {\n\n\tp := &pipeline{}\n\tp.head = newHandlerContext(p, headHandler{}, nil, nil)\n\tp.tail = newHandlerContext(p, tailHandler{}, nil, nil)\n\n\tp.head.next = p.tail\n\tp.tail.prev = p.head\n\n\t// head + tail\n\tp.size = 2\n\treturn p\n}", "title": "" }, { "docid": "b850f46102bc167f907c1d966f4bfc76", "score": "0.6299277", "text": "func NewExportPipeline(ctx context.Context, driver ProtocolDriver, exporterOpts ...ExporterOption) (*Exporter,\n\t*sdktrace.TracerProvider, *basic.Controller, error) {\n\n\texp, err := NewExporter(ctx, driver, exporterOpts...)\n\tif err != nil {\n\t\treturn nil, nil, nil, err\n\t}\n\n\ttracerProvider := sdktrace.NewTracerProvider(\n\t\tsdktrace.WithBatcher(exp),\n\t)\n\n\tcntr := basic.New(\n\t\tprocessor.New(\n\t\t\tsimple.NewWithInexpensiveDistribution(),\n\t\t\texp,\n\t\t),\n\t)\n\n\treturn exp, tracerProvider, cntr, nil\n}", "title": "" }, { "docid": "1f092c8d0fa89c8b2c4df4bab28dc890", "score": "0.6295393", "text": "func ExampleNewPipeline() {\n\t// This example shows how to wire in your own logging mechanism (this example uses\n\t// Go's standard logger to write log information to standard error)\n\tlogger := log.New(os.Stderr, \"\", log.Ldate|log.Lmicroseconds)\n\n\t// Create/configure a request pipeline options object.\n\t// All PipelineOptions' fields are optional; reasonable defaults are set for anything you do not specify\n\tpo := azfile.PipelineOptions{\n\t\t// Set RetryOptions to control how HTTP request are retried when retryable failures occur\n\t\tRetry: azfile.RetryOptions{\n\t\t\tPolicy: azfile.RetryPolicyExponential, // Use exponential backoff as opposed to linear\n\t\t\tMaxTries: 3, // Try at most 3 times to perform the operation (set to 1 to disable retries)\n\t\t\tTryTimeout: time.Second * 3, // Maximum time allowed for any single try\n\t\t\tRetryDelay: time.Second * 1, // Backoff amount for each retry (exponential or linear)\n\t\t\tMaxRetryDelay: time.Second * 3, // Max delay between retries\n\t\t},\n\n\t\t// Set RequestLogOptions to control how each HTTP request & its response is logged\n\t\tRequestLog: azfile.RequestLogOptions{\n\t\t\tLogWarningIfTryOverThreshold: time.Millisecond * 200, // A successful response taking more than this time to arrive is logged as a warning\n\t\t},\n\n\t\t// Set LogOptions to control what & where all pipeline log events go\n\t\tLog: pipeline.LogOptions{\n\t\t\tLog: func(s pipeline.LogLevel, m string) { // This func is called to log each event\n\t\t\t\t// This method is not called for filtered-out severities.\n\t\t\t\tlogger.Output(2, m) // This example uses Go's standard logger\n\t\t\t},\n\t\t\tShouldLog: func(level pipeline.LogLevel) bool {\n\t\t\t\treturn level <= pipeline.LogInfo // Log all events from informational to more severe\n\t\t\t},\n\t\t},\n\t}\n\n\t// Create a request pipeline object configured with 
credentials and with pipeline options. Once created,\n\t// a pipeline object is goroutine-safe and can be safely used with many XxxURL objects simultaneously.\n\tp := azfile.NewPipeline(azfile.NewAnonymousCredential(), po) // A pipeline always requires some credential object\n\n\t// Once you've created a pipeline object, associate it with an XxxURL object so that you can perform HTTP requests with it.\n\tu, _ := url.Parse(\"https://myaccount.file.core.windows.net\")\n\tserviceURL := azfile.NewServiceURL(*u, p)\n\t// Use the serviceURL as desired...\n\n\t// NOTE: When you use an XxxURL object to create another XxxURL object, the new XxxURL object inherits the\n\t// same pipeline object as its parent. For example, the shareURL and fileURL objects (created below)\n\t// all share the same pipeline. Any HTTP operations you perform with these objects share the behavior (retry, logging, etc.)\n\tshareURL := serviceURL.NewShareURL(\"myshare\")\n\tdirectoryURL := shareURL.NewDirectoryURL(\"mydirectory\")\n\tfileURL := directoryURL.NewFileURL(\"ReadMe.txt\")\n\n\t// If you'd like to perform some operations with different behavior, create a new pipeline object and\n\t// associate it with a new XxxURL object by passing the new pipeline to the XxxURL object's WithPipeline method.\n\n\t// In this example, I reconfigure the retry policies, create a new pipeline, and then create a new\n\t// ShareURL object that has the same URL as its parent.\n\tpo.Retry = azfile.RetryOptions{\n\t\tPolicy: azfile.RetryPolicyFixed, // Use linear backoff\n\t\tMaxTries: 4, // Try at most 3 times to perform the operation (set to 1 to disable retries)\n\t\tTryTimeout: time.Minute * 1, // Maximum time allowed for any single try\n\t\tRetryDelay: time.Second * 5, // Backoff amount for each retry (exponential or linear)\n\t\tMaxRetryDelay: time.Second * 10, // Max delay between retries\n\t}\n\tnewShareURL := shareURL.WithPipeline(azfile.NewPipeline(azfile.NewAnonymousCredential(), po))\n\n\t// Now, any XxxDirectoryURL object created using newShareURL inherits the pipeline with the new retry policy.\n\tnewDirectoryURL := newShareURL.NewDirectoryURL(\"mynewdirectory\")\n\t_, _, _ = fileURL, directoryURL, newDirectoryURL // Avoid compiler's \"declared and not used\" error\n}", "title": "" }, { "docid": "0bc6b6261fcccb17523886d6908729ad", "score": "0.62525904", "text": "func NewPipeline() *Pipeline {\n\treturn &Pipeline{\n\t\tSerializablePipeline: NewSerializablePipeline(),\n\t}\n}", "title": "" }, { "docid": "d4bd1463ef8fb8a4f4652e740c833568", "score": "0.6153639", "text": "func New(stdin io.Reader, stdout io.Writer, stderr io.Writer) *Pipeline {\n pl := &Pipeline{}\n pl.input = stdin\n pl.output = stdout\n pl.err = stderr\n pl.tasks = []*exec.Cmd{}\n return pl\n}", "title": "" }, { "docid": "a2207ecff2425c52979e54862f0d0e3c", "score": "0.61425", "text": "func NewPipeline(definitionPath, environmentPath string, environment types.StringMap, ignoredSteps types.StringSet, selectedSteps types.StringSet) (*Pipeline, error) {\n\tp := &Pipeline{}\n\tvar err error\n\t// Load environment\n\tp.Environment, err = NewPipelineEnvironment(environmentPath, environment, ignoredSteps, selectedSteps)\n\tif err != nil {\n\t\t// As environment files are optional, handle if non is accessible\n\t\tif e, ok := err.(*os.PathError); ok && e.Err == syscall.ENOENT {\n\t\t\tlog.Print(\"No environment file is used\")\n\t\t} else {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\t// Load definition\n\tp.Definition, err = NewPipelineDefinition(definitionPath, 
p.Environment)\n\tp.localRunner = NewLocalRunner(\"pipeline\", os.Stdout, os.Stderr)\n\tp.noopRunner = NewNoopRunner(false)\n\treturn p, err\n}", "title": "" }, { "docid": "bbc0d255a0d4a983e5c644593e4977df", "score": "0.6102711", "text": "func NewPipeline(ops []OpUnion) Pipeline {\n\treturn Pipeline{Operations: ops}\n}", "title": "" }, { "docid": "af221912d50fc4ac903b9b6cfc0c17c5", "score": "0.60900384", "text": "func CreatePipeline(codecName string, pipelineStr string, clockRate float32) *Pipeline {\n\t// Generate C String from Input\n\tpipelineStrUnsafe := C.CString(pipelineStr)\n\tdefer C.free(unsafe.Pointer(pipelineStrUnsafe))\n\n\t// Lock Pipelines\n\tpipelinesLock.Lock()\n\tdefer pipelinesLock.Unlock()\n\n\t// Create new Pipeline\n\tpipeline := &Pipeline{\n\t\tPipeline: C.gstreamer_create_pipeline(pipelineStrUnsafe),\n\t\tid: utils.RandSeq(5),\n\t\tcodecName: codecName,\n\t\tclockRate: clockRate,\n\t}\n\tpipeline.outputTracks = []*webrtc.Track{}\n\t// Add new Pipeline\n\tpipelines[pipeline.id] = pipeline\n\treturn pipeline\n}", "title": "" }, { "docid": "411032cc9c60407899dfb2dc1118b2c8", "score": "0.6082344", "text": "func InstallNewPipeline(ctx context.Context, driver ProtocolDriver, exporterOpts ...ExporterOption) (*Exporter,\n\t*sdktrace.TracerProvider, *basic.Controller, error) {\n\n\texp, tp, cntr, err := NewExportPipeline(ctx, driver, exporterOpts...)\n\tif err != nil {\n\t\treturn nil, nil, nil, err\n\t}\n\n\totel.SetTracerProvider(tp)\n\terr = cntr.Start(ctx)\n\tif err != nil {\n\t\treturn nil, nil, nil, err\n\t}\n\n\treturn exp, tp, cntr, err\n}", "title": "" }, { "docid": "4187682cded78c7ee021ef5f53491b64", "score": "0.6025489", "text": "func (p *Parallel) NewPipeline() *Pipeline {\n\tpipe := NewPipeline()\n\tp.Add(pipe)\n\treturn pipe\n}", "title": "" }, { "docid": "f29c91b691da6bdd231fd2bfe612110c", "score": "0.60240823", "text": "func CreatePipeline(codecName string, tracks []*webrtc.Track) *Pipeline {\n\tfmt.Printf(\"In create pipeline\")\n\tpipelineStr := \"\"\n\tswitch codecName {\n\tcase \"VP8\":\n\t\tpipelineStr += \", encoding-name=VP8-DRAFT-IETF-01 ! rtpvp8depay ! decodebin ! autovideosink\"\n\tcase \"Opus\":\n\t\tpipelineStr += \"appsrc name=src ! decodebin ! audioconvert ! audioresample ! audio/x-raw, rate=8000 ! mulawenc ! appsink name=appsink max-buffers=1\"\n\t// case webrtc.VP9:\n\t// \tpipelineStr += \" ! rtpvp9depay ! decodebin ! autovideosink\"\n\t// case webrtc.H264:\n\t// \tpipelineStr += \" ! rtph264depay ! decodebin ! autovideosink\"\n\t// case webrtc.G722:\n\t// \tpipelineStr += \" clock-rate=8000 ! rtpg722depay ! decodebin ! 
autoaudiosink\"\n\tdefault:\n\t\tpanic(\"Unhandled codec \" + codecName)\n\t}\n\n\tpipelineStrUnsafe := C.CString(pipelineStr)\n\tdefer C.free(unsafe.Pointer(pipelineStrUnsafe))\n\treturn &Pipeline{\n\t\tPipeline: C.gstreamer_receive_create_pipeline(pipelineStrUnsafe),\n\t\ttracks: tracks,\n\t}\n}", "title": "" }, { "docid": "eb63840c5de57349b901aa5603105dcb", "score": "0.59881675", "text": "func NewPipeline(stages ...Stage) *Pipeline {\n\treturn &Pipeline{stages: stages}\n}", "title": "" }, { "docid": "f675218c8e8514b5f873de8999c613c4", "score": "0.5958769", "text": "func (xdcrf *XDCRFactory) NewPipeline(topic string, progress_recorder common.PipelineProgressRecorder) (common.Pipeline, error) {\n\tspec, err := xdcrf.repl_spec_svc.ReplicationSpec(topic)\n\tif err != nil {\n\t\txdcrf.logger.Errorf(\"Failed to get replication specification for pipeline %v, err=%v\\n\", topic, err)\n\t\treturn nil, err\n\t}\n\txdcrf.logger.Debugf(\"replication specification = %v\\n\", spec)\n\n\tlogger_ctx := log.CopyCtx(xdcrf.default_logger_ctx)\n\tlogger_ctx.SetLogLevel(spec.Settings.LogLevel)\n\n\ttargetClusterRef, err := xdcrf.remote_cluster_svc.RemoteClusterByUuid(spec.TargetClusterUUID, false)\n\tif err != nil {\n\t\txdcrf.logger.Errorf(\"Error getting remote cluster with uuid=%v for pipeline %v, err=%v\\n\", spec.TargetClusterUUID, spec.Id, err)\n\t\treturn nil, err\n\t}\n\n\tnozzleType, err := xdcrf.getOutNozzleType(targetClusterRef, spec)\n\tif err != nil {\n\t\txdcrf.logger.Errorf(\"Failed to get the nozzle type for %v, err=%v\\n\", spec.Id, err)\n\t\treturn nil, err\n\t}\n\tisCapiReplication := (nozzleType == base.Capi)\n\n\tconnStr, err := xdcrf.remote_cluster_svc.GetConnectionStringForRemoteCluster(targetClusterRef, isCapiReplication)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tusername, password, httpAuthMech, certificate, sanInCertificate, clientCertificate, clientKey, err := targetClusterRef.MyCredentials()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\ttargetBucketInfo, err := xdcrf.utils.GetBucketInfo(connStr, spec.TargetBucketName, username, password, httpAuthMech, certificate, sanInCertificate, clientCertificate, clientKey, xdcrf.logger)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tisTargetES := xdcrf.utils.CheckWhetherClusterIsESBasedOnBucketInfo(targetBucketInfo)\n\n\tconflictResolutionType, err := xdcrf.utils.GetConflictResolutionTypeFromBucketInfo(spec.TargetBucketName, targetBucketInfo)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// sourceCRMode is the conflict resolution mode to use when resolving conflicts for big documents at source side\n\t// capi replication always uses rev id based conflict resolution\n\tsourceCRMode := base.CRMode_RevId\n\tif !isCapiReplication {\n\t\t// for xmem replication, sourceCRMode is LWW if and only if target bucket is LWW enabled, so as to ensure that source side conflict\n\t\t// resolution and target side conflict resolution yield consistent results\n\t\tsourceCRMode = base.GetCRModeFromConflictResolutionTypeSetting(conflictResolutionType)\n\t}\n\n\txdcrf.logger.Infof(\"%v sourceCRMode=%v httpAuthMech=%v isCapiReplication=%v isTargetES=%v\\n\", topic, sourceCRMode, httpAuthMech, isCapiReplication, isTargetES)\n\n\t/**\n\t * Construct the Source nozzles\n\t * sourceNozzles - a map of DCPNozzleID -> *DCPNozzle\n\t * kv_vb_map - Map of SourceKVNode -> list of vbucket#'s that it's responsible for\n\t */\n\tsourceNozzles, kv_vb_map, err := xdcrf.constructSourceNozzles(spec, topic, isCapiReplication, logger_ctx)\n\tif err != nil 
{\n\t\treturn nil, err\n\t}\n\tif len(sourceNozzles) == 0 {\n\t\t// no pipeline is constructed if there is no source nozzle\n\t\treturn nil, base.ErrorNoSourceNozzle\n\t}\n\n\tprogress_recorder(fmt.Sprintf(\"%v source nozzles have been constructed\", len(sourceNozzles)))\n\n\txdcrf.logger.Infof(\"%v kv_vb_map=%v\\n\", topic, kv_vb_map)\n\t/**\n\t * Construct the outgoing (Destination) nozzles\n\t * 1. outNozzles - map of ID -> actual nozzle\n\t * 2. vbNozzleMap - map of VBucket# -> nozzle to be used (to be used by router)\n\t * 3. kvVBMap - map of remote KVNodes -> vbucket# responsible for per node\n\t */\n\toutNozzles, vbNozzleMap, target_kv_vb_map, targetUserName, targetPassword, targetClusterVersion, err :=\n\t\txdcrf.constructOutgoingNozzles(spec, kv_vb_map, sourceCRMode, targetBucketInfo, targetClusterRef, isCapiReplication, isTargetES, logger_ctx)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tprogress_recorder(fmt.Sprintf(\"%v target nozzles have been constructed\", len(outNozzles)))\n\n\t// TODO construct queue parts. This will affect vbMap in router. may need an additional outNozzle -> downStreamPart/queue map in constructRouter\n\n\t// construct routers to be able to connect the nozzles\n\tfor _, sourceNozzle := range sourceNozzles {\n\t\tvblist := sourceNozzle.(*parts.DcpNozzle).GetVBList()\n\t\tdownStreamParts := make(map[string]common.Part)\n\t\tfor _, vb := range vblist {\n\t\t\ttargetNozzleId, ok := vbNozzleMap[vb]\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"Error constructing pipeline %v since there is no target nozzle for vb=%v\", topic, vb)\n\t\t\t}\n\n\t\t\toutNozzle, ok := outNozzles[targetNozzleId]\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"%v There is no corresponding target nozzle for vb=%v, targetNozzleId=%v\", topic, vb, targetNozzleId)\n\t\t\t}\n\t\t\tdownStreamParts[targetNozzleId] = outNozzle\n\t\t}\n\n\t\t// Construct a router - each Source nozzle has a router.\n\t\trouter, err := xdcrf.constructRouter(sourceNozzle.Id(), spec, downStreamParts, vbNozzleMap, sourceCRMode, logger_ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tsourceNozzle.SetConnector(router)\n\t}\n\tprogress_recorder(\"Source nozzles have been wired to target nozzles\")\n\n\t// construct and initializes the pipeline\n\tpipeline := pp.NewPipelineWithSettingConstructor(topic, sourceNozzles, outNozzles, spec, targetClusterRef,\n\t\txdcrf.ConstructSettingsForPart, xdcrf.ConstructSettingsForConnector, xdcrf.ConstructSSLPortMap, xdcrf.ConstructUpdateSettingsForPart,\n\t\txdcrf.ConstructUpdateSettingsForConnector, xdcrf.SetStartSeqno, xdcrf.CheckpointBeforeStop, logger_ctx)\n\n\t// These listeners are the driving factors of the pipeline\n\txdcrf.registerAsyncListenersOnSources(pipeline, logger_ctx)\n\txdcrf.registerAsyncListenersOnTargets(pipeline, logger_ctx)\n\n\t// initialize component event listener map in pipeline\n\tpp.GetAllAsyncComponentEventListeners(pipeline)\n\n\t// Create PipelineContext\n\tif pipelineContext, err := pctx.NewWithSettingConstructor(pipeline, xdcrf.ConstructSettingsForService, xdcrf.ConstructUpdateSettingsForService, logger_ctx); err != nil {\n\n\t\treturn nil, err\n\t} else {\n\t\t//register services to the pipeline context, so when pipeline context starts as part of the pipeline starting, these services will start as well\n\t\tpipeline.SetRuntimeContext(pipelineContext)\n\t\terr = xdcrf.registerServices(pipeline, logger_ctx, kv_vb_map, targetUserName, targetPassword, spec.TargetBucketName, target_kv_vb_map, targetClusterRef, 
targetClusterVersion, isCapiReplication, isTargetES)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tprogress_recorder(\"Pipeline has been constructed\")\n\n\txdcrf.logger.Infof(\"Pipeline %v has been constructed\", topic)\n\treturn pipeline, nil\n}", "title": "" }, { "docid": "8370dae898f619794e5b386f8f98b3e7", "score": "0.59133273", "text": "func NewPipeline(logger log.Logger, stgs PipelineStages, jobName *string, registerer prometheus.Registerer) (*Pipeline, error) {\n\thist := prometheus.NewHistogramVec(prometheus.HistogramOpts{\n\t\tNamespace: \"logentry\",\n\t\tName: \"pipeline_duration_seconds\",\n\t\tHelp: \"Label and metric extraction pipeline processing time, in seconds\",\n\t\tBuckets: []float64{.000005, .000010, .000025, .000050, .000100, .000250, .000500, .001000, .002500, .005000, .010000, .025000},\n\t}, []string{\"job_name\"})\n\terr := registerer.Register(hist)\n\tif err != nil {\n\t\tif existing, ok := err.(prometheus.AlreadyRegisteredError); ok {\n\t\t\thist = existing.ExistingCollector.(*prometheus.HistogramVec)\n\t\t} else {\n\t\t\t// Same behavior as MustRegister if the error is not for AlreadyRegistered\n\t\t\tpanic(err)\n\t\t}\n\t}\n\n\tst := []Stage{}\n\tfor _, s := range stgs {\n\t\tstage, ok := s.(PipelineStage)\n\t\tif !ok {\n\t\t\treturn nil, errors.Errorf(\"invalid YAML config, \"+\n\t\t\t\t\"make sure each stage of your pipeline is a YAML object (must end with a `:`), check stage `- %s`\", s)\n\t\t}\n\t\tif len(stage) > 1 {\n\t\t\treturn nil, errors.New(\"pipeline stage must contain only one key\")\n\t\t}\n\t\tfor key, config := range stage {\n\t\t\tname, ok := key.(string)\n\t\t\tif !ok {\n\t\t\t\treturn nil, errors.New(\"pipeline stage key must be a string\")\n\t\t\t}\n\t\t\tnewStage, err := New(logger, jobName, name, config, registerer)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.Wrapf(err, \"invalid %s stage config\", name)\n\t\t\t}\n\t\t\tst = append(st, newStage)\n\t\t}\n\t}\n\treturn &Pipeline{\n\t\tlogger: log.With(logger, \"component\", \"pipeline\"),\n\t\tstages: st,\n\t\tjobName: jobName,\n\t\tplDuration: hist,\n\t}, nil\n}", "title": "" }, { "docid": "a7b3b29173478b6570ed694540abc05d", "score": "0.5904606", "text": "func NewPipeline(transformer Transformer, classifier Classifier, filterer Filterer, encoder Encoder, compressor Compressor, storer Storer) *Pipeline {\n\tp := &Pipeline{\n\t\tTransformer: transformer,\n\t\tClassifier: classifier,\n\t\tFilterer: filterer,\n\t\tEncoder: encoder,\n\t\tCompressor: compressor,\n\t\tStorer: storer,\n\t}\n\tstorer.SetPipeline(p)\n\treturn p\n}", "title": "" }, { "docid": "bde16cc878c61806a81173d4f98738a0", "score": "0.5865673", "text": "func NewPipeline(ls ...interface{}) (*Pipe, error) {\n\tvar pipe []interface{}\n\n\tp := &Pipe{\n\t\tls: pipe,\n\t}\n\n\tfor _, f := range ls {\n\t\tif err := p.Add(f); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn p, nil\n}", "title": "" }, { "docid": "3d6b03e399c1f075445ec6448a25325d", "score": "0.5853", "text": "func PipelineFromLibrary(p *library.Pipeline) *Pipeline {\n\tpipeline := &Pipeline{\n\t\tID: sql.NullInt64{Int64: p.GetID(), Valid: true},\n\t\tRepoID: sql.NullInt64{Int64: p.GetRepoID(), Valid: true},\n\t\tCommit: sql.NullString{String: p.GetCommit(), Valid: true},\n\t\tFlavor: sql.NullString{String: p.GetFlavor(), Valid: true},\n\t\tPlatform: sql.NullString{String: p.GetPlatform(), Valid: true},\n\t\tRef: sql.NullString{String: p.GetRef(), Valid: true},\n\t\tType: sql.NullString{String: p.GetType(), Valid: true},\n\t\tVersion: 
sql.NullString{String: p.GetVersion(), Valid: true},\n\t\tExternalSecrets: sql.NullBool{Bool: p.GetExternalSecrets(), Valid: true},\n\t\tInternalSecrets: sql.NullBool{Bool: p.GetInternalSecrets(), Valid: true},\n\t\tServices: sql.NullBool{Bool: p.GetServices(), Valid: true},\n\t\tStages: sql.NullBool{Bool: p.GetStages(), Valid: true},\n\t\tSteps: sql.NullBool{Bool: p.GetSteps(), Valid: true},\n\t\tTemplates: sql.NullBool{Bool: p.GetTemplates(), Valid: true},\n\t\tData: p.GetData(),\n\t}\n\n\treturn pipeline.Nullify()\n}", "title": "" }, { "docid": "46f16163cc52f0755faa780e26e9e0f6", "score": "0.58334494", "text": "func New(sqsSvc *sqs.SQS, lambdaSvc *lambda.Lambda, db *dynamodb.DynamoDB, envName string) *PipelineManager {\n\treturn &PipelineManager{\n\t\tsqsSvc: sqsSvc,\n\t\tlambdaSvc: lambdaSvc,\n\t\tdb: db,\n\t\tenvName: envName,\n\t}\n}", "title": "" }, { "docid": "98c42420fcda58cd00aa4455c61cabda", "score": "0.5830119", "text": "func createPipeline(\n\tconfig Config, mgr types.Manager, logger log.Modular, stats metrics.Type,\n) (*util.ClosablePool, error) {\n\tpool := util.NewClosablePool()\n\n\t// Create our input pipe\n\tinputPipe, err := input.New(config.Input, mgr, logger, stats)\n\tif err != nil {\n\t\tlogger.Errorf(\"Input error (%s): %v\\n\", config.Input.Type, err)\n\t\treturn nil, err\n\t}\n\tpool.Add(1, inputPipe)\n\n\t// Create our benchmarking output pipe\n\toutputPipe := test.NewBenchOutput(\n\t\ttime.Duration(config.ReportPeriodMS)*time.Millisecond, logger, stats,\n\t)\n\tpool.Add(10, outputPipe)\n\n\toutputPipe.StartReceiving(inputPipe.TransactionChan())\n\treturn pool, nil\n}", "title": "" }, { "docid": "c20351f0dd981712dbbf790cbd15fee1", "score": "0.58288306", "text": "func New(stages ...StageRunner) *Pipeline {\n\treturn &Pipeline{\n\t\tstages: stages,\n\t}\n}", "title": "" }, { "docid": "c21baca8147871fe2e88b73670e38d4b", "score": "0.58113647", "text": "func New(sampleRate phono.SampleRate, options ...Option) (*Pipe, error) {\n\tp := &Pipe{\n\t\tUID: phono.NewUID(),\n\t\tsampleRate: sampleRate,\n\t\tlog: log.GetLogger(),\n\t\tprocessors: make([]*processRunner, 0),\n\t\tsinks: make([]*sinkRunner, 0),\n\t\tparams: make(map[string][]phono.ParamFunc),\n\t\tfeedback: make(map[string][]phono.ParamFunc),\n\t\tevents: make(chan eventMessage, 1),\n\t\tprovide: make(chan struct{}),\n\t\tconsume: make(chan message),\n\t}\n\tfor _, option := range options {\n\t\terr := option(p)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tgo p.loop()\n\treturn p, nil\n}", "title": "" }, { "docid": "0f4be7e2241bca664abf00fe9e2985b4", "score": "0.5809624", "text": "func (g *Gitlab) CreatePipeline(pid, ref string) (*Pipeline, error) {\n\tvar pl Pipeline\n\tdata, err := g.buildAndExecRequest(\n\t\thttp.MethodPost,\n\t\tg.ResourceUrlWithQuery(\n\t\t\tpipelineCreationUrl,\n\t\t\tmap[string]string{\":id\": pid},\n\t\t\tmap[string]string{\"ref\": ref},\n\t\t),\n\t\tnil,\n\t)\n\tif nil != err {\n\t\treturn nil, fmt.Errorf(\"Request create pipeline API error: %v\", err)\n\t}\n\n\tif err := json.Unmarshal(data, &pl); nil != err {\n\t\treturn nil, fmt.Errorf(\"Decode response error: %v\", err)\n\t}\n\n\treturn &pl, nil\n}", "title": "" }, { "docid": "0b455c08a94ba469875bd2aa2a0f3c19", "score": "0.57971877", "text": "func NewCmdUpgradePipeline() (*cobra.Command, *Options) {\n\to := &Options{}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"pipeline\",\n\t\tAliases: []string{\"pipelines\"},\n\t\tShort: \"Upgrades the pipelines in the source repositories to the latest version stream and pipeline 
catalog\",\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\terr := o.Run()\n\t\t\thelper.CheckErr(err)\n\t\t},\n\t}\n\tcmd.Flags().StringVarP(&o.Dir, \"dir\", \"d\", \".\", \"the directory look for the 'jx-requirements.yml` file\")\n\tcmd.Flags().StringVarP(&o.Filter, \"filter\", \"f\", \"\", \"the text filter to filter out repositories to upgrade\")\n\tcmd.Flags().StringVarP(&o.ConfigFile, \"config\", \"c\", \"\", \"the configuration file to load for the repository configurations. If not specified we look in .jx/gitops/source-repositories.yaml\")\n\tcmd.Flags().StringVarP(&o.Strategy, \"strategy\", \"s\", \"resource-merge\", \"the 'kpt' strategy to use. To see available strategies type 'kpt pkg update --help'. Typical values are: resource-merge, fast-forward, alpha-git-patch, force-delete-replace\")\n\n\tcmd.Flags().StringVar(&o.PullRequestTitle, \"pull-request-title\", \"\", \"the PR title\")\n\tcmd.Flags().StringVar(&o.PullRequestBody, \"pull-request-body\", \"\", \"the PR body\")\n\tcmd.Flags().BoolVarP(&o.AutoMerge, \"auto-merge\", \"\", true, \"should we automatically merge if the PR pipeline is green\")\n\tcmd.Flags().BoolVarP(&o.NoConvert, \"no-convert\", \"\", false, \"disables converting from Kptfile based pipelines to the uses:sourceURI notation for reusing pipelines across repositories\")\n\tcmd.Flags().StringVarP(&o.KptBinary, \"bin\", \"\", \"\", \"the 'kpt' binary name to use. If not specified this command will download the jx binary plugin into ~/.jx3/plugins/bin and use that\")\n\n\to.EnvironmentPullRequestOptions.ScmClientFactory.AddFlags(cmd)\n\n\teo := &o.EnvironmentPullRequestOptions\n\tcmd.Flags().StringVarP(&eo.CommitTitle, \"commit-title\", \"\", \"\", \"the commit title\")\n\tcmd.Flags().StringVarP(&eo.CommitMessage, \"commit-message\", \"\", \"\", \"the commit message\")\n\treturn cmd, o\n}", "title": "" }, { "docid": "224354a5edb5ba24f394dacdead5837c", "score": "0.5729198", "text": "func NewPipe(opt Opition) (*Pipe, error) {\n\tp := &Pipe{\n\t\tOpition: opt,\n\t\tApps: []*Context{},\n\n\t\tdata: map[string]interface{}{},\n\t}\n\n\tfor _, source := range p.Sources {\n\t\tif _, ok := sourceLoaders[source]; !ok {\n\t\t\treturn nil, fmt.Errorf(\"source %s not exist\", source)\n\t\t}\n\t}\n\n\tfor _, handler := range p.Handlers {\n\t\tif _, ok := sourceHandlers[handler]; !ok {\n\t\t\treturn nil, fmt.Errorf(\"handler %s not exist\", handler)\n\t\t}\n\t}\n\n\treturn p, nil\n}", "title": "" }, { "docid": "91af32f0217d934e0c1bdfc948b365ca", "score": "0.5725815", "text": "func PipelineFromContext(ctx *cli.Context) drone.Pipeline {\n\treturn drone.Pipeline{\n\t\tBuild: buildFromContext(ctx),\n\t\tRepo: repoFromContext(ctx),\n\t\tCommit: commitFromContext(ctx),\n\t\tStage: stageFromContext(ctx),\n\t\tStep: stepFromContext(ctx),\n\t\tSemVer: semVerFromContext(ctx),\n\t\tCalVer: calVerFromContext(ctx),\n\t\tSystem: systemFromContext(ctx),\n\t}\n}", "title": "" }, { "docid": "fdfbac2c9d4c41a34536bc1253ec6856", "score": "0.56773376", "text": "func testPipeline() *Pipeline {\n\treturn &Pipeline{\n\t\tID: sql.NullInt64{Int64: 1, Valid: true},\n\t\tRepoID: sql.NullInt64{Int64: 1, Valid: true},\n\t\tCommit: sql.NullString{String: \"48afb5bdc41ad69bf22588491333f7cf71135163\", Valid: true},\n\t\tFlavor: sql.NullString{String: \"large\", Valid: true},\n\t\tPlatform: sql.NullString{String: \"docker\", Valid: true},\n\t\tRef: sql.NullString{String: \"refs/heads/master\", Valid: true},\n\t\tType: sql.NullString{String: constants.PipelineTypeYAML, Valid: true},\n\t\tVersion: 
sql.NullString{String: \"1\", Valid: true},\n\t\tExternalSecrets: sql.NullBool{Bool: false, Valid: true},\n\t\tInternalSecrets: sql.NullBool{Bool: false, Valid: true},\n\t\tServices: sql.NullBool{Bool: true, Valid: true},\n\t\tStages: sql.NullBool{Bool: false, Valid: true},\n\t\tSteps: sql.NullBool{Bool: true, Valid: true},\n\t\tTemplates: sql.NullBool{Bool: false, Valid: true},\n\t\tData: testPipelineData(),\n\t}\n}", "title": "" }, { "docid": "786237e381943db6a9a669a484220276", "score": "0.5558228", "text": "func (f *telemetryPolicyFactory) New(next pipeline.Policy, po *pipeline.PolicyOptions) pipeline.Policy {\n\treturn &telemetryPolicy{factory: f, next: next}\n}", "title": "" }, { "docid": "c43c68e034ffda9290aea53857ded1bc", "score": "0.55272216", "text": "func NewFromString(exp string) *Pipeline {\n\tcmds := ParseCommand(exp)\n\treturn NewPipeline(cmds...)\n}", "title": "" }, { "docid": "cab234461c0411412ae742ec2672817f", "score": "0.5520648", "text": "func (a *apiServer) CreatePipeline(ctx context.Context, request *pps.CreatePipelineRequest) (response *emptypb.Empty, retErr error) {\n\tmetricsFn := metrics.ReportUserAction(ctx, a.reporter, \"CreatePipeline\")\n\tdefer func(start time.Time) { metricsFn(start, retErr) }(time.Now())\n\n\tjs, err := protojson.Marshal(request)\n\tif err != nil {\n\t\treturn nil, unknownError(ctx, \"could not marshal CreatePipelineRequest to JSON\", err)\n\t}\n\tv2Req := &pps.CreatePipelineV2Request{\n\t\tCreatePipelineRequestJson: string(js),\n\t\tUpdate: request.Update,\n\t\tReprocess: request.Reprocess,\n\t\tDryRun: request.DryRun,\n\t}\n\tif _, err := a.createPipeline(ctx, v2Req); err != nil {\n\t\treturn nil, err\n\t}\n\treturn &emptypb.Empty{}, nil\n}", "title": "" }, { "docid": "9e3d5d98267f330e74eebc9e0033f73c", "score": "0.5476428", "text": "func NewPipeline(factories []component.Factory) *Pipeline {\n\tpipeline := &Pipeline{}\n\n\tfor i, factory := range factories {\n\t\tpool := pool{\n\t\t\tfactory: factory,\n\t\t\tterminate: make(chan struct{}),\n\t\t\tdone: &sync.WaitGroup{},\n\t\t\tworkers: make(chan component.Component, factory.PoolSize()),\n\t\t}\n\n\t\tpool.produce = make(chan component.Message, factory.ChannelSize())\n\t\tpool.output = make(chan component.Message, factory.ChannelSize())\n\t\tif i > 0 {\n\t\t\tpool.input = pipeline.pools[i-1].output\n\t\t}\n\n\t\tfor j := 0; j < factory.PoolSize(); j++ {\n\t\t\tspawnWorker(factory, pool)\n\t\t}\n\n\t\tpipeline.pools = append(pipeline.pools, &pool)\n\t}\n\n\tgo func() {\n\t\tfor msg := range pipeline.pools[len(factories)-1].output {\n\t\t\tmsg.Release()\n\t\t}\n\t}()\n\n\treturn pipeline\n}", "title": "" }, { "docid": "ff45c09a4fcf5fa4ba66f6dedf5c931b", "score": "0.54680884", "text": "func NewWithConfig(cfg Config) *Pipeline {\n\treturn &Pipeline{\n\t\tconfig: cfg,\n\t}\n}", "title": "" }, { "docid": "c243190f52dd8ff7e3264a90e4a0a41a", "score": "0.5422505", "text": "func New(p *config.Pipeline, opts ...OptFunc) *Builder {\n\tb := &Builder{pipeline: p}\n\tfor _, opt := range opts {\n\t\topt(b)\n\t}\n\n\treturn b\n}", "title": "" }, { "docid": "76bb54c462119fe4bc78947310f27d41", "score": "0.5409741", "text": "func (cli *Client) CreatePipeline(pipelineName, sourceBucket, targetBucket string, capacity int) error {\n\n\treturn api.CreatePipeline(cli, pipelineName, sourceBucket, targetBucket, capacity)\n}", "title": "" }, { "docid": "6542f82c17f2fbe1d2df0b9bcf4896dd", "score": "0.5406627", "text": "func newPipelineCommandHandler(repository eventstore.Repository) *pipelineCommandHandler 
{\n\treturn &pipelineCommandHandler{\n\t\trepository: repository,\n\t}\n}", "title": "" }, { "docid": "980917f0f166f210983de0c983e0bad8", "score": "0.5406198", "text": "func NewPipelineTarget(type_ string) *PipelineTarget {\n\tthis := PipelineTarget{}\n\tthis.Type = type_\n\treturn &this\n}", "title": "" }, { "docid": "887f63c699836352c7d326089076c294", "score": "0.5352502", "text": "func NewSpinnakerPipeline(params map[string]interface{}) (*pipeline, error) {\n\n\tpipeline := &pipeline{\n\t\tSchema: \"v2\",\n\t\tTemplate: Template{\n\t\t\tArtifactAccount: \"front50ArtifactCredentials\",\n\t\t\tReference: \"spinnaker://\" + params[\"pipeline_template\"].(string),\n\t\t\tType: \"front50/pipelineTemplate\",\n\t\t},\n\t\tApplication: params[\"spinnaker_application\"].(string),\n\t\tName: params[\"pipeline_name\"].(string),\n\t\tType: \"templatedPipeline\",\n\t\tTriggers: make([]interface{}, 0),\n\t\tStages: make([]interface{}, 0),\n\t\tVariables: Variables{\n\t\t\tNamespace: params[\"namespace\"].(string),\n\t\t\tDockerRegistry: params[\"docker_registry\"].(string),\n\t\t\tK8SAccount: params[\"k8s_account\"].(string),\n\t\t\tHelmPackageS3ObjectPath: params[\"helm_package_s3_object_path\"].(string),\n\t\t\tHelmOverrideFileS3ObjectPath: params[\"helm_override_file_s3_object_path\"].(string),\n\t\t\tDockerRegistryOrg: params[\"docker_registry_org\"].(string),\n\t\t\tDockerRepository: params[\"docker_repository\"].(string),\n\t\t\tHalS3Account: params[\"hal_s3_account\"].(string),\n\t\t\tHalDockerRegistryAccount: params[\"hal_docker_registry_account\"].(string),\n\t\t\tDockerImageTag: params[\"docker_image_tag\"].(string),\n\t\t\tSpinnakerApplication: params[\"spinnaker_application\"].(string),\n\t\t},\n\t\tExclude: make([]interface{}, 0),\n\t\tParameterConfig: make([]interface{}, 0),\n\t\tNotifications: make([]interface{}, 0),\n\t}\n\n\treturn pipeline, nil\n}", "title": "" }, { "docid": "0fa3543453f96935161a612e236c006f", "score": "0.5343167", "text": "func createPipeline(params CRDCreationParameters) (*syntax.ParsedPipeline, error) {\n\tsteps, err := buildSteps(params)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"unable to create app extending pipeline steps\")\n\t}\n\n\tstage := syntax.Stage{\n\t\tName: appExtensionStageName,\n\t\tSteps: steps,\n\t\tAgent: &syntax.Agent{\n\t\t\tImage: determineDefaultStepImage(params.DefaultImage),\n\t\t},\n\t}\n\n\tparsedPipeline := &syntax.ParsedPipeline{\n\t\tStages: []syntax.Stage{stage},\n\t}\n\n\tenv := buildEnvParams(params)\n\tparsedPipeline.AddContainerEnvVarsToPipeline(env)\n\n\treturn parsedPipeline, nil\n}", "title": "" }, { "docid": "3a2a43f46407ee59a2f5aec49304be0d", "score": "0.5318411", "text": "func NewPipelineUpserter(ctx *common.Context, tokenProvider func(bool) string) Executor {\n\n\tworkflow := new(pipelineWorkflow)\n\tworkflow.codeRevision = ctx.Config.Repo.Revision\n\tworkflow.repoName = ctx.Config.Repo.Slug\n\n\tif ctx.Config.Repo.Branch != \"\" {\n\t\tworkflow.codeBranch = ctx.Config.Repo.Branch\n\t} else {\n\t\tworkflow.codeBranch = ctx.Config.Service.Pipeline.Source.Branch\n\t}\n\n\tstackParams := make(map[string]string)\n\n\treturn newPipelineExecutor(\n\t\tworkflow.serviceFinder(\"\", ctx),\n\t\tworkflow.pipelineToken(ctx.Config.Namespace, tokenProvider, ctx.StackManager, stackParams),\n\t\tnewConditionalExecutor(\n\t\t\tworkflow.isFromCatalog(&ctx.Config.Service.Pipeline),\n\t\t\tworkflow.pipelineCatalogUpserter(ctx.Config.Namespace, &ctx.Config.Service.Pipeline, stackParams, ctx.CatalogManager, 
ctx.StackManager),\n\t\t\tnewPipelineExecutor(\n\t\t\t\tnewParallelExecutor(\n\t\t\t\t\tworkflow.pipelineBucket(ctx.Config.Namespace, stackParams, ctx.StackManager, ctx.StackManager),\n\t\t\t\t\tworkflow.codedeployBucket(ctx.Config.Namespace, &ctx.Config.Service, ctx.StackManager, ctx.StackManager),\n\t\t\t\t),\n\t\t\t\tworkflow.pipelineRolesetUpserter(ctx.RolesetManager, ctx.RolesetManager, stackParams),\n\t\t\t\tworkflow.pipelineUpserter(ctx.Config.Namespace, ctx.StackManager, ctx.StackManager, stackParams),\n\t\t\t),\n\t\t),\n\t\tworkflow.pipelineNotifyUpserter(ctx.Config.Namespace, &ctx.Config.Service.Pipeline, ctx.SubscriptionManager))\n\n}", "title": "" }, { "docid": "d53445ccab54094ae2f7f0c34add1fa0", "score": "0.53077865", "text": "func (client *Client) CreatePipeline(\n\tctx context.Context, pipeline *graylog.Pipeline,\n) (*ErrorInfo, error) {\n\treturn client.callPost(\n\t\tctx, client.Endpoints().Pipelines(), pipeline, &pipeline)\n}", "title": "" }, { "docid": "f123568da65fe053a98a103fce4033d5", "score": "0.5260787", "text": "func (c *MyConn) Pipeline() *MyPipeline {\n\treturn NewMyPipeline(c)\n}", "title": "" }, { "docid": "1caf61ebcf3c0b11750198244aaad658", "score": "0.52582693", "text": "func InitPipeline(pipelineType *pipeline.Definition,\n\tpipelineArguments *PipelineArguments,\n\tstepImplementations ...Step) (*PipelineInfo, error) {\n\n\ttimestamp := time.Now().Format(\"20060102150405\")\n\thash := strings.ToLower(utils.RandStringStrSeed(5, pipelineArguments.JobName))\n\tradixConfigMapName := fmt.Sprintf(\"radix-config-2-map-%s-%s-%s\", timestamp, pipelineArguments.ImageTag, hash)\n\tgitConfigFileName := fmt.Sprintf(\"radix-git-information-%s-%s-%s\", timestamp, pipelineArguments.ImageTag, hash)\n\n\tpodSecContext := securitycontext.Pod(securitycontext.WithPodFSGroup(fsGroup),\n\t\tsecuritycontext.WithPodSeccompProfile(corev1.SeccompProfileTypeRuntimeDefault))\n\tcontainerSecContext := securitycontext.Container(securitycontext.WithContainerDropAllCapabilities(),\n\t\tsecuritycontext.WithContainerSeccompProfile(corev1.SeccompProfileTypeRuntimeDefault),\n\t\tsecuritycontext.WithContainerRunAsGroup(runAsGroup),\n\t\tsecuritycontext.WithContainerRunAsUser(runAsUser))\n\n\tpipelineArguments.ContainerSecurityContext = *containerSecContext\n\tpipelineArguments.PodSecurityContext = *podSecContext\n\n\tstepImplementationsForType, err := getStepStepImplementationsFromType(pipelineType, stepImplementations...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &PipelineInfo{\n\t\tDefinition: pipelineType,\n\t\tPipelineArguments: *pipelineArguments,\n\t\tSteps: stepImplementationsForType,\n\t\tRadixConfigMapName: radixConfigMapName,\n\t\tGitConfigMapName: gitConfigFileName,\n\t}, nil\n}", "title": "" }, { "docid": "c3f37b357978132de734e82b1b7c98dd", "score": "0.52522105", "text": "func New(input *bytes.Buffer, command ...Execute) *commandPipeline {\n\treturn &commandPipeline{\n\t\tcommands: append([]Execute{}, command...),\n\t\tinput: input,\n\t}\n}", "title": "" }, { "docid": "51c45a676e9e846f41ee13074c3be603", "score": "0.52433074", "text": "func AddPipeline(name string, projectKey string, pipelineType string, params []Parameter) error {\n\tp := Pipeline{\n\t\tName: name,\n\t\tProjectKey: projectKey,\n\t\tType: pipelineType,\n\t\tParameter: params,\n\t}\n\n\tdata, err := json.Marshal(p)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\turl := fmt.Sprintf(\"/project/%s/pipeline\", projectKey)\n\t_, _, err = Request(\"POST\", url, data)\n\treturn err\n}", "title": "" }, { "docid": 
"5cef9e09611f7dbdcba82c7fdc01ae0f", "score": "0.52355534", "text": "func NewMockPipelineFrom(i iface.Pipeline) *MockPipeline {\n\treturn &MockPipeline{\n\t\tAddFunc: &PipelineAddFunc{\n\t\t\tdefaultHook: i.Add,\n\t\t},\n\t\tRunFunc: &PipelineRunFunc{\n\t\t\tdefaultHook: i.Run,\n\t\t},\n\t}\n}", "title": "" }, { "docid": "b6f635444218c19364d46a899b5e5cb7", "score": "0.52323204", "text": "func NewMockPipeline() *MockPipeline {\n\treturn &MockPipeline{\n\t\tAddFunc: &PipelineAddFunc{\n\t\t\tdefaultHook: func(string, ...interface{}) {\n\t\t\t\treturn\n\t\t\t},\n\t\t},\n\t\tRunFunc: &PipelineRunFunc{\n\t\t\tdefaultHook: func() (interface{}, error) {\n\t\t\t\treturn nil, nil\n\t\t\t},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "e1d9ee799d475713bc0b0cd0fc9bf619", "score": "0.5226728", "text": "func NewProcessPipeline(pipes ...ProcessPipe) *ProcessPipeline {\n\thead := make(chan data.ParsedResponse)\n\tvar next_chan chan data.ParsedResponse\n\tfor _, pipe := range pipes {\n\t\tif next_chan == nil {\n\t\t\tnext_chan = pipe.Process(head)\n\t\t} else {\n\t\t\tnext_chan = pipe.Process(next_chan)\n\t\t}\n\t}\n\treturn &ProcessPipeline{head: head, tail: next_chan}\n}", "title": "" }, { "docid": "7335baa592ddc55d32a4c0b026c38f74", "score": "0.52166855", "text": "func (es *Connection) CreatePipeline(\n\tid string,\n\tparams map[string]string,\n\tbody interface{},\n) (int, *QueryResult, error) {\n\treturn withQueryResult(es.apiCall(\"PUT\", \"_ingest\", \"pipeline\", id, \"\", params, body))\n}", "title": "" }, { "docid": "dd9cc2d633edb5ae2769e107415efcdc", "score": "0.52106625", "text": "func New(ctx context.Context, opts ...Option) (*Protocol, error) {\n\tt := &Protocol{}\n\tt.incoming = make(chan pubsub.Message)\n\tif err := t.applyOptions(opts...); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif t.client == nil {\n\t\t// Auth to pubsub.\n\t\tclient, err := pubsub.NewClient(ctx, t.projectID)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\t// Success.\n\t\tt.client = client\n\t}\n\n\tif t.connectionsBySubscription == nil {\n\t\tt.connectionsBySubscription = make(map[string]*internal.Connection)\n\t}\n\n\tif t.connectionsByTopic == nil {\n\t\tt.connectionsByTopic = make(map[string]*internal.Connection)\n\t}\n\treturn t, nil\n}", "title": "" }, { "docid": "ecfdd023981aabdc779ac20f35cd5431", "score": "0.52062", "text": "func NewReceivePipeline(parsePipe ParsePipe, pipes ...ReceivePipe) *ReceivePipeline {\n\thead := make(chan announced.Response)\n\tvar next_chan chan announced.Response\n\tfor _, pipe := range pipes {\n\t\tif next_chan == nil {\n\t\t\tnext_chan = pipe.Process(head)\n\t\t} else {\n\t\t\tnext_chan = pipe.Process(next_chan)\n\t\t}\n\t}\n\tlast_chan := parsePipe.Process(next_chan)\n\treturn &ReceivePipeline{head: head, tail: last_chan}\n}", "title": "" }, { "docid": "8ce24dc2f9ba31091f3b251309273561", "score": "0.5195645", "text": "func (p *Pipeline) ToLibrary() *library.Pipeline {\n\tpipeline := 
new(library.Pipeline)\n\n\tpipeline.SetID(p.ID.Int64)\n\tpipeline.SetRepoID(p.RepoID.Int64)\n\tpipeline.SetCommit(p.Commit.String)\n\tpipeline.SetFlavor(p.Flavor.String)\n\tpipeline.SetPlatform(p.Platform.String)\n\tpipeline.SetRef(p.Ref.String)\n\tpipeline.SetType(p.Type.String)\n\tpipeline.SetVersion(p.Version.String)\n\tpipeline.SetExternalSecrets(p.ExternalSecrets.Bool)\n\tpipeline.SetInternalSecrets(p.InternalSecrets.Bool)\n\tpipeline.SetServices(p.Services.Bool)\n\tpipeline.SetStages(p.Stages.Bool)\n\tpipeline.SetSteps(p.Steps.Bool)\n\tpipeline.SetTemplates(p.Templates.Bool)\n\tpipeline.SetData(p.Data)\n\n\treturn pipeline\n}", "title": "" }, { "docid": "d3d99f1291f47c484086628f6c38fd5a", "score": "0.5184365", "text": "func (cl *tektonClient) CreatePipelineResource(pipelineresource *v1alpha1.PipelineResource) error {\n\t_, err := cl.resourceClient.PipelineResources(cl.namespace).Create(pipelineresource)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d110af61c25b77bbcf372a46ef67edbc", "score": "0.5119762", "text": "func New(options map[string]string) model.Module {\n\tlog.WithFields(log.Fields{\n\t\t\"id\": ModuleID,\n\t\t\"options\": options,\n\t}).Debug(\"Creating new Module\")\n\tif os.Getenv(\"DOCKER_HOST\") != \"\" && dockerEndpoint != \"unix:///var/run/docker.sock\" { //If default value of tag + env set\n\t\tdockerEndpoint = os.Getenv(\"DOCKER_HOST\")\n\t}\n\tclient, err := docker.NewClient(dockerEndpoint)\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"client\": client,\n\t\t\t\"err\": err,\n\t\t}).Warn(\"Failed to create docker client\")\n\t\t//return nil\n\t}\n\treturn &Module{Endpoint: dockerEndpoint, Client: client, event: setListener(client)}\n}", "title": "" }, { "docid": "5096e4162f1649a1f532dd5113accb96", "score": "0.5104944", "text": "func CreatePipeline(dsl Pipeline) pipeline.GroovePipeline {\n\t// Register the sources used in the groove pipeline.\n\tRegisterSources()\n\n\t// Create a groove pipeline from the boogie dsl.\n\tg := pipeline.GroovePipeline{}\n\tif s, ok := querySourceMapping[dsl.Query.Format]; ok {\n\t\tg.QueriesSource = s\n\t} else {\n\t\tlog.Fatalf(\"%v is not a known query source\", dsl.Query.Format)\n\t}\n\n\tif len(dsl.Statistic.Source) > 0 {\n\t\tif s, ok := statisticSourceMapping[dsl.Statistic.Source]; ok {\n\t\t\tg.StatisticsSource = s\n\t\t} else {\n\t\t\tlog.Fatalf(\"%v is not a known statistics source\", dsl.Statistic.Source)\n\t\t}\n\t}\n\n\tif len(dsl.Statistic.Source) == 0 && len(dsl.Measurements) > 0 {\n\t\tlog.Fatal(\"A statistic source is required for measurements\")\n\t}\n\n\tif len(dsl.Measurements) > 0 && len(dsl.Output.Measurements) == 0 {\n\t\tlog.Fatal(\"At least one output format must be supplied when using analysis measurements\")\n\t}\n\n\tif len(dsl.Output.Measurements) > 0 && len(dsl.Measurements) == 0 {\n\t\tlog.Fatal(\"At least one analysis measurement must be supplied for the output formats\")\n\t}\n\n\tif len(dsl.Evaluations) > 0 && len(dsl.Output.Evaluations.Measurements) == 0 {\n\t\tlog.Fatal(\"At least one output format must be supplied when using evaluation measurements\")\n\t}\n\n\tif len(dsl.Output.Evaluations.Measurements) > 0 && len(dsl.Evaluations) == 0 {\n\t\tlog.Fatal(\"At least one evaluation measurement must be supplied for the output formats\")\n\t}\n\n\tg.Measurements = []analysis.Measurement{}\n\tfor _, measurementName := range dsl.Measurements {\n\t\tif m, ok := measurementMapping[measurementName]; ok {\n\t\t\tg.Measurements = append(g.Measurements, 
m)\n\t\t} else {\n\t\t\tlog.Fatalf(\"%v is not a known measurement\", measurementName)\n\t\t}\n\t}\n\n\tg.Evaluations = []eval.Evaluator{}\n\tfor _, evaluationMeasurement := range dsl.Evaluations {\n\t\tif m, ok := evaluationMapping[evaluationMeasurement.Evaluation]; ok {\n\t\t\tg.Evaluations = append(g.Evaluations, m)\n\t\t} else {\n\t\t\tlog.Fatalf(\"%v is not a known evaluation measurement\", evaluationMeasurement.Evaluation)\n\t\t}\n\t}\n\n\tif len(dsl.Output.Evaluations.Qrels) > 0 {\n\t\tb, err := ioutil.ReadFile(dsl.Output.Evaluations.Qrels)\n\t\tif err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\t\tqrels, err := trecresults.QrelsFromReader(bytes.NewReader(b))\n\t\tif err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\t\tg.EvaluationQrels = qrels\n\t}\n\n\tg.MeasurementFormatters = []output.MeasurementFormatter{}\n\tfor _, formatter := range dsl.Output.Measurements {\n\t\tif o, ok := measurementFormatters[formatter.Format]; ok {\n\t\t\tg.MeasurementFormatters = append(g.MeasurementFormatters, o)\n\t\t} else {\n\t\t\tlog.Fatalf(\"%v is not a known measurement output format\", formatter.Format)\n\t\t}\n\t}\n\n\tg.EvaluationFormatters = []output.EvaluationFormatter{}\n\tfor _, formatter := range dsl.Output.Evaluations.Measurements {\n\t\tif o, ok := evaluationFormatters[formatter.Format]; ok {\n\t\t\tg.EvaluationFormatters = append(g.EvaluationFormatters, o)\n\t\t} else {\n\t\t\tlog.Fatalf(\"%v is not a known evaluation output format\", formatter.Format)\n\t\t}\n\t}\n\n\tg.Preprocess = []preprocess.QueryProcessor{}\n\tfor _, p := range dsl.Preprocess {\n\t\tif processor, ok := preprocessorMapping[p]; ok {\n\t\t\tg.Preprocess = append(g.Preprocess, processor)\n\t\t} else {\n\t\t\tlog.Fatalf(\"%v is not a known preprocessor\", p)\n\t\t}\n\t}\n\n\tg.Transformations = preprocess.QueryTransformations{}\n\tfor _, t := range dsl.Transformations.Operations {\n\t\tif transformation, ok := transformationMappingBoolean[t]; ok {\n\t\t\tg.Transformations.BooleanTransformations = append(g.Transformations.BooleanTransformations, transformation)\n\t\t} else if transformation, ok := transformationMappingElasticsearch[t]; ok {\n\t\t\tg.Transformations.ElasticsearchTransformations = append(g.Transformations.ElasticsearchTransformations, transformation)\n\t\t} else {\n\t\t\tlog.Fatalf(\"%v is not a known preprocessing transformation\", t)\n\t\t}\n\t}\n\n\t//g.QueryChain\n\tif len(dsl.Rewrite.Chain) > 0 && len(dsl.Rewrite.Transformations) > 0 {\n\t\tvar transformations []rewrite.Transformation\n\t\tfor _, transformation := range dsl.Rewrite.Transformations {\n\t\t\tif t, ok := rewriteTransformationMapping[transformation]; ok {\n\t\t\t\ttransformations = append(transformations, t)\n\t\t\t} else {\n\t\t\t\tlog.Fatalf(\"%v is not a known rewrite transformation\", transformation)\n\t\t\t}\n\t\t}\n\n\t\tif qc, ok := queryChainCandidateSelectorMapping[dsl.Rewrite.Chain]; ok {\n\t\t\tg.QueryChain = rewrite.NewQueryChain(qc, transformations...)\n\t\t} else {\n\t\t\tlog.Fatalf(\"%v is not a known query chain candidate selector\", dsl.Rewrite.Chain)\n\t\t}\n\n\t}\n\n\tg.Transformations.Output = dsl.Transformations.Output\n\tg.OutputTrec.Path = dsl.Output.Trec.Output\n\treturn g\n}", "title": "" }, { "docid": "0d5363b4453cdba5da9fcbe32eaf0c96", "score": "0.5102145", "text": "func (s *PipelineApiService) CreatePipeline(pipeline Pipeline) (interface{}, error) {\n\t// TODO - update CreatePipeline with the required logic for this service method.\n\t// Add api_pipeline_service.go to the .openapi-generator-ignore to avoid 
overwriting this service implementation when updating open api generation.\n\treturn nil, errors.New(\"service method 'CreatePipeline' not implemented\")\n}", "title": "" }, { "docid": "411067b772c3a755bb68204ae6b6ee25", "score": "0.5078076", "text": "func (c *MockClient) CreatePipeline(pipeline *corev2.Pipeline) error {\n\targs := c.Called(pipeline)\n\treturn args.Error(0)\n}", "title": "" }, { "docid": "8203ca5b6f3cf31d8b749823818aacc7", "score": "0.5056241", "text": "func LoadPipeline(db gorp.SqlExecutor, projectKey, name string, deep bool) (*sdk.Pipeline, error) {\n\tvar p sdk.Pipeline\n\n\tvar lastModified time.Time\n\tquery := `SELECT pipeline.id, pipeline.name, pipeline.description, pipeline.project_id, pipeline.last_modified, pipeline.from_repository\n\t\t\tFROM pipeline\n\t \t\t\tJOIN project on pipeline.project_id = project.id\n\t \t\tWHERE pipeline.name = $1 AND project.projectKey = $2`\n\n\tif err := db.QueryRow(query, name, projectKey).Scan(&p.ID, &p.Name, &p.Description, &p.ProjectID, &lastModified, &p.FromRepository); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil, sdk.WithStack(sdk.ErrPipelineNotFound)\n\t\t}\n\t\treturn nil, sdk.WithStack(err)\n\t}\n\tp.LastModified = lastModified.Unix()\n\tp.ProjectKey = projectKey\n\n\tif deep {\n\t\tif err := loadPipelineDependencies(context.TODO(), db, &p); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t} else {\n\t\tparameters, err := GetAllParametersInPipeline(context.TODO(), db, p.ID)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tp.Parameter = parameters\n\t}\n\n\treturn &p, nil\n}", "title": "" }, { "docid": "8d6466dec76f0205b44c168476dd8b9b", "score": "0.5037986", "text": "func (pool *Pool) Pipeline() *Pipeline {\n\treturn BlankPipeline(int64(pool.DB))\n}", "title": "" }, { "docid": "3adea6f4f70960d51282f378a84039a2", "score": "0.5031702", "text": "func NewCli(name string, link *PipelineLink) (cli *Cli, err error) {\n\tcli = &Cli{\n\t\tParser: subcommand.NewParser(name),\n\t\tOutput: os.Stdout,\n\t}\n\t//set the help command\n\tcli.setHelp()\n\t//when the first command is processed\n\t//initialise the link so we take into account the\n\t//global configuration flags\n\tcli.PostFlags(func() error {\n\t\tif err = link.Init(); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tscripts, err := link.Scripts()\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Error loading scripts:\\n\\t%v\\n\", err)\n\t\t\tos.Exit(-1)\n\t\t}\n\t\tcli.AddScripts(scripts, link)\n\t\tif !link.IsLocal() {\n\t\t\t//it we are not in local mode we need to send the data\n\t\t\tfor _, cmd := range cli.Scripts {\n\n\t\t\t\tcmd.addDataOption()\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t})\n\t//add config flags\n\tcli.addConfigOptions(link.config)\n\treturn\n}", "title": "" }, { "docid": "282c2d6f4d62b97b7352f8eddf24529d", "score": "0.4987802", "text": "func TestMalformedPipeline(t *testing.T) {\n\tt.Parallel()\n\tc, _ := minikubetestenv.AcquireCluster(t)\n\n\tpipelineName := tu.UniqueString(\"MalformedPipeline\")\n\n\tvar err error\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"request.Pipeline cannot be nil\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName)},\n\t)\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"must specify a transform\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: 
client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: &pps.Input{},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"no input set\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tService: &pps.Service{},\n\t\tParallelismSpec: &pps.ParallelismSpec{},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"services can only be run with a constant parallelism of 1\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tSpecCommit: &pfs.Commit{},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"cannot resolve commit with no repo\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tSpecCommit: &pfs.Commit{Branch: &pfs.Branch{}},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"cannot resolve commit with no repo\", err.Error())\n\n\tdataRepo := tu.UniqueString(\"TestMalformedPipeline_data\")\n\trequire.NoError(t, c.CreateRepo(pfs.DefaultProjectName, dataRepo))\n\n\tdataCommit := client.NewCommit(pfs.DefaultProjectName, dataRepo, \"master\", \"\")\n\trequire.NoError(t, c.PutFile(dataCommit, \"file\", strings.NewReader(\"foo\"), client.WithAppendPutFile()))\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: &pps.Input{Pfs: &pps.PFSInput{}},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"input must specify a name\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: &pps.Input{Pfs: &pps.PFSInput{Name: \"data\"}},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"input must specify a repo\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: &pps.Input{Pfs: &pps.PFSInput{Repo: dataRepo}},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"input must specify a glob\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: client.NewPFSInput(pfs.DefaultProjectName, \"out\", \"/*\"),\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"input cannot be named out\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: &pps.Input{Pfs: &pps.PFSInput{Name: \"out\", Repo: dataRepo, Glob: \"/*\"}},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"input cannot be named out\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: 
&pps.Transform{},\n\t\tInput: &pps.Input{Pfs: &pps.PFSInput{Name: \"data\", Repo: \"dne\", Glob: \"/*\"}},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"dne[^ ]* not found\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: client.NewCrossInput(\n\t\t\tclient.NewPFSInput(pfs.DefaultProjectName, \"foo\", \"/*\"),\n\t\t\tclient.NewPFSInput(pfs.DefaultProjectName, \"foo\", \"/*\"),\n\t\t),\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"name \\\"foo\\\" was used more than once\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: &pps.Input{Cron: &pps.CronInput{}},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"input must specify a name\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: &pps.Input{Cron: &pps.CronInput{Name: \"cron\"}},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"Empty spec string\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: &pps.Input{Cross: []*pps.Input{}},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"no input set\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: &pps.Input{Union: []*pps.Input{}},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"no input set\", err.Error())\n\n\t_, err = c.PpsAPIClient.CreatePipeline(c.Ctx(), &pps.CreatePipelineRequest{\n\t\tPipeline: client.NewPipeline(pfs.DefaultProjectName, pipelineName),\n\t\tTransform: &pps.Transform{},\n\t\tInput: &pps.Input{Join: []*pps.Input{}},\n\t})\n\trequire.YesError(t, err)\n\trequire.Matches(t, \"no input set\", err.Error())\n}", "title": "" }, { "docid": "6f73d3a57b5d972a95621336f6fae50d", "score": "0.4985436", "text": "func New(pcpc *PipeCSPairConfig) (*PipeDialer, *PipeListener) {\n\tconnC := make(chan net.Conn)\n\n\tnoOpIfNil := func(rwd RWDebugger) RWDebugger {\n\t\tif rwd == nil {\n\t\t\treturn func(log.Logger, bool, bool, int, []byte, int, error) {}\n\t\t}\n\t\treturn rwd\n\t}\n\n\tpd := &PipeDialer{\n\t\tconnC: connC,\n\n\t\tlogger: pcpc.Logger,\n\n\t\tclientReadDebug: noOpIfNil(pcpc.ClientReadDebug),\n\t\tclientWriteDebug: noOpIfNil(pcpc.ClientWriteDebug),\n\n\t\tserverReadDebug: noOpIfNil(pcpc.ServerReadDebug),\n\t\tserverWriteDebug: noOpIfNil(pcpc.ServerWriteDebug),\n\t}\n\n\tpl := &PipeListener{\n\t\tconnC: connC,\n\t\tdoneC: make(chan struct{}),\n\t\taddr: &Addr{\n\t\t\tNetworkStr: \"pipe\",\n\t\t\tStringStr: \"127.0.0.1:22\",\n\t\t},\n\t\tcloseOnce: &sync.Once{},\n\t}\n\n\treturn pd, pl\n}", "title": "" }, { "docid": "387d08e8bebb5d1c1f52f440b3bd707f", "score": "0.4969579", "text": "func NewPipelineTrigger(ctx *pulumi.Context,\n\tname string, args *PipelineTriggerArgs, opts ...pulumi.ResourceOption) (*PipelineTrigger, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required 
arguments\")\n\t}\n\n\tif args.Description == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'Description'\")\n\t}\n\tif args.Project == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'Project'\")\n\t}\n\tsecrets := pulumi.AdditionalSecretOutputs([]string{\n\t\t\"token\",\n\t})\n\topts = append(opts, secrets)\n\topts = internal.PkgResourceDefaultOpts(opts)\n\tvar resource PipelineTrigger\n\terr := ctx.RegisterResource(\"gitlab:index/pipelineTrigger:PipelineTrigger\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "title": "" }, { "docid": "3660bb35f8b84547a7e425a19a2f0d8f", "score": "0.4933267", "text": "func NewPipe(conf *PipeConf, stoppable bool) *Pipe {\n\tpipe := &Pipe{\n\t\tlocal: utils.NewEndpoint(conf.Local),\n\t\tremote: utils.NewEndpoint(conf.Remote),\n\t\tterminate: make(chan bool),\n\t\tstoppable: stoppable,\n\t\tclientsMap: make(map[string]net.Conn),\n\t}\n\treturn pipe\n}", "title": "" }, { "docid": "1a37fbf6f311de449694fdd9227e8f2b", "score": "0.4921419", "text": "func NewPipelineStore(db *DB, time util.TimeInterface, uuid util.UUIDGeneratorInterface) *PipelineStore {\n\treturn &PipelineStore{db: db, time: time, uuid: uuid}\n}", "title": "" }, { "docid": "14852b5d4cfbd9071851ff55ad71171a", "score": "0.49066195", "text": "func CreateCDPipeline(name types.NamespacedName, stageNamespace string) *pipelinev1.Pipeline {\n\treturn &pipelinev1.Pipeline{\n\t\tTypeMeta: pipelineTypeMeta,\n\t\tObjectMeta: meta.ObjectMeta(name),\n\t\tSpec: pipelinev1.PipelineSpec{\n\t\t\tResources: []pipelinev1.PipelineDeclaredResource{\n\t\t\t\tcreatePipelineDeclaredResource(\"source-repo\", \"git\"),\n\t\t\t},\n\t\t\tTasks: []pipelinev1.PipelineTask{\n\t\t\t\tcreateCDPipelineTask(\"apply-source\"),\n\t\t\t},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "02bf65bc3502bea57df64faf3de41692", "score": "0.49023187", "text": "func Pipeline(g *graph.Graph, id string, factory *Factory, top Values) executor.Pipeline {\n\tp := pipelineGen{Graph: g, RenderingPlant: factory, Top: top, ID: id}\n\treturn executor.NewPipeline().\n\t\tAndThen(p.maybeTransformRoot).\n\t\tAndThen(p.prepareNode).\n\t\tAndThen(p.wrapTask)\n}", "title": "" }, { "docid": "54cc8034765f8e1ca491d86dda51f518", "score": "0.48711908", "text": "func MRPipelineCreate(project int64, iid string) (*gitlab.PipelineInfo, error) {\n\tvar options []gitlab.OptionFunc\n\tu := fmt.Sprintf(\"projects/%d/merge_requests/%s/pipelines\", project, iid)\n\treq, err := lab.NewRequest(\"POST\", u, nil, options)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tp := new(gitlab.PipelineInfo)\n\t_, err = lab.Do(req, p)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn p, err\n}", "title": "" }, { "docid": "1256c63b7afaf28f40a3dcb49754254b", "score": "0.4862171", "text": "func New(config config.Config) Plugins {\n\tpkg := Plugins{\n\t\tclient: http.NewClient(config),\n\t\tpager: &http.LinkHeaderPager{},\n\t}\n\n\treturn pkg\n}", "title": "" }, { "docid": "c36cada57ff83b5de0a360cc2b645a8c", "score": "0.4856871", "text": "func NewPipelineSpec(spec *pipeline.Spec) PipelineSpec {\n\treturn PipelineSpec{\n\t\tID: spec.ID,\n\t\tDotDAGSource: spec.DotDagSource,\n\t}\n}", "title": "" }, { "docid": "990f35853c7aa595e3ae7b42c6fa91ed", "score": "0.48430696", "text": "func NewCmdGetPipeline(commonOpts *opts.CommonOptions) *cobra.Command {\n\toptions := &PipelineOptions{\n\t\tOptions: Options{\n\t\t\tCommonOptions: commonOpts,\n\t\t},\n\t}\n\n\tcmd := 
&cobra.Command{\n\t\tUse: \"pipelines [flags]\",\n\t\tShort: \"Display one or more Pipelines\",\n\t\tLong: getPipelineLong,\n\t\tExample: getPipelineExample,\n\t\tAliases: []string{\"pipe\", \"pipes\", \"pipeline\"},\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\toptions.Cmd = cmd\n\t\t\toptions.Args = args\n\t\t\terr := options.Run()\n\t\t\thelper.CheckErr(err)\n\t\t},\n\t}\n\n\toptions.AddGetFlags(cmd)\n\n\treturn cmd\n}", "title": "" }, { "docid": "5bb7cd537268d675fd5142ebc1ce524c", "score": "0.4841688", "text": "func PipelineReqFromInfo(pipelineInfo *ppsclient.PipelineInfo) *ppsclient.CreatePipelineRequest {\n\treturn &ppsclient.CreatePipelineRequest{\n\t\tPipeline: pipelineInfo.Pipeline,\n\t\tTransform: pipelineInfo.Transform,\n\t\tParallelismSpec: pipelineInfo.ParallelismSpec,\n\t\tEgress: pipelineInfo.Egress,\n\t\tOutputBranch: pipelineInfo.OutputBranch,\n\t\tScaleDownThreshold: pipelineInfo.ScaleDownThreshold,\n\t\tResourceRequests: pipelineInfo.ResourceRequests,\n\t\tResourceLimits: pipelineInfo.ResourceLimits,\n\t\tInput: pipelineInfo.Input,\n\t\tDescription: pipelineInfo.Description,\n\t\tIncremental: pipelineInfo.Incremental,\n\t\tCacheSize: pipelineInfo.CacheSize,\n\t\tEnableStats: pipelineInfo.EnableStats,\n\t\tBatch: pipelineInfo.Batch,\n\t\tMaxQueueSize: pipelineInfo.MaxQueueSize,\n\t\tService: pipelineInfo.Service,\n\t\tChunkSpec: pipelineInfo.ChunkSpec,\n\t\tDatumTimeout: pipelineInfo.DatumTimeout,\n\t\tJobTimeout: pipelineInfo.JobTimeout,\n\t\tSalt: pipelineInfo.Salt,\n\t}\n}", "title": "" }, { "docid": "1323200a85fa4acefb769821a4891f29", "score": "0.48347643", "text": "func NewSerializablePipeline() SerializablePipeline {\n\treturn SerializablePipeline{\n\t\tDisabled: false,\n\t\tKeepWaitingPipelines: false,\n\t\tLimitConcurrent: true,\n\t\tTriggers: []*Trigger{},\n\t\tAppConfig: map[string]interface{}{},\n\t\tParameterConfig: &[]*PipelineParameter{},\n\t}\n}", "title": "" }, { "docid": "e83f749db9b546e1e102c09e079d81d1", "score": "0.48098713", "text": "func (c *Client) CreatePipeline(ctx context.Context, params *CreatePipelineInput, optFns ...func(*Options)) (*CreatePipelineOutput, error) {\n\tif params == nil {\n\t\tparams = &CreatePipelineInput{}\n\t}\n\n\tresult, metadata, err := c.invokeOperation(ctx, \"CreatePipeline\", params, optFns, addOperationCreatePipelineMiddlewares)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tout := result.(*CreatePipelineOutput)\n\tout.ResultMetadata = metadata\n\treturn out, nil\n}", "title": "" }, { "docid": "a7fae14bc309528a1720af5ffafc0bf2", "score": "0.4806468", "text": "func NewPipelineSpec(spec *pipeline.Spec) PipelineSpec {\n\treturn PipelineSpec{\n\t\tID: spec.ID,\n\t\tJobID: spec.JobID,\n\t\tDotDAGSource: spec.DotDagSource,\n\t}\n}", "title": "" }, { "docid": "8010e327f6085a324dfadeff1996ba33", "score": "0.47984543", "text": "func StartPipeline(service *app.Service, stages ...Stage) *Pipeline {\n\tstartPipelineMutex.Lock()\n\tdefer startPipelineMutex.Unlock()\n\n\tif pipeline := GetPipeline(PipelineID(service.ID())); pipeline != nil {\n\t\treturn pipeline\n\t}\n\n\tcheckArgs := func() {\n\t\tif service == nil {\n\t\t\tpanic(\"A pipeline requires a service to run\")\n\t\t}\n\t\tif !service.Alive() {\n\t\t\tpanic(app.ServiceNotAliveError(service.ID()))\n\t\t}\n\t\tif len(stages) == 0 {\n\t\t\tpanic(\"A pipeline must have at least 1 stage\")\n\t\t}\n\t\tfor _, stage := range stages {\n\t\t\tif stage.Command().run == nil {\n\t\t\t\tpanic(fmt.Sprintf(\"Stage Command run function was nil for : ServiceID(0x%x)\", 
service.ID()))\n\t\t\t}\n\t\t}\n\n\t\tserviceID := service.ID()\n\t\tfor _, metricID := range COUNTER_METRIC_IDS {\n\t\t\tif app.MetricRegistry.Counter(serviceID, metricID) == nil {\n\t\t\t\tpanic(fmt.Sprintf(\"Counter metric is missing : MetricID(0x%x)\", metricID))\n\t\t\t}\n\t\t}\n\t\tfor _, metricID := range COUNTER_VECTOR_METRIC_IDS {\n\t\t\tif app.MetricRegistry.CounterVector(serviceID, metricID) == nil {\n\t\t\t\tpanic(fmt.Sprintf(\"Counter vector metric is missing : MetricID(0x%x)\", metricID))\n\t\t\t}\n\t\t}\n\t\tfor _, metricID := range GAUGE_METRIC_IDS {\n\t\t\tif app.MetricRegistry.Gauge(serviceID, metricID) == nil {\n\t\t\t\tpanic(fmt.Sprintf(\"Gauge metric is missing : MetricID(0x%x)\", metricID))\n\t\t\t}\n\t\t}\n\t}\n\n\tcheckArgs()\n\n\tserviceID := service.ID()\n\n\tpipeline := &Pipeline{\n\t\tService: service,\n\t\tstartedOn: time.Now(),\n\t\tin: make(chan context.Context),\n\t\tout: make(chan context.Context),\n\t\tstages: stages,\n\n\t\trunCounter: app.MetricRegistry.Counter(serviceID, PIPELINE_RUN_COUNT),\n\t\tfailedCounter: app.MetricRegistry.Counter(serviceID, PIPELINE_FAILED_COUNT),\n\t\tcontextExpiredCounter: app.MetricRegistry.Counter(serviceID, PIPELINE_CONTEXT_EXPIRED_COUNT),\n\t\tprocessingTime: app.MetricRegistry.Counter(serviceID, PIPELINE_PROCESSING_TIME_SEC),\n\t\tprocessingFailedTime: app.MetricRegistry.Counter(serviceID, PIPELINE_PROCESSING_TIME_SEC_FAILED),\n\t\tchannelDeliveryTime: app.MetricRegistry.Counter(serviceID, PIPELINE_CHANNEL_DELIVERY_TIME_SEC),\n\n\t\tpingPongCounter: app.MetricRegistry.Counter(serviceID, PIPELINE_PING_PONG_COUNT),\n\t\tpingPongTime: app.MetricRegistry.Counter(serviceID, PIPELINE_PING_PONG_TIME_SEC),\n\t\tpingExpiredCounter: app.MetricRegistry.Counter(serviceID, PIPELINE_PING_EXPIRED_COUNT),\n\t\tpingExpiredTime: app.MetricRegistry.Counter(serviceID, PIPELINE_PING_EXPIRED_TIME_SEC),\n\n\t\tconsecutiveSuccessCounter: app.MetricRegistry.Gauge(serviceID, PIPELINE_CONSECUTIVE_SUCCESS_COUNT),\n\t\tconsecutiveFailureCounter: app.MetricRegistry.Gauge(serviceID, PIPELINE_CONSECUTIVE_FAILURE_COUNT),\n\t\tconsecutiveExpiredCounter: app.MetricRegistry.Gauge(serviceID, PIPELINE_CONSECUTIVE_EXPIRED_COUNT),\n\n\t\tlastSuccessTime: app.MetricRegistry.Gauge(serviceID, PIPELINE_LAST_SUCCESS_TIME),\n\t\tlastFailureTime: app.MetricRegistry.Gauge(serviceID, PIPELINE_LAST_FAILURE_TIME),\n\t\tlastExpiredTime: app.MetricRegistry.Gauge(serviceID, PIPELINE_LAST_EXPIRED_TIME),\n\t\tlastPingSuccessTime: app.MetricRegistry.Gauge(serviceID, PIPELINE_LAST_PING_SUCCESS_TIME),\n\t\tlastPingExpiredTime: app.MetricRegistry.Gauge(serviceID, PIPELINE_LAST_PING_EXPIRED_TIME),\n\t}\n\n\tfirstStageCommandID := pipeline.stages[0].cmd.id\n\tvar build func(stages []Stage, in, out chan context.Context)\n\tbuild = func(stages []Stage, in, out chan context.Context) {\n\t\tcreateStageWorkers := func(stage Stage, process func(ctx context.Context)) {\n\t\t\tfor i := 0; i < int(stage.PoolSize()); i++ {\n\t\t\t\tif stage.cmd.id == firstStageCommandID {\n\t\t\t\t\tservice.Go(func() error {\n\t\t\t\t\t\tfor {\n\t\t\t\t\t\t\tselect {\n\t\t\t\t\t\t\tcase <-service.Dying():\n\t\t\t\t\t\t\t\treturn nil\n\t\t\t\t\t\t\tcase ctx := <-in:\n\t\t\t\t\t\t\t\tselect {\n\t\t\t\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\t\t\t\tpipelineContextExpired(ctx, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\t\t\t\t\tdefault:\n\t\t\t\t\t\t\t\t\t// record the time when the context started the workflow, i.e., entered the first stage of the pipeline\n\t\t\t\t\t\t\t\t\tctx 
= startWorkflowTimer(ctx)\n\t\t\t\t\t\t\t\t\tpipeline.runCounter.Inc()\n\t\t\t\t\t\t\t\t\tprocess(ctx)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t} else {\n\t\t\t\t\tservice.Go(func() error {\n\t\t\t\t\t\tfor {\n\t\t\t\t\t\t\tselect {\n\t\t\t\t\t\t\tcase <-service.Dying():\n\t\t\t\t\t\t\t\treturn nil\n\t\t\t\t\t\t\tcase ctx := <-in:\n\t\t\t\t\t\t\t\tselect {\n\t\t\t\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\t\t\t\tpipelineContextExpired(ctx, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\t\t\t\t\tdefault:\n\t\t\t\t\t\t\t\t\tprocess(ctx)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t}\n\n\t\t\t}\n\t\t}\n\t\tstage := stages[0]\n\t\tif len(stages) == 1 {\n\t\t\tcreateStageWorkers(stage, func(ctx context.Context) {\n\t\t\t\tif IsPing(ctx) {\n\t\t\t\t\t// reply with pong\n\t\t\t\t\tctx = withPong(ctx)\n\t\t\t\t\tout, ok := OutputChannel(ctx)\n\t\t\t\t\tif !ok {\n\t\t\t\t\t\tout = pipeline.out\n\t\t\t\t\t}\n\n\t\t\t\t\tselect {\n\t\t\t\t\tcase <-service.Dying():\n\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\tpipelineContextExpired(ctx, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\t\tcase out <- ctx:\n\t\t\t\t\t\tpipeline.lastPingSuccessTime.Set(float64(time.Now().Unix()))\n\t\t\t\t\t}\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\tresult := stage.run(ctx)\n\t\t\t\tprocessedTime := time.Now()\n\t\t\t\tprocessingDuration := time.Now().Sub(WorkflowStartTime(ctx))\n\t\t\t\tworkflowTime := processingDuration.Seconds()\n\t\t\t\tpipeline.processingTime.Add(workflowTime)\n\t\t\t\tif err := Error(result); err != nil {\n\t\t\t\t\tcontextFailed(pipeline, ctx)\n\t\t\t\t\tpipeline.failedCounter.Inc()\n\t\t\t\t\tpipeline.processingFailedTime.Add(workflowTime)\n\t\t\t\t\tresult = WithError(result, stage.Command().id, err)\n\t\t\t\t\tpipeline.lastFailureTime.Set(float64(time.Now().Unix()))\n\t\t\t\t\tpipeline.consecutiveFailureCounter.Inc()\n\t\t\t\t\tpipeline.consecutiveSuccessCounter.Set(0)\n\t\t\t\t}\n\n\t\t\t\tout, ok := OutputChannel(result)\n\t\t\t\tif !ok {\n\t\t\t\t\tout = pipeline.out\n\t\t\t\t}\n\n\t\t\t\tselect {\n\t\t\t\tcase <-service.Dying():\n\t\t\t\t\treturn\n\t\t\t\tcase <-result.Done():\n\t\t\t\t\tpipelineContextExpired(result, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\tcase out <- result:\n\t\t\t\t\tdeliveryTime := time.Now().Sub(processedTime).Seconds()\n\t\t\t\t\tpipeline.channelDeliveryTime.Add(deliveryTime)\n\n\t\t\t\t\tif Error(result) == nil {\n\t\t\t\t\t\tpipeline.lastSuccessTime.Set(float64(time.Now().Unix()))\n\t\t\t\t\t\tpipeline.consecutiveSuccessCounter.Inc()\n\t\t\t\t\t\tpipeline.consecutiveFailureCounter.Set(0)\n\t\t\t\t\t\tpipeline.consecutiveExpiredCounter.Set(0)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn\n\t\t}\n\n\t\tcreateStageWorkers(stage, func(ctx context.Context) {\n\t\t\tif IsPing(ctx) {\n\t\t\t\t// send the context downstream, i.e., to the next stage\n\t\t\t\tselect {\n\t\t\t\tcase <-service.Dying():\n\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\tpipelineContextExpired(ctx, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\tcase out <- ctx:\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tresult := stage.run(ctx)\n\t\t\tprocessedTime := time.Now()\n\t\t\tif err := Error(result); err != nil {\n\t\t\t\tcontextFailed(pipeline, ctx)\n\t\t\t\tpipeline.failedCounter.Inc()\n\t\t\t\tresult = WithError(result, stage.Command().id, 
err)\n\t\t\t\tpipeline.lastFailureTime.Set(float64(time.Now().Unix()))\n\t\t\t\tselect {\n\t\t\t\tcase <-service.Dying():\n\t\t\t\t\treturn\n\t\t\t\tcase <-result.Done():\n\t\t\t\t\tpipelineContextExpired(result, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\tcase pipeline.out <- result:\n\t\t\t\t\tdeliveryTime := time.Now().Sub(processedTime).Seconds()\n\t\t\t\t\tpipeline.channelDeliveryTime.Add(deliveryTime)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tselect {\n\t\t\t\tcase <-service.Dying():\n\t\t\t\t\treturn\n\t\t\t\tcase <-result.Done():\n\t\t\t\t\tpipelineContextExpired(result, pipeline, stage.Command().CommandID()).Log(pipeline.Service.Logger())\n\t\t\t\tcase out <- result:\n\t\t\t\t\tdeliveryTime := time.Now().Sub(processedTime).Seconds()\n\t\t\t\t\tpipeline.channelDeliveryTime.Add(deliveryTime)\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\n\t\tbuild(stages[1:], out, make(chan context.Context))\n\t}\n\n\tbuild(stages, pipeline.in, make(chan context.Context))\n\n\tgo func() {\n\t\tdefer unregisterPipeline(pipeline.ID())\n\t\tselect {\n\t\tcase <-service.Dying():\n\t\tcase <-app.Dying():\n\t\t}\n\t}()\n\n\tregisterPipeline(pipeline)\n\tapp.SERVICE_STARTED.Log(service.Logger().Info()).Msg(\"Pipeline started\")\n\n\treturn pipeline\n}", "title": "" }, { "docid": "569c29ebd645a6f422ab1fafe09b0501", "score": "0.47918433", "text": "func CICreate(pid interface{}, opts *gitlab.CreatePipelineOptions) (*gitlab.Pipeline, error) {\n\tp, _, err := lab.Pipelines.CreatePipeline(pid, opts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn p, nil\n}", "title": "" }, { "docid": "a33bfac850f319a92480398ce5b0bb1b", "score": "0.47906443", "text": "func (in *Pipeline) DeepCopy() *Pipeline {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Pipeline)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "title": "" }, { "docid": "a33bfac850f319a92480398ce5b0bb1b", "score": "0.47906443", "text": "func (in *Pipeline) DeepCopy() *Pipeline {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Pipeline)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "title": "" }, { "docid": "a33bfac850f319a92480398ce5b0bb1b", "score": "0.47906443", "text": "func (in *Pipeline) DeepCopy() *Pipeline {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Pipeline)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "title": "" }, { "docid": "79ce8642a0f909697ee8987a6ab77c05", "score": "0.47734654", "text": "func (a *apiServer) StartPipeline(ctx context.Context, request *pps.StartPipelineRequest) (response *emptypb.Empty, retErr error) {\n\tif request.Pipeline == nil {\n\t\treturn nil, errors.New(\"request.Pipeline cannot be nil\")\n\t}\n\tensurePipelineProject(request.Pipeline)\n\n\tif err := a.txnEnv.WithWriteContext(ctx, func(txnCtx *txncontext.TransactionContext) error {\n\t\tpipelineInfo, err := a.InspectPipelineInTransaction(txnCtx, request.Pipeline)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// check if the caller is authorized to update this pipeline\n\t\tif err := a.authorizePipelineOpInTransaction(ctx, txnCtx, pipelineOpStartStop, pipelineInfo.Details.Input, pipelineInfo.Pipeline.Project.GetName(), pipelineInfo.Pipeline.Name); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Restore branch provenance, which may create a new output commit/job\n\t\tprovenance := append(branchProvenance(pipelineInfo.Pipeline.Project, pipelineInfo.Details.Input),\n\t\t\tclient.NewSystemRepo(pipelineInfo.Pipeline.Project.GetName(), pipelineInfo.Pipeline.Name, pfs.SpecRepoType).NewBranch(\"master\"))\n\t\tif err := 
a.env.PFSServer.CreateBranchInTransaction(ctx, txnCtx, &pfs.CreateBranchRequest{\n\t\t\tBranch: client.NewBranch(pipelineInfo.Pipeline.Project.GetName(), pipelineInfo.Pipeline.Name, pipelineInfo.Details.OutputBranch),\n\t\t\tProvenance: provenance,\n\t\t}); err != nil {\n\t\t\treturn errors.EnsureStack(err)\n\t\t}\n\t\t// restore same provenance to meta repo\n\t\tif pipelineInfo.Details.Spout == nil && pipelineInfo.Details.Service == nil {\n\t\t\tif err := a.env.PFSServer.CreateBranchInTransaction(ctx, txnCtx, &pfs.CreateBranchRequest{\n\t\t\t\tBranch: client.NewSystemRepo(pipelineInfo.Pipeline.Project.GetName(), pipelineInfo.Pipeline.Name, pfs.MetaRepoType).NewBranch(pipelineInfo.Details.OutputBranch),\n\t\t\t\tProvenance: provenance,\n\t\t\t}); err != nil {\n\t\t\t\treturn errors.EnsureStack(err)\n\t\t\t}\n\t\t}\n\n\t\tnewPipelineInfo := &pps.PipelineInfo{}\n\t\treturn a.updatePipeline(txnCtx, pipelineInfo.Pipeline, newPipelineInfo, func() error {\n\t\t\tnewPipelineInfo.Stopped = false\n\t\t\treturn nil\n\t\t})\n\t}); err != nil {\n\t\treturn nil, err\n\t}\n\treturn &emptypb.Empty{}, nil\n}", "title": "" }, { "docid": "ad2dd0995a9371aa9c5a08217bebd69e", "score": "0.47664", "text": "func New(isMetricDisabled bool, driverType model.DriverType) *Module {\n\tm := &Module{\n\t\tisMetricsDisabled: true,\n\t\tclusterID: os.Getenv(\"CLUSTER_ID\"),\n\t\tnodeID: ksuid.New().String(),\n\t\tsink: api.New(\"spacecloud\", \"api.spaceuptech.com\", true).DB(\"db\"),\n\t\tdriverType: string(driverType),\n\t}\n\treturn m\n}", "title": "" }, { "docid": "330dc6b454c8860538ecebc6fe68ec36", "score": "0.47605938", "text": "func BuildPipeline(name,age string) {\n\tvar doc bytes.Buffer\n\n\tf, err := os.Create(outputDirPath + name + \".py\")\n\tcheck(err)\n\tdefer f.Close()\n\n\tm := map[string]interface{}{\"name\": name, \"age\": age}\n\tt := template.Must(template.New(\"\").Parse(scaffold.Pipeline))\n\tt.Execute(&doc, m)\n\n\tfmt.Println(doc.String())\n\twrittenValue, err := f.WriteString(doc.String())\n\tcheck(err)\n\tfmt.Printf(\"wrote %d bytes\\n\", writtenValue)\n\tf.Sync()\n}", "title": "" }, { "docid": "223af2f1231d737652326952e6a3544f", "score": "0.47545546", "text": "func CreateCIPipeline(name types.NamespacedName, stageNamespace string) *pipelinev1.Pipeline {\n\treturn &pipelinev1.Pipeline{\n\t\tTypeMeta: pipelineTypeMeta,\n\t\tObjectMeta: meta.ObjectMeta(name),\n\t\tSpec: pipelinev1.PipelineSpec{\n\n\t\t\tResources: []pipelinev1.PipelineDeclaredResource{\n\t\t\t\tcreatePipelineDeclaredResource(\"source-repo\", \"git\"),\n\t\t\t},\n\n\t\t\tTasks: []pipelinev1.PipelineTask{\n\t\t\t\tcreateCommitStatusPipelineTask(PendingCommitStatusTask, \"pending\", \"The build has started\"),\n\t\t\t\tcreateCIPipelineTask(\"apply-source\"),\n\t\t\t},\n\t\t\tParams: paramSpecs(\"REPO\", \"COMMIT_SHA\", \"GIT_REPO\"),\n\t\t\tFinally: []pipelinev1.PipelineTask{\n\t\t\t\tcreateCommitStatusPipelineTask(\"set-final-status\", \"$(tasks.apply-source.status)\", \"The build is complete\"),\n\t\t\t},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "106c6d5dca317d1fc786e2d6c24124fd", "score": "0.47418466", "text": "func StartPipeline(configPipeline *config.Pipeline, configAgents []config.Agent) (int, error) {\n\tp, err := newPipeline(configPipeline, configAgents)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tpipelines[p.ID] = p\n\n\terr = p.start()\n\n\treturn p.ID, err\n}", "title": "" }, { "docid": "a7193eafe1cdf7e804e2a0e4a15cd406", "score": "0.47304785", "text": "func (b *Builder) Pipeline() (*types.SpinnakerPipeline, error) 
{\n\tsp := &types.SpinnakerPipeline{\n\t\tLimitConcurrent: b.pipeline.DisableConcurrentExecutions,\n\t\tKeepWaitingPipelines: b.pipeline.KeepQueuedPipelines,\n\t\tDescription: b.pipeline.Description,\n\t\tAppConfig: map[string]interface{}{},\n\t}\n\n\tsp.Notifications = buildNotifications(b.pipeline.Notifications)\n\tsp.Triggers = make([]types.Trigger, 0)\n\n\tfor _, trigger := range b.pipeline.Triggers {\n\t\tif jt := trigger.Jenkins; jt != nil {\n\t\t\tsp.Triggers = append(sp.Triggers, &types.JenkinsTrigger{\n\t\t\t\tTriggerObject: types.TriggerObject{\n\t\t\t\t\tEnabled: newDefaultTrue(jt.Enabled),\n\t\t\t\t\tType: JenkinsTrigger,\n\t\t\t\t},\n\n\t\t\t\tJob: jt.Job,\n\t\t\t\tMaster: jt.Master,\n\t\t\t\tPropertyFile: jt.PropertyFile,\n\t\t\t})\n\t\t}\n\n\t\tif wh := trigger.Webhook; wh != nil {\n\t\t\tsp.Triggers = append(sp.Triggers, &types.WebhookTrigger{\n\t\t\t\tTriggerObject: types.TriggerObject{\n\t\t\t\t\tEnabled: wh.Enabled,\n\t\t\t\t\tType: WebhookTrigger,\n\t\t\t\t},\n\t\t\t\tSource: wh.Source,\n\t\t\t})\n\t\t}\n\t}\n\n\tsp.Parameters = make([]types.Parameter, len(b.pipeline.Parameters))\n\tfor i, param := range b.pipeline.Parameters {\n\t\tsp.Parameters[i] = types.Parameter{\n\t\t\tName: param.Name,\n\t\t\tDescription: param.Description,\n\t\t\tDefault: param.Default,\n\t\t\tRequired: param.Required,\n\t\t}\n\n\t\tif len(param.Options) > 0 {\n\t\t\tsp.Parameters[i].HasOptions = true\n\t\t\tfoundDefaultValue := param.Default == \"\"\n\t\t\tfor _, val := range param.Options {\n\t\t\t\tfoundDefaultValue = foundDefaultValue || param.Default == val.Value\n\t\t\t\tsp.Parameters[i].Options = append(sp.Parameters[i].Options, types.Option{\n\t\t\t\t\tValue: val.Value,\n\t\t\t\t})\n\t\t\t}\n\t\t\tif !foundDefaultValue {\n\t\t\t\treturn sp, errors.New(\"builder: the specified default value is not one of the options\")\n\t\t\t}\n\t\t}\n\t}\n\n\tvar stageIndex = 0\n\tfor _, stage := range b.pipeline.Stages {\n\t\tvar s types.Stage\n\t\tvar err error\n\n\t\t// if the account has an override, switch the account name\n\t\tif account, ok := b.overrideAccounts[stage.Account]; ok {\n\t\t\tstage.Account = account\n\t\t}\n\n\t\tif stage.RunJob != nil {\n\t\t\ts, err = b.buildRunJobStage(stageIndex, stage)\n\t\t\tif err != nil {\n\t\t\t\treturn sp, fmt.Errorf(\"Failed to b.buildRunJobStage with error: %v\", err)\n\t\t\t}\n\t\t\tstageIndex = stageIndex + 1\n\t\t}\n\t\tif stage.Deploy != nil {\n\t\t\ts, err = b.buildDeployStage(stageIndex, stage)\n\t\t\tif err != nil {\n\t\t\t\treturn sp, fmt.Errorf(\"Failed to b.buildDeployStage with error: %v\", err)\n\t\t\t}\n\t\t\tstageIndex = stageIndex + 1\n\t\t}\n\n\t\tif stage.ManualJudgement != nil {\n\t\t\ts, err = b.buildManualJudgementStage(stageIndex, stage)\n\t\t\tif err != nil {\n\t\t\t\treturn sp, fmt.Errorf(\"Failed to buildManualJudgementStage with error: %v\", err)\n\t\t\t}\n\t\t\tstageIndex = stageIndex + 1\n\t\t}\n\n\t\tif stage.DeployEmbeddedManifests != nil {\n\t\t\ts, err = b.buildDeployEmbeddedManifestStage(stageIndex, stage)\n\t\t\tif err != nil {\n\t\t\t\treturn sp, fmt.Errorf(\"Failed to buildDeployEmbeddedManifestStage with error: %s\", err)\n\t\t\t}\n\t\t\tstageIndex = stageIndex + 1\n\t\t}\n\n\t\tif stage.DeleteEmbeddedManifest != nil {\n\t\t\ts, err = b.buildDeleteEmbeddedManifestStage(stageIndex, stage)\n\t\t\tif err != nil {\n\t\t\t\treturn sp, fmt.Errorf(\"Failed to buildDeleteEmbeddedManifestStage with error: %v\", err)\n\t\t\t}\n\t\t\tstageIndex = stageIndex + 1\n\t\t}\n\n\t\tif stage.ScaleManifest != nil {\n\t\t\ts, err = 
b.buildScaleManifestStage(stageIndex, stage)\n\t\t\tif err != nil {\n\t\t\t\treturn sp, fmt.Errorf(\"Failed to buildScaleManifestStage with error: %v\", err)\n\t\t\t}\n\t\t\tstageIndex = stageIndex + 1\n\t\t}\n\n\t\tif stage.WebHook != nil {\n\t\t\ts, err = b.buildWebHookStage(stageIndex, stage)\n\t\t\tif err != nil {\n\t\t\t\treturn sp, fmt.Errorf(\"Failed to webhook stage with error: %v\", err)\n\t\t\t}\n\t\t\tstageIndex = stageIndex + 1\n\t\t}\n\n\t\tif stage.Jenkins != nil {\n\t\t\ts, err = b.buildJenkinsStage(stageIndex, stage)\n\t\t\tif err != nil {\n\t\t\t\treturn sp, fmt.Errorf(\"Failed to build jenkins stage with error: %v\", err)\n\t\t\t}\n\t\t\tstageIndex = stageIndex + 1\n\t\t}\n\n\t\tif stage.EvaluateVariables != nil {\n\t\t\ts, err = b.buildEvaluateVariablesStage(stageIndex, stage)\n\t\t\tif err != nil {\n\t\t\t\treturn sp, fmt.Errorf(\"failed to build evaluate variables stage with error: %v\", err)\n\t\t\t}\n\t\t\tstageIndex++\n\t\t}\n\n\t\tif stage.RunSpinnakerPipeline != nil {\n\t\t\ts, err = b.buildRunSpinnakerPipelineStage(stageIndex, stage)\n\t\t\tif err != nil {\n\t\t\t\treturn sp, fmt.Errorf(\"Failed to build spinnaker pipeline stage with error: %v\", err)\n\t\t\t}\n\t\t\tstageIndex = stageIndex + 1\n\t\t}\n\n\t\tsp.Stages = append(sp.Stages, s)\n\t}\n\n\treturn sp, nil\n}", "title": "" }, { "docid": "e61ac1b2b980e8527d114e45b91b164c", "score": "0.47146916", "text": "func (pm *PipelineManager) newDeployment(pipeline *api.Pipeline) *appsv1.Deployment {\n\tlbls := pipeLabels(pipeline)\n\n\tdeployment := &appsv1.Deployment{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: pipeline.Name,\n\t\t\tNamespace: pipeline.Namespace,\n\t\t\tOwnerReferences: []metav1.OwnerReference{\n\t\t\t\t*metav1.NewControllerRef(pipeline, api.SchemeGroupVersion.WithKind(api.PipelineResourceKind)),\n\t\t\t},\n\t\t\tLabels: lbls,\n\t\t},\n\t\tSpec: appsv1.DeploymentSpec{\n\t\t\tSelector: &metav1.LabelSelector{\n\t\t\t\tMatchLabels: lbls,\n\t\t\t},\n\t\t\tStrategy: appsv1.DeploymentStrategy{\n\t\t\t\tType: appsv1.RecreateDeploymentStrategyType,\n\t\t\t},\n\t\t\tMinReadySeconds: 10,\n\t\t\tTemplate: corev1.PodTemplateSpec{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tLabels: lbls,\n\t\t\t\t},\n\t\t\t\tSpec: corev1.PodSpec{\n\t\t\t\t\tRestartPolicy: corev1.RestartPolicyAlways,\n\t\t\t\t\tVolumes: []corev1.Volume{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"config\",\n\t\t\t\t\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\t\t\t\t\tConfigMap: &corev1.ConfigMapVolumeSource{\n\t\t\t\t\t\t\t\t\tLocalObjectReference: corev1.LocalObjectReference{Name: pipeline.Name},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tContainers: []corev1.Container{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"gravity\",\n\t\t\t\t\t\t\tImage: pipeline.Spec.Image,\n\t\t\t\t\t\t\tCommand: pipeline.Spec.Command,\n\t\t\t\t\t\t\tPorts: []corev1.ContainerPort{\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\tName: \"http\",\n\t\t\t\t\t\t\t\t\tContainerPort: containerPort,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tLivenessProbe: &corev1.Probe{\n\t\t\t\t\t\t\t\tHandler: corev1.Handler{\n\t\t\t\t\t\t\t\t\tHTTPGet: &corev1.HTTPGetAction{\n\t\t\t\t\t\t\t\t\t\tPort: intstr.FromString(\"http\"),\n\t\t\t\t\t\t\t\t\t\tPath: \"/healthz\",\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\tInitialDelaySeconds: 10,\n\t\t\t\t\t\t\t\tTimeoutSeconds: 5,\n\t\t\t\t\t\t\t\tPeriodSeconds: 10,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\tName: 
\"config\",\n\t\t\t\t\t\t\t\t\tMountPath: \"/etc/gravity\",\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tResources: corev1.ResourceRequirements{ //TODO from tps config or metrics\n\t\t\t\t\t\t\t\tRequests: corev1.ResourceList{\n\t\t\t\t\t\t\t\t\t\"cpu\": resource.MustParse(\"100m\"),\n\t\t\t\t\t\t\t\t\t\"memory\": resource.MustParse(\"150M\"),\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\tif pipeline.Spec.Paused {\n\t\tdeployment.Spec.Replicas = int32Ptr(0)\n\t} else {\n\t\tdeployment.Spec.Replicas = int32Ptr(1)\n\t}\n\treturn deployment\n}", "title": "" }, { "docid": "00949d294058c091fae1da49e2cb9aca", "score": "0.469673", "text": "func NewClient(clientName, moduleVersion string, cred azcore.TokenCredential, options *ClientOptions) (*Client, error) {\n\tpkg, err := shared.ExtractPackageName(clientName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif options == nil {\n\t\toptions = &ClientOptions{}\n\t}\n\n\tif !options.Telemetry.Disabled {\n\t\tif err := shared.ValidateModVer(moduleVersion); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tep := cloud.AzurePublic.Services[cloud.ResourceManager].Endpoint\n\tif c, ok := options.Cloud.Services[cloud.ResourceManager]; ok {\n\t\tep = c.Endpoint\n\t}\n\tpl, err := armruntime.NewPipeline(pkg, moduleVersion, cred, runtime.PipelineOptions{}, options)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\ttr := options.TracingProvider.NewTracer(clientName, moduleVersion)\n\treturn &Client{ep: ep, pl: pl, tr: tr}, nil\n}", "title": "" }, { "docid": "daa9b9f2f3855e4c443b81b4510e7d1e", "score": "0.46946716", "text": "func New(opts Opts) (forge.Forge, error) {\n\treturn &GitLab{\n\t\turl: opts.URL,\n\t\tClientID: opts.ClientID,\n\t\tClientSecret: opts.ClientSecret,\n\t\tSkipVerify: opts.SkipVerify,\n\t}, nil\n}", "title": "" }, { "docid": "4da3c8765b2282defa42d5c6aef3957d", "score": "0.46826273", "text": "func New(options ...Option) (metric.Exporter, error) {\n\tcfg := newConfig(options...)\n\texp := &exporter{\n\t\ttemporalitySelector: cfg.temporalitySelector,\n\t\taggregationSelector: cfg.aggregationSelector,\n\t}\n\texp.encVal.Store(*cfg.encoder)\n\treturn exp, nil\n}", "title": "" }, { "docid": "0946f4a5867dd6d8b8d07025d609e5d9", "score": "0.46787772", "text": "func (svc *PipelineService) Add(org, repo string, h *library.Pipeline) (*library.Pipeline, *Response, error) {\n\t// set the API endpoint path we send the request to\n\tu := fmt.Sprintf(\"/api/v1/pipelines/%s/%s\", org, repo)\n\n\t// library Pipeline type we want to return\n\tv := new(library.Pipeline)\n\n\t// send request using client\n\tresp, err := svc.client.Call(\"POST\", u, h, v)\n\n\treturn v, resp, err\n}", "title": "" }, { "docid": "0d24477a4f4ba0e0cf34ae0a9cb05d04", "score": "0.46664223", "text": "func NewSplitTunnel(ctx *pulumi.Context,\n\tname string, args *SplitTunnelArgs, opts ...pulumi.ResourceOption) (*SplitTunnel, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required arguments\")\n\t}\n\n\tif args.AccountId == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'AccountId'\")\n\t}\n\tif args.Mode == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'Mode'\")\n\t}\n\tif args.Tunnels == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'Tunnels'\")\n\t}\n\topts = internal.PkgResourceDefaultOpts(opts)\n\tvar resource SplitTunnel\n\terr := ctx.RegisterResource(\"cloudflare:index/splitTunnel:SplitTunnel\", 
name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "title": "" }, { "docid": "057e03ea615f6fb346cfe3bf8fb043fd", "score": "0.46647197", "text": "func New(c Components, o Options) Interface {\n\tif o.ProcessedQueueSize == 0 {\n\t\to.ProcessedQueueSize = 5000\n\t}\n\n\th := &component{\n\t\tComponents: c,\n\t\tPublicNetAddr: o.PublicNetAddr,\n\t\tPrivateNetAddr: o.PrivateNetAddr,\n\t\tPrivateNetAddrAnnounce: o.PrivateNetAddrAnnounce,\n\t\tProcessed: newPQueue(o.ProcessedQueueSize),\n\t}\n\n\t// TODO Make it configurable\n\th.Configuration.CFList = [5]uint32{867100000, 867300000, 867500000, 867700000, 867900000}\n\th.Configuration.NetID = [3]byte{14, 14, 14}\n\t//h.Configuration.RX1DROffset = 0\n\th.Configuration.Rx1DrOffset = 0\n\th.Configuration.RFChain = 0\n\th.Configuration.InvPolarity = true\n\n\tset := make(chan bundle)\n\tbundles := make(chan []bundle)\n\n\th.ChBundles = set\n\tgo h.consumeBundles(bundles)\n\tgo h.consumeSet(bundles, set)\n\n\treturn h\n}", "title": "" }, { "docid": "85ade080cba32acc6ae00da37a91671d", "score": "0.46642363", "text": "func NewRunner(c client.Client) *ClientPipelineRunner {\n\treturn &ClientPipelineRunner{client: c, objectMeta: objectMetaCreator}\n}", "title": "" }, { "docid": "543d6240cb08e22a89fe4d67904ab3d1", "score": "0.46624726", "text": "func InsertPipeline(db gorp.SqlExecutor, store cache.Store, proj *sdk.Project, p *sdk.Pipeline, u *sdk.User) error {\n\tquery := `INSERT INTO pipeline (name, description, project_id, last_modified, from_repository) VALUES ($1, $2, $3, current_timestamp, $4) RETURNING id`\n\n\trx := sdk.NamePatternRegex\n\tif !rx.MatchString(p.Name) {\n\t\treturn sdk.NewError(sdk.ErrInvalidName, fmt.Errorf(\"invalid pipeline name, should match %s\", sdk.NamePattern))\n\t}\n\n\tif p.ProjectID == 0 {\n\t\treturn sdk.WithStack(sdk.ErrInvalidProject)\n\t}\n\n\tif err := db.QueryRow(query, p.Name, p.Description, p.ProjectID, p.FromRepository).Scan(&p.ID); err != nil {\n\t\treturn sdk.WithStack(err)\n\t}\n\n\tfor i := range p.Parameter {\n\t\tif err := InsertParameterInPipeline(db, p.ID, &p.Parameter[i]); err != nil {\n\t\t\treturn sdk.WithStack(err)\n\t\t}\n\t}\n\n\tevent.PublishPipelineAdd(proj.Key, *p, u)\n\n\treturn nil\n}", "title": "" } ]
48774f39e9661bd74fabb4a7ab8a0845
LoadMappingFile loads mapper records and returns it
[ { "docid": "9c585304d29f7ca9a7111a97eb95fa95", "score": "0.74954146", "text": "func LoadMappingFile(path string) (*mapper.Record, error) {\n\tfile := new(mapper.Record)\n\tstat, err := os.Stat(path)\n\tif os.IsNotExist(err) {\n\t\treturn nil, errors.New(\"Mapping file not found, please load it through -mapping option or put in /etc/whoisd/conf.d/mapping.json\")\n\t}\n\tmFile, err := os.Open(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer mFile.Close()\n\tdata := make([]byte, stat.Size())\n\tif _, err := mFile.Read(data); err != nil {\n\t\treturn nil, err\n\t}\n\tif err := json.Unmarshal(data, &file); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn file, nil\n}", "title": "" } ]
[ { "docid": "f47e7a4dee57570f4202054a2e144c3f", "score": "0.6759572", "text": "func (l *defaultExternalDataLoader) LoadFileMap(namespace, dsSpec string) (map[string][]byte, error) {\n\tif namespace == \"\" {\n\t\treturn nil, nil\n\t}\n\tparts := strings.Split(dsSpec, \"/\")\n\tif len(parts) != 2 {\n\t\treturn nil, fmt.Errorf(\"invalid %q annotation format. Expected kind/name, but insted got %q\", types.FilesFromDSKeyName, dsSpec)\n\t}\n\n\tdata, err := l.readK8sKeySource(parts[0], parts[1], namespace, \"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn parseDataAsFileMap(data)\n}", "title": "" }, { "docid": "6ff564e2bcdac1d18a7fc74985076b81", "score": "0.65464085", "text": "func LoadMapping(path string) (Mapping, error) {\n\tf, err := os.Open(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tb, err := ioutil.ReadAll(f)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tmapping := make(Mapping)\n\tif err := yaml.Unmarshal(b, &mapping); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn mapping, nil\n}", "title": "" }, { "docid": "1d7cc8ea824931f76862be04d3d19fdd", "score": "0.6108084", "text": "func loadMap(mapLocation string) map[string]string {\n\tlocationMap := make(map[string]string)\n\n\tfile, err := os.Open(mapLocation)\n\tif err != nil {\n\t\tfmt.Fprintf(os.Stderr, \"Error: %s\", err.Error())\n\t\treturn locationMap\n\t}\n\tdefer file.Close()\n\n\tb, err := ioutil.ReadAll(file)\n\tmapping := string(b)\n\n\tlines := strings.Split(mapping, \"\\n\")\n\tlines = lines[0:len(lines)-1]\n\t// for all values loop and add to the map\n\tfor _, value := range lines {\n\t\tlocations := strings.SplitN(value, \"\\x20\", 2)\n\t\tlocationMap[locations[0]] = locations[1]\n\t}\n\n\treturn locationMap\n}", "title": "" }, { "docid": "9141f88ed2a5772cd12a7e1bfbb9ab0f", "score": "0.60190946", "text": "func loadIndexMapToFile(filePath string) (m map[uint32]map[uint16]int) {\n\tfp, err := os.Open(filePath);\n\tif err != nil {\n\t\tpanic(\"cant open file\");\n\t}\n\tdefer fp.Close();\n\t\n\tenc := gob.NewDecoder(fp);\n\tif err := enc.Decode(&m); err != nil {\n\t\tpanic(\"cant decode\");\n\t}\n\treturn m;\n}", "title": "" }, { "docid": "f5dd39156a11aaa47b553937b16561a2", "score": "0.58236295", "text": "func getMapping(resource resourceType) (mapping, error) {\n\tvar m mapping\n\n\tfPath := path.Join(mappingsDir, string(resource)+\".json\")\n\tb, err := ioutil.ReadFile(fPath)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Unable to read file %v: %v. 
Have you run client.FetchAllMappingFiles() yet?\", fPath, err)\n\t}\n\terr = json.Unmarshal(b, &m)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Unable to read JSON at %v: %v\", fPath, err)\n\t}\n\treturn m, nil\n}", "title": "" }, { "docid": "80b117e6a4f895c164606b512695712d", "score": "0.5802845", "text": "func loadMap(w http.ResponseWriter, r *http.Request) {\n\tfileName := r.URL.Path[len(\"/loadMap/\"):]\n\tmaps.RobotMap.LoadMap(fileName)\n}", "title": "" }, { "docid": "a5ad02712f828792415bed2339233b54", "score": "0.5795596", "text": "func (a *app) LoadProjectionMappings() ([]types.ProjectionMapping, error) {\n\tprojectionMappings := []types.ProjectionMapping{}\n\terrs := []error{}\n\tfilepath.Walk(a.ProjectionMappingsRootPath(), func(path string, info os.FileInfo, err error) error {\n\t\t// for each path, test that is is a yaml file, and load it\n\t\tif info == nil || info.IsDir() {\n\t\t\treturn nil\n\t\t}\n\t\t// test for *.yaml suffix\n\t\tif !strings.HasSuffix(info.Name(), \".yaml\") {\n\t\t\t// skip this file\n\t\t\treturn nil\n\t\t}\n\n\t\traw, err := ioutil.ReadFile(path)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error reading projection mapping %s: %s\\n\", path, err.Error())\n\t\t\terrs = append(errs, err)\n\t\t\treturn err\n\t\t}\n\t\tm, err := v1.LoadFromYamlBytes(raw, a.Config)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error loading projection mapping %s: %s\\n\", path, err.Error())\n\t\t\terrs = append(errs, err)\n\t\t\treturn err\n\t\t}\n\t\tif a.Debug() {\n\t\t\tlog.Printf(\"Loaded projection mapping: %s\\n\", m)\n\t\t}\n\t\tprojectionMappings = append(projectionMappings, m)\n\t\treturn nil\n\t})\n\n\tif len(errs) > 0 {\n\t\treturn projectionMappings, fmt.Errorf(\"unable to load %d projection mappings\", len(errs))\n\t}\n\n\treturn projectionMappings, nil\n}", "title": "" }, { "docid": "51aae005c0280fdc696f21802ad78cc8", "score": "0.579253", "text": "func (r *Runfiles) loadRepoMapping() error {\n\trepoMappingPath, err := r.impl.path(repoMappingRlocation)\n\t// If Bzlmod is disabled, the repository mapping manifest isn't created, so\n\t// it is not an error if it is missing.\n\tif err != nil {\n\t\treturn nil\n\t}\n\tr.repoMapping, err = parseRepoMapping(repoMappingPath)\n\t// If the repository mapping manifest exists, it must be valid.\n\treturn err\n}", "title": "" }, { "docid": "c58dda8e00521e0e3c72f0e05fa9a2c8", "score": "0.578276", "text": "func (config *Record) Load() (*mapper.Record, error) {\n\tvar path string\n\tvar err error\n\tmapp := new(mapper.Record)\n\n\tif err = config.LoadConfigFile(config.ConfigPath); err != nil {\n\t\treturn nil, err\n\t}\n\tif mapp, err = LoadMappingFile(config.MappingPath); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// overwrite config from file by cmd flags\n\tflags := flag.NewFlagSet(\"whoisd\", flag.ContinueOnError)\n\t// Begin ignored flags\n\tflags.StringVar(&path, \"config\", \"\", \"\")\n\tflags.StringVar(&path, \"mapping\", \"\", \"\")\n\t// End ignored flags\n\tflags.BoolVar(&config.TestMode, \"t\", config.TestMode, \"\")\n\tflags.BoolVar(&config.TestMode, \"test\", config.TestMode, \"\")\n\tflags.StringVar(&config.Host, \"host\", config.Host, \"\")\n\tflags.IntVar(&config.Port, \"port\", config.Port, \"\")\n\tflags.IntVar(&config.Workers, \"work\", config.Workers, \"\")\n\tflags.IntVar(&config.Connections, \"conn\", config.Connections, \"\")\n\tflags.StringVar(&config.Storage.StorageType, \"storage\", config.Storage.StorageType, \"\")\n\tflags.StringVar(&config.Storage.Host, \"shost\", config.Storage.Host, 
\"\")\n\tflags.IntVar(&config.Storage.Port, \"sport\", config.Storage.Port, \"\")\n\tflags.StringVar(&config.Storage.IndexBase, \"base\", config.Storage.IndexBase, \"\")\n\tflags.StringVar(&config.Storage.TypeTable, \"table\", config.Storage.TypeTable, \"\")\n\tflags.Parse(os.Args[1:])\n\n\treturn mapp, nil\n}", "title": "" }, { "docid": "70cf20e70ea2f637f8456ac45a8ca907", "score": "0.57820797", "text": "func ReadMappingsProc(path string) ([]idtools.IDMap, error) {\n\treturn nil, nil\n}", "title": "" }, { "docid": "045c41df88c6b36f737211eb3813fb4f", "score": "0.576495", "text": "func LoadSecretMappingFile(fileName string) (*v1alpha1.SecretMapping, error) {\n\tconfig := &v1alpha1.SecretMapping{}\n\n\tdata, err := ioutil.ReadFile(fileName)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to load file %s due to %s\", fileName, err)\n\t}\n\n\terr = yaml.Unmarshal(data, config)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to unmarshal YAML file %s due to %s\", fileName, err)\n\t}\n\n\treturn config, nil\n}", "title": "" }, { "docid": "ab6ba45bcd6d8ebc293ba15c807f1adc", "score": "0.57018185", "text": "func readMapFile(filename string) (mappedHeaders []string, errorMessage string) {\n mappedHeaders = make([]string, DefaultHeaderCount)\n \n file, err := os.Open(filename)\n if err != nil {\n errorMessage = \"Cannot open mapped header file.\"\n return\n }\n defer file.Close()\n \n reader := bufio.NewReaderSize(file, DefaultBufferSize)\n \n index := 0\n line, isPrefix, err := reader.ReadLine()\n for err == nil && !isPrefix {\n if (index >= DefaultBufferSize) {\n errorMessage = \"Maxim amount of headers reached.\"\n return\n }\n mappedHeaders[index] = addFilter(string(line))\n index++\n line, isPrefix, err = reader.ReadLine()\n }\n return mappedHeaders[:index], \"\"\n}", "title": "" }, { "docid": "fbb75519952f4ee3da94dec007f5f53d", "score": "0.56952405", "text": "func ReadDataFromFile(path string) (map[MergedTableMapKey][]IfMergedTable, error) {\n\n\tvar rerr, err error\n\n\t// declare output variable\n\tdata := make(map[MergedTableMapKey][]IfMergedTable)\n\n\t// open local db\n\tf, err := os.Open(filepath.Clean(path))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// close db on exit\n\tdefer func(f *os.File) {\n\t\terr = f.Close()\n\t\tif err != nil {\n\t\t\trerr = err\n\t\t}\n\t}(f)\n\n\t// prepare decoder for data read from local db\n\tdec := gob.NewDecoder(f)\n\n\t// decode local db data to internal represintation\n\terr = dec.Decode(&data)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn data, rerr\n}", "title": "" }, { "docid": "3d51dea348617176b8cdf6a82e677d7f", "score": "0.5677506", "text": "func LoadLightMapFromMessagePackFile(fileName string) (*LightMap, error) {\n\thandle := new(codec.MsgpackHandle)\n\treturn deserializeLightMapFromFile(fileName, handle)\n}", "title": "" }, { "docid": "59b5b06b0b4dd2ef56683603598124de", "score": "0.56255186", "text": "func (dm *Duramap) Load() error {\n\tdm.mut.RLock()\n\tif dm.m != nil {\n\t\tdm.mut.RUnlock()\n\t\treturn nil\n\t}\n\tdm.mut.RUnlock()\n\n\treturn dm.db.Update(func(tx *bolt.Tx) error {\n\t\tm := GenericMap{}\n\t\tb, err := tx.CreateBucketIfNotExists(bucketName)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tbz := b.Get([]byte(dm.Name))\n\t\tif bz == nil {\n\t\t\tbz, err = mp.Marshal(m)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdefer dm.mut.Unlock()\n\t\t\tdm.mut.Lock()\n\t\t\terr = b.Put([]byte(dm.Name), bz)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdm.unsafeSet(m, 
bz)\n\t\t}\n\n\t\tmperr := mp.Unmarshal(bz, &m)\n\t\tif mperr != nil {\n\t\t\treturn mperr\n\t\t}\n\n\t\tdefer dm.mut.Unlock()\n\t\tdm.mut.Lock()\n\t\tdm.unsafeSet(m, bz)\n\n\t\treturn nil\n\t})\n}", "title": "" }, { "docid": "7273c971b2d594f8cb55554ce0c815be", "score": "0.56212133", "text": "func MmapFile(fn string) ([]byte, error) {\n\tf, err := os.Open(fn)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tp, err := Mmap(f)\n\tf.Close()\n\treturn p, err\n}", "title": "" }, { "docid": "977c9460684ff58dee62efa7b6eaa766", "score": "0.5617973", "text": "func mapOverFile(\n\tfile string,\n\tmapFN func(string, string) []KeyValue,\n) (currentKeyValue []KeyValue) {\n\topenedFile := openFile(file)\n\tdefer openedFile.Close()\n\n\tcurrentKeyValue = mapFN(file, readFileAsString(openedFile))\n\treturn\n}", "title": "" }, { "docid": "9a2821bd8ab9a059465bfc2e99a3d9be", "score": "0.5612052", "text": "func (this *SceneManager) LoadTileMap(file string) {\n\t\n}", "title": "" }, { "docid": "f08ecaca35caaf0bc415c51c149125d1", "score": "0.56018215", "text": "func (a *RWAccess) LoadMap() error {\n\tcmap := make(map[string]bool)\n\tatype := reflect.TypeOf(*a)\n\taval := reflect.ValueOf(*a)\n\n\tfor i := 0; i < aval.NumField(); i++ {\n\t\tt := atype.Field(i)\n\t\tv := aval.Field(i)\n\t\tif v.Kind() == reflect.Bool {\n\t\t\tcmap[t.Tag.Get(\"json\")] = v.Bool()\n\t\t}\n\t}\n\ta.cmap = cmap\n\treturn nil\n}", "title": "" }, { "docid": "ba23f0a4e30b2b7a56b32d65b4596b09", "score": "0.5590177", "text": "func loadRecords() {\n\tfileNames, err := ioutil.ReadDir(dataDirectory + recordsFolder)\n\n\t// defer our error function\n\t// if the folder doesn't exist, make it and try again\n\tdefer func(err error) {\n\t\tif err != nil {\n\t\t\tif os.IsNotExist(err) {\n\t\t\t\tcreateRecordsFolder()\n\t\t\t\tloadRecords()\n\t\t\t} else {\n\t\t\t\tpanic(\"Error reading records: \" + err.Error())\n\t\t\t}\n\t\t}\n\t}(err)\n\n\tif err == nil {\n\t\t// initialize recordNames to len of fileNames\n\t\t// this might be more than required if there are stray directories in the folder\n\t\trecordMap = make(map[string]string)\n\n\t\tfor _, fileInfo := range fileNames {\n\t\t\tif !fileInfo.IsDir() {\n\t\t\t\tencryptedFileName := fileInfo.Name()\n\t\t\t\tdecryptedFileName := CryptoHelper.DecryptString(encryptedFileName, decryptionKey, initializationVector)\n\n\t\t\t\trecordMap[decryptedFileName] = encryptedFileName\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "76d17312f518c29657af34873d2caf0b", "score": "0.5587922", "text": "func loadGroupMap(r io.Reader) (*groupMap, error) {\n\tbs, err := ioutil.ReadAll(r)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"readall: %w\", err)\n\t}\n\tklog.Infof(\"%d bytes read from config\", len(bs))\n\n\tgm := &groupMap{}\n\terr = yaml.Unmarshal(bs, &gm)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unmarshal: %w\", err)\n\t}\n\n\tklog.Infof(\"loaded: %+v\", gm)\n\tif len(gm.Groups) == 0 {\n\t\treturn nil, fmt.Errorf(\"no entries found after unmarshal\")\n\t}\n\treturn gm, nil\n}", "title": "" }, { "docid": "503360bd43043c844e1f8e9bcc6c13ba", "score": "0.55867016", "text": "func mapFile(file string, strainMap map[string]int) (map[pos_key]float64, error) {\n\n\tf, err := os.Open(file)\n\tdefer f.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tr := csv.NewReader(f)\n\t//skip the header\n\tif _, err := r.Read(); err != nil {\n\t\tpanic(err)\n\t}\n\t//initialize a map for this file\n\tfileMap := make(map[pos_key]float64)\n\tfor {\n\t\trecord, err := r.Read()\n\t\tif err == io.EOF 
{\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tif record[5] == \"all\" {\n\t\t\tcontinue\n\t\t} else {\n\t\t\tstrain1, strain2, dist := getRecord(record)\n\t\t\tpos_i, _ := strainMap[strain1]\n\t\t\tpos_j, _ := strainMap[strain2]\n\t\t\tfileMap[pos_key{pos_i, pos_j}] = dist\n\t\t\tfileMap[pos_key{pos_j, pos_i}] = dist\n\t\t}\n\n\t}\n\treturn fileMap, err\n}", "title": "" }, { "docid": "ea32d2e304789634672992b6e2616e1c", "score": "0.55750924", "text": "func GenMappingFile() error {\n\tDownloadDevicesRepository()\n\treturn nil\n}", "title": "" }, { "docid": "f9e84090236aa1b262b6edc0c8c9fefe", "score": "0.5546707", "text": "func MapMaker(filePath string) {\n\tfile, err := ioutil.ReadFile(filePath)\n\tif err != nil {\n\n\t}\n\tfileString := string(file)\n\tsplitString := []string(strings.Split(fileString, \"\"))\n\n\t//feeds letters to map\n\tfor i:= 0; i < len(splitString); i++ {\n\t\taddToMap(splitString[i])\n\t}\n}", "title": "" }, { "docid": "af5eaa34ffe7488e52e0139045a70b6e", "score": "0.55099463", "text": "func loadUserMap(r io.Reader) (*userMap, error) {\n\tbs, err := ioutil.ReadAll(r)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"readall: %w\", err)\n\t}\n\tklog.Infof(\"%d bytes read from config\", len(bs))\n\n\tum := &userMap{}\n\terr = yaml.Unmarshal(bs, &um)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unmarshal: %w\", err)\n\t}\n\n\tif len(um.Users) == 0 {\n\t\treturn nil, fmt.Errorf(\"no entries found after unmarshal\")\n\t}\n\tklog.Infof(\"loaded: %+v\", um)\n\treturn um, nil\n}", "title": "" }, { "docid": "59d590f98b66fdba91756bc417c874f8", "score": "0.5478364", "text": "func (g *Generator) fileToMap(fName string) map[string]string {\n\tm := make(map[string]string)\n\tpath := os.Getenv(\"GOPATH\") + \"/src/github.com/malisit/kolpa/data/\" + g.Locale + \"/\" + fName\n\tfile, err := os.Open(path)\n\n\tif err != nil {\n\t\treturn m\n\t}\n\tdefer file.Close()\n\n\tscanner := bufio.NewScanner(file)\n\tfor scanner.Scan() {\n\t\tline := strings.Split(scanner.Text(), \"\\t\")\n\t\tm[line[0]] = line[1]\n\t}\n\n\tif err := scanner.Err(); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\treturn m\n}", "title": "" }, { "docid": "eab726e714f06d832836a71d6c3c6298", "score": "0.5422012", "text": "func (t TokenMap) LoadFile(filename string) error {\n\tdata, err := os.ReadFile(filename)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn t.LoadBytes(data)\n}", "title": "" }, { "docid": "18c89aef250398168470efac9b4d5743", "score": "0.54082114", "text": "func ReadFlowMap(file string) *FlowMap {\n\tlog.Println(\"INFO: Read flow mappings from\", file)\n\tf, err := os.Open(file)\n\tif err != nil {\n\t\tlog.Fatalln(\"ERROR: Failed to read mapping file\", file, err)\n\t}\n\tdefer f.Close()\n\trows, err := csv.NewReader(f).ReadAll()\n\tif err != nil {\n\t\tlog.Fatalln(\"ERROR: Failed to read mapping file\", file, err)\n\t}\n\tfm := FlowMap{\n\t\tmappings: make(map[string]*FlowMapEntry),\n\t\tunmappings: make(map[string]*FlowMapEntry),\n\t\tused: make(map[string]bool),\n\t\tuntouchedUsed: make(map[string]bool)}\n\tfor i, row := range rows {\n\t\tif i == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tif len(row) < 3 {\n\t\t\tlog.Println(\"WARNING: invalid flow mapping in row\", i)\n\t\t\tcontinue\n\t\t}\n\t\te := FlowMapEntry{\n\t\t\tOldID: strings.TrimSpace(row[0]),\n\t\t\tLocation: strings.TrimSpace(row[1]),\n\t\t\tNewID: strings.TrimSpace(row[2])}\n\t\tkey := MapKey(e.Location, e.OldID)\n\t\tfm.mappings[key] = &e\n\t\tfm.unmappings[e.NewID] = &e\n\t}\n\tlog.Println(\" ... 
read\", len(fm.mappings), \"mappings\")\n\treturn &fm\n}", "title": "" }, { "docid": "9d0893a2e4929c9722792cc787fa02dc", "score": "0.53907484", "text": "func loadMap(reffile <-chan string, basefile string, refmap map[string]string) error {\n\n\tfileinfo := fileMap[basefile]\n\n\tvar keymap = make(map[string]bool)\n\tvar outputmap = make(map[string]bool)\n\tvar initfldsmap = make(map[string]bool)\n\n\toutputfields := fileinfo.outputfields\n\tkeyfields := fileinfo.keys\n\t// add file for initialize fields - and perhaps some fields type process\n\tinitvalflds := \"TO_END_USR_ZIP\"\n\tfields := fileinfo.fields\n\tf := strings.Split(fields, \"|\")\n\t// initialize the maps\n\tfor i := 0; i < len(f); i++ {\n\t\tkeymap[strings.TrimSpace(f[i])] = false\n\t\toutputmap[strings.TrimSpace(f[i])] = false\n\t\tinitfldsmap[strings.TrimSpace(f[i])] = false\n\t}\n\tkeys := strings.Split(keyfields, \"|\")\n\tfor i := 0; i < len(keys); i++ {\n\t\tkeymap[strings.TrimSpace(keys[i])] = true\n\t}\n\toutputs := strings.Split(outputfields, \"|\")\n\tfor i := 0; i < len(outputs); i++ {\n\t\toutputmap[strings.TrimSpace(outputs[i])] = true\n\t}\n\tinits := strings.Split(initvalflds, \"|\")\n\tfor i := 0; i < len(inits); i++ {\n\t\tinitfldsmap[strings.TrimSpace(inits[i])] = true\n\t}\n\n\ttempline := \"\"\n\tblankline := \"\"\n\tinitval := \"\"\n\tfor i := 0; i < len(f); i++ {\n\t\t//adding pipe allows no possibility of substrings from other columns matching\n\t\tif outputmap[strings.TrimSpace(f[i])] {\n\t\t\tinitval = \"\"\n\t\t\tif initfldsmap[strings.TrimSpace(f[i])] {\n\t\t\t\tif strings.TrimSpace(f[i]) == \"TO_END_USR_ZIP\" {\n\t\t\t\t\tinitval = \"00000\"\n\t\t\t\t} else {\n\t\t\t\t\tinitval = \"\"\n\t\t\t\t}\n\t\t\t}\n\t\t\tif templine == \"\" {\n\t\t\t\ttempline = strings.TrimSpace(f[i])\n\t\t\t\tblankline = initval\n\t\t\t} else {\n\t\t\t\ttempline = templine + \"|\" + strings.TrimSpace(f[i])\n\t\t\t\tblankline = blankline + \"|\" + initval\n\t\t\t}\n\t\t}\n\t}\n\trefmap[\"HEADER|\"] = templine\n\trefmap[\"BLANK|\"] = blankline\n\tdone_ub := false\n\tcounter := 0\n\tline := \"\"\n\ttempline = \"\"\n\tfor done_ub == false {\n\t\tline = <-reffile\n\t\tif line == \"EOF\" {\n\t\t\tdone_ub = true\n\t\t} else {\n\t\t\tw := strings.Split(line, \"|\")\n\t\t\tif len(w) >= 2 {\n\t\t\t\tcounter++\n\t\t\t\ttempline := \"~start#~\" //value to indicate start of record to prevent leading | - may be better to just strip the first pipe at the end.\n\t\t\t\tkey := \"\"\n\n\t\t\t\tfor i := 0; i < len(w); i++ {\n\t\t\t\t\t//adding pipe allows no possibility of substrings from other columns matching\n\t\t\t\t\tif i < len(f) {\n\t\t\t\t\t\tif keymap[strings.TrimSpace(f[i])] {\n\t\t\t\t\t\t\tif key == \"\" {\n\t\t\t\t\t\t\t\tkey = strings.TrimSpace(w[i])\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tkey = key + strings.TrimSpace(w[i])\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif outputmap[strings.TrimSpace(f[i])] {\n\t\t\t\t\t\t\tif templine == \"~start#~\" {\n\t\t\t\t\t\t\t\ttempline = strings.TrimSpace(w[i])\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\ttempline = templine + \"|\" + strings.TrimSpace(w[i])\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (basefile == \"TRANCODE\") || (basefile == \"xPRODTYPE\") {\n\t\t\t\t\tif _, ok := refmap[key]; ok {\n\t\t\t\t\t\trefmap[key] = templine\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\trefmap[key] = templine\n\t\t\t\t}\n\t\t\t}\n\t\t} //after reads\n\t}\n\tlog.Println(basefile + \": \" + strconv.Itoa(counter))\n\treturn nil\n}", "title": "" }, { "docid": 
"c41c90e187859e2271d0b9c2a105701b", "score": "0.5387548", "text": "func MapperToFile(path string, m *Mapper) error {\n\tb, err := json.MarshalIndent(m, \"\", \"\\t\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tf, err := preppiFS.Create(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif _, err := f.Write(b); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "42481ec55007a07b525ccfc59a9f2c52", "score": "0.5378995", "text": "func (l *localStore) load() (map[string][]string, error) {\n\tvar svrsMap = make(map[string][]string, 0)\n\n\tf, err := os.Open(l.persistentFile)\n\tif err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\treturn svrsMap, nil\n\t\t}\n\t\treturn svrsMap, fmt.Errorf(\"failed to open file, path: %s, err: %v\", l.persistentFile, err)\n\t}\n\tdefer f.Close()\n\n\tns := bufio.NewScanner(f)\n\tfor ns.Scan() {\n\t\tline := ns.Text()\n\t\tss := strings.FieldsFunc(line, split)\n\n\t\t// TODO: use regex\n\t\tif len(ss) == 2 {\n\t\t\tif urls, ok := svrsMap[ss[0]]; ok {\n\t\t\t\turls = append(urls, ss[1])\n\t\t\t\tsvrsMap[ss[0]] = urls\n\t\t\t} else {\n\t\t\t\tsvrsMap[ss[0]] = []string{ss[1]}\n\t\t\t}\n\t\t}\n\t}\n\n\tif err := ns.Err(); err != nil {\n\t\treturn svrsMap, fmt.Errorf(\"read file failed, file: %s, err: %v\", l.persistentFile, err)\n\t}\n\n\treturn svrsMap, nil\n}", "title": "" }, { "docid": "bbb0d36093a7e6ca9938baadc0971774", "score": "0.537141", "text": "func YamlFileToMap(configfile string) (*map[interface{}]interface{}, error) {\n yamlmap := make(map[interface{}]interface{})\n yamlFile, err := ioutil.ReadFile(configfile)\n if err != nil {\n return nil, err\n }\n err = yaml.Unmarshal(yamlFile, yamlmap)\n if err != nil {\n return nil, err\n }\n return &yamlmap, nil\n}", "title": "" }, { "docid": "ce54f89ac00a2406ebe141d8592dcf11", "score": "0.5344842", "text": "func Load() (map[string]string, error) {\n\tfileLocation := getFileLocation()\n\tvar c = map[string]string{}\n\n\tfile, err := ioutil.ReadFile(fileLocation)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = json.Unmarshal(file, &c)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn c, nil\n}", "title": "" }, { "docid": "8da9d4c600d6cee240e010acd558f0c0", "score": "0.5326176", "text": "func loadSongHashesMap(songHashesTxt string) error {\n\tlog.Printf(\"Loading songs from %q\", songHashesTxt)\n\thashIdx := 0\n\tpathIdx := 1\n\tartistIdx := 2\n\talbumIdx := 3\n\ttitleIdx := 4\n\n\tf, err := os.Open(songHashesTxt)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\tr := csv.NewReader(f)\n\tr.Comment = '#'\n\tr.LazyQuotes = true\n\n\trecords, err := r.ReadAll()\n\tif err != nil {\n\t\tlog.Printf(\"ERROR: %v\", err)\n\t\tpanic(err)\n\t}\n\n\tfor _, record := range records {\n\t\tpath := record[pathIdx]\n\t\tsong := songPathMap[path]\n\n\t\tif SongErrorPaths[path] {\n\t\t\tlog.Printf(\"Skipping error path: %q\", path)\n\t\t} else {\n\n\t\t\tif song == nil {\n\t\t\t\tsong = &Song{\n\t\t\t\t\tPath: path,\n\t\t\t\t\tHash: record[hashIdx],\n\t\t\t\t\tSongMeta: SongMeta{\n\t\t\t\t\t\tArtist: record[artistIdx],\n\t\t\t\t\t\tAlbum: record[albumIdx],\n\t\t\t\t\t\tTitle: record[titleIdx],\n\t\t\t\t\t},\n\t\t\t\t}\n\t\t\t\tSongs = append(Songs, song)\n\t\t\t\tsongPathMap[song.Path] = song\n\t\t\t}\n\n\t\t\tsong.Hash = record[hashIdx]\n\t\t\tsongHashMap[song.Hash] = append(songHashMap[song.Hash], song)\n\t\t}\n\t}\n\tlog.Printf(\"Loaded %d songs (paths: %d)\", len(Songs), len(songPathMap))\n\treturn nil\n}", "title": "" }, { "docid": "4f8b2d6a6e55e9b6406aa6e9952b141c", "score": 
"0.5315325", "text": "func LabelLoadsMapping(c *cli.Context) (map[string]string, map[string]string) {\n\tdebugCmdFuncInfo(c)\n\n\tlabelsSvc, formatter := WireUpLabel(c)\n\tlabels, err := labelsSvc.GetLabelList()\n\tif err != nil {\n\t\tformatter.PrintFatal(\"Couldn't receive labels data\", err)\n\t}\n\n\tlabelIDsByName := make(map[string]string)\n\tlabelNamesByID := make(map[string]string)\n\n\tfor _, label := range labels {\n\t\tlabelIDsByName[label.Name] = label.ID\n\t\tlabelNamesByID[label.ID] = label.Name\n\t}\n\treturn labelIDsByName, labelNamesByID\n}", "title": "" }, { "docid": "52fa3cf022a64326c7fd09d74f5caf8c", "score": "0.52959514", "text": "func MapperFromConfig(config string) (*Mapper, error) {\n\tdata, err := afero.ReadFile(preppiFS, config)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed reading config %q: %v\", config, err)\n\t}\n\tm := &Mapper{}\n\tif err := json.Unmarshal(data, m); err != nil {\n\t\treturn nil, fmt.Errorf(\"failed reading config %q: %v\", config, err)\n\t}\n\treturn m, nil\n}", "title": "" }, { "docid": "14657de647bd2dd66007d335ed3cb806", "score": "0.52863544", "text": "func LoadMapMatchedFromBinary(c *gin.Context, b interface{}) *util.TaskResult {\n\t// Cannot load map matched trajectories before roadnet is ready.\n\tif !mod.ExpCtx.IsRoadnetReady() {\n\t\treturn &util.TaskResult{\n\t\t\tCode: 500,\n\t\t\tMessage: \"Please load roadnet first.\",\n\t\t}\n\t}\n\n\t// Send load map matched trajectory request to core.\n\tmod.ExpCtx.LockCtxLock()\n\tdefer mod.ExpCtx.UnlockCtxLock()\n\tutil.Core.SendRequest(struct {\n\t\tCmd string\n\t\tFolder string\n\t}{\n\t\tCmd: \"LoadMapMatchedTrajectoriesFromBinary\",\n\t\tFolder: \"Experiment_\" + strconv.Itoa(mod.ExpCtx.ID) + \"/\" + c.Param(\"folder\"),\n\t})\n\n\tret, err := util.Core.GetResponse()\n\tif err != nil {\n\t\treturn &util.TaskResult{\n\t\t\tCode: 500,\n\t\t\tMessage: \"Failed to load map matched trajectories: \" + err.Error(),\n\t\t}\n\t}\n\tif !ret.Success {\n\t\treturn &util.TaskResult{\n\t\t\tCode: 500,\n\t\t\tMessage: ret.Message,\n\t\t}\n\t}\n\n\treturn &util.TaskResult{\n\t\tCode: 200,\n\t\tMessage: ret.Message,\n\t\tData: mod.ExpCtx,\n\t}\n}", "title": "" }, { "docid": "2abe07789cab1ece63730a83a8369ac0", "score": "0.52852106", "text": "func (f FakeLoader) Load(location string) ([]byte, error) {\n\treturn f.delegate.Load(location)\n}", "title": "" }, { "docid": "d57c5d0a43d9845942747d74b39bee86", "score": "0.5284598", "text": "func ReadMappings(prefix, container string) string {\n\tpath := prefix + \"/\" + container + \"/config/log-mapping.json\"\n\tif _, err := os.Stat(path); err == nil {\n\t\tmapping, err := ioutil.ReadFile(path)\n\t\tcommon.LogError(err)\n\t\treturn string(mapping)\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "7d434965fef44c5a19fcbc9947ac870b", "score": "0.5282902", "text": "func LoadSecretMapping(dir string, failIfMissing bool) (*v1alpha1.SecretMapping, string, error) {\n\tabsolute, err := filepath.Abs(dir)\n\tif err != nil {\n\t\treturn nil, \"\", errors.Wrap(err, \"creating absolute path\")\n\t}\n\trelPath := filepath.Join(\".jx\", \"gitops\", \"secret-mappings.yaml\")\n\n\tfor absolute != \"\" && absolute != \".\" && absolute != \"/\" {\n\t\tfileName := filepath.Join(absolute, relPath)\n\t\tabsolute = filepath.Dir(absolute)\n\n\t\texists, err := util.FileExists(fileName)\n\t\tif err != nil {\n\t\t\treturn nil, \"\", err\n\t\t}\n\n\t\tif !exists {\n\t\t\tcontinue\n\t\t}\n\n\t\tconfig, err := LoadSecretMappingFile(fileName)\n\t\treturn config, fileName, 
err\n\t}\n\tif failIfMissing {\n\t\treturn nil, \"\", errors.Errorf(\"%s file not found\", relPath)\n\t}\n\treturn nil, \"\", nil\n}", "title": "" }, { "docid": "0eaf184c4bbe15b7a10c64533d7a9171", "score": "0.526792", "text": "func LoadPinnedMap(fileName string, opts *LoadPinOptions) (*Map, error) {\n\tfd, err := sys.ObjGet(&sys.ObjGetAttr{\n\t\tPathname: sys.NewStringPointer(fileName),\n\t\tFileFlags: opts.Marshal(),\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tm, err := newMapFromFD(fd)\n\tif err == nil {\n\t\tm.pinnedPath = fileName\n\t}\n\n\treturn m, err\n}", "title": "" }, { "docid": "aa4da572394294626934bed417a7c6d0", "score": "0.52556366", "text": "func (ir IndexReader) Load(path string) map[string]int {\n\tf, err := os.Open(path)\n\tif err == nil {\n\t\tio.Copy(&ir, f)\n\t\tf.Close()\n\t\treturn ir.data\n\t}\n\treturn map[string]int{}\n}", "title": "" }, { "docid": "9dd33d12ee77c2c90437d18c4af07f0f", "score": "0.52408427", "text": "func LoadLightMapFromJSONFile(fileName string) (*LightMap, error) {\n\thandle := new(codec.JsonHandle)\n\treturn deserializeLightMapFromFile(fileName, handle)\n}", "title": "" }, { "docid": "24bcf9aeb11194e9cbf7a89ef91d9cb2", "score": "0.5240347", "text": "func LoadSHA256asMap(fname string) (map[string]string, error) {\n\tfileBytes, err := ioutil.ReadFile(fname)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error reading from %s: %v\\n\", fname, err)\n\t}\n\thashMap := make(map[string]string, 1000)\n\t//Verbose.Printf(\"Loading from %s\\n\", fname)\n\n\tlines := bytes.Split(fileBytes, []byte{'\\n'})\n\n\t//Verbose.Printf(\"found %d lines in %s\\n\", len(lines), fname)\n\t// fmt.Printf(\"%v\\n\",lines)\n\tfor ndx, line := range lines {\n\t\tsline := string(line)\n\t\t_ = ndx\n\t\t//Verbose.Printf(\"line[%d] = %q\\n\", ndx, sline)\n\t\tif len(sline) < 3 { // must have at least 3 pipe symbols\n\t\t\tcontinue\n\t\t}\n\t\tif sline[0] == '#' {\n\t\t\tcontinue\n\t\t}\n\t\tpartRec, err := Split256(sline)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"Can't load line[%d] %q from %s\\n\", ndx, sline, fname)\n\t\t}\n\t\thashMap[partRec.SHA] = sline\n\t}\n\tVerbose.Printf(\"added %d precalculated digests from %s\\n\", len(hashMap), fname)\n\treturn hashMap, nil\n}", "title": "" }, { "docid": "732d6d3914749898681445afe30099bb", "score": "0.52380127", "text": "func (fl *fileLayer) Load() (map[string]interface{}, error) {\n\tif fl.loaded {\n\t\treturn fl.data, nil\n\t}\n\text := strings.ToLower(filepath.Ext(fl.file))\n\tl, ok := loaders[ext]\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"no registered loader for ext %s\", ext)\n\t}\n\n\tf, err := os.Open(fl.file)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer func() { _ = f.Close() }()\n\n\tfl.data, err = l.Convert(f)\n\tfl.loaded = err == nil\n\n\treturn fl.data, err\n}", "title": "" }, { "docid": "d85cd6ea53abd54995166af501d2b343", "score": "0.5237904", "text": "func (vm *VerMap) Load() map[string]string {\n\tvm.mu.RLock()\n\tdefer vm.mu.RUnlock()\n\treturn vm.load()\n}", "title": "" }, { "docid": "3c0b105e47b72ed7d08cebec08a36404", "score": "0.523652", "text": "func loadFile(path string) (loadResult, error) {\n\tclient, err := eskipfile.Open(path)\n\tif err != nil {\n\t\treturn loadResult{}, err\n\t}\n\n\troutes, err := client.LoadAll()\n\treturn loadResult{routes: routes}, err\n}", "title": "" }, { "docid": "a48f6657ff74018cddfabace84120481", "score": "0.5225261", "text": "func ParseFile(filename string) (Map, error) {\n\tfile, err := os.Open(filename)\n\tif err != nil {\n\t\treturn nil, 
err\n\t}\n\tdefer file.Close()\n\n\treturn Parse(file)\n}", "title": "" }, { "docid": "c8a1f32f9deb218686396416d8b7aedb", "score": "0.52197945", "text": "func (n *NavMap) lookupMaps() error {\n\tmatches, err := filepath.Glob(filepath.Join(n.flags.RuntimeDirectory, \"navmap*.ppm\"))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar newestFile string\n\tvar newestTime time.Time\n\n\tfor _, path := range matches {\n\t\tstat, err := os.Stat(path)\n\n\t\t// error reading modified time\n\t\tif err != nil {\n\t\t\tn.logger.Warn().Err(err).Msg(\"error getting last modified time\")\n\t\t\tcontinue\n\t\t}\n\n\t\t// file not newer\n\t\tif newestTime.After(stat.ModTime()) {\n\t\t\tcontinue\n\t\t}\n\n\t\tnewestFile = path\n\t\tnewestTime = stat.ModTime()\n\t}\n\n\tn.latestMapLock.Lock()\n\tdefer n.latestMapLock.Unlock()\n\n\t// file already known\n\tif n.latestMap != nil && n.latestMap.path == newestFile && n.latestMap.mTime == newestTime {\n\t\treturn nil\n\t}\n\n\tlatestMap, err := NewMap(newestFile, newestTime)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tn.latestMap = latestMap\n\tn.logger.Debug().Str(\"map\", newestFile).Msg(\"updated latest map\")\n\n\treturn nil\n}", "title": "" }, { "docid": "d8aca819f7f5a9281536773d96ac2e20", "score": "0.5201969", "text": "func (a *ASEDb) LoadFromFile(filename string) {\n\n\tfile, _ := ioutil.ReadFile(filename)\n\t_ = json.Unmarshal([]byte(file), &a)\n\ta.fileName = filename\n}", "title": "" }, { "docid": "bfaabad642447d6a1cca9f9e150350eb", "score": "0.5183435", "text": "func DecodeFile(fileName string) (*Map, error) {\n\n\tfile, err := os.Open(fileName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer file.Close()\n\n\treturn Decode(file)\n}", "title": "" }, { "docid": "22181e94a9ba2cc737b3b44c1e510c16", "score": "0.51555324", "text": "func New(reader Reader, file, key string) (mapping *Mapping, err error) {\n\tif reader == nil {\n\t\treturn nil, errors.New(\"reader object is not provided or nil\")\n\t}\n\tcontent, err := reader.Read(file) // Read the mappings file\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfullOpsMap := make(map[string][]OpsData)\n\terr = json.Unmarshal([]byte(content), &fullOpsMap)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\topsData, ok := fullOpsMap[key]\n\tif !ok {\n\t\treturn nil, errors.New(\"required data not found\")\n\t}\n\tmapping = &Mapping{}\n\tmapping.Key = key\n\tmapping.OpsData = opsData\n\tmapping.Reader = reader\n\treturn mapping, err\n}", "title": "" }, { "docid": "644f8dd367d8dd676b50870cfd7e756b", "score": "0.5152474", "text": "func (stack *Stack) ReadTxtMaps() {\n\tif !stack.mapLoaded {\n\t\tstack.spToBodyMap = ReadTxtMaps(stack.String())\n\t\tstack.mapLoaded = true\n\t}\n}", "title": "" }, { "docid": "f8a79fd6d809d665666f6f177b16471b", "score": "0.51335806", "text": "func LoadImageMap(imagesDir string) (ImageMap, error) {\n\tm := ImageMap{}\n\terr := filepath.Walk(imagesDir, func(path string, info os.FileInfo, err error) error {\n\t\tname := info.Name()\n\t\tif info.IsDir() || !strings.HasSuffix(name, \".yml\") {\n\t\t\treturn nil\n\t\t}\n\n\t\tr, err := filepath.Rel(imagesDir, path)\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to calculate relative path\")\n\t\t}\n\t\tr = strings.TrimSuffix(r, \".yml\")\n\t\tv, err := versionstream.LoadStableVersionFile(path)\n\t\tif err != nil {\n\t\t\tlog.Logger().Warnf(\"failed to parse image version file %s due to %s\", path, err.Error())\n\t\t\treturn nil\n\t\t}\n\t\tm.AddImage(r, v.Version, \"stream\")\n\t\treturn nil\n\t})\n\tif err != nil 
{\n\t\treturn m, errors.Wrapf(err, \"failed to walk images dir %s\", imagesDir)\n\t}\n\treturn m, nil\n}", "title": "" }, { "docid": "1a40bc7b0891311176006f1128139fcb", "score": "0.5127069", "text": "func ReadPatternsToMap(filename string) map[string]int {\n\tdata, err := ioutil.ReadFile(filename)\n\tif err != nil {\n\t\tpanic(\"Error reading file.\")\n\t}\n\tres := make(map[string]int)\n\n\tsplit := strings.Split(strings.Replace(strings.TrimSpace(string(data)), \"\\r\\n\", \"\\n\", -1), \"\\n\")\n\n\tfor _,s := range split {\n\t\tsplitLine := strings.Split(s, \": \")\n\t\tnum,_ := strconv.Atoi(splitLine[1])\n\t\tif num != 0 {res[splitLine[0]] = num}\n\t}\n\treturn res\n}", "title": "" }, { "docid": "425097bdec71e28b2d14a483158bbe20", "score": "0.51201016", "text": "func LoadLocationListFile(ci *geoattractorindex.CityIndex, filepath string, r io.Reader, ti *geoindex.TimeIndex) (recordsCount int, err error) {\n defer func() {\n if state := recover(); state != nil {\n err = log.Wrap(state.(error))\n }\n }()\n\n c := csv.NewReader(r)\n\n c.Comment = '#'\n c.FieldsPerRecord = 3\n\n for i := 0; ; i++ {\n record, err := c.Read()\n if err != nil {\n if err == io.EOF {\n break\n }\n\n log.Panic(err)\n }\n\n sourceName := record[0]\n id := record[1]\n timestampPhrase := record[2]\n\n timestamp, err := time.Parse(time.RFC3339, timestampPhrase)\n if err != nil {\n log.Panicf(\"Could not parse [%s]: %s\", timestampPhrase, err)\n }\n\n cr, err := ci.GetById(sourceName, id)\n if err != nil {\n if err == geoattractorindex.ErrNotFound {\n log.Panicf(\"Could not find record from source [%s] with ID [%s].\", sourceName, id)\n }\n\n log.Panic(err)\n }\n\n gr := geoindex.NewGeographicRecord(\n GeographicSourceListfile,\n filepath,\n timestamp,\n true,\n cr.Latitude,\n cr.Longitude,\n nil)\n\n err = ti.AddWithRecord(gr)\n log.PanicIf(err)\n\n recordsCount++\n }\n\n return recordsCount, nil\n}", "title": "" }, { "docid": "7d3fa3deff32f2e9e142b616e38cfd88", "score": "0.50822294", "text": "func (s *Memory) Load(ctx context.Context, identifier string) ([]domain.Finding, error) {\n\tf, ok := s.Map.Load(identifier)\n\tif ok {\n\t\treturn f.([]domain.Finding), nil\n\t}\n\t_, ok = s.Map.Load(identifier + \"-marker\")\n\tif ok {\n\t\treturn nil, domain.InProgressError{Identifier: identifier}\n\t}\n\treturn nil, domain.NotFoundError{Identifier: identifier}\n}", "title": "" }, { "docid": "5d6fadf4342c3dd85875c45f428fb76f", "score": "0.5058599", "text": "func getDbMappingFile() string {\n\treturn strings.Join([]string{homeDir, DOOP_MAPPING_FILE}, string(os.PathSeparator))\n}", "title": "" }, { "docid": "dac35a638a10af93b28e39aa8e5a6d9d", "score": "0.5054983", "text": "func load(fname string) ([]string, error) {\n\tif fname == \"\" {\n\t\treturn nil, errors.New(\"Dictionary file name cannot be empty\")\n\t}\n\n\t// attempt to load file\n\tfile, err := os.Open(fname)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer file.Close()\n\n\tvar lines []string\n\tscanner := bufio.NewScanner(file)\n\tscanner.Split(bufio.ScanLines)\n\tfor scanner.Scan() {\n\t\tlines = append(lines, scanner.Text())\n\t}\n\treturn lines, scanner.Err()\n}", "title": "" }, { "docid": "8a33afada07bad2994e9861d20531502", "score": "0.5037314", "text": "func (lm *LocalMeta) Load() error {\n\tfile, err := os.Open(lm.name)\n\tif err != nil && !os.IsNotExist(errors.Cause(err)) {\n\t\treturn errors.Trace(err)\n\t}\n\tif os.IsNotExist(errors.Cause(err)) {\n\t\treturn nil\n\t}\n\tdefer file.Close()\n\n\t_, err = toml.DecodeReader(file, lm)\n\treturn 
errors.Trace(err)\n}", "title": "" }, { "docid": "2d0c6f65c28392165f8282f7a0979d79", "score": "0.5035164", "text": "func (t *LineTable) initFileMap() {\n\tt.mu.Lock()\n\tdefer t.mu.Unlock()\n\n\tif t.fileMap != nil {\n\t\treturn\n\t}\n\tm := make(map[string]uint32)\n\n\tfor i := uint32(1); i < t.nfiletab; i++ {\n\t\ts := t.string(t.binary.Uint32(t.filetab[4*i:]))\n\t\tm[s] = i\n\t}\n\tt.fileMap = m\n}", "title": "" }, { "docid": "a92ea384f6d11b20ca2ec72f535e2a6b", "score": "0.503154", "text": "func CSVFileToMap(filePath string) (returnMap []map[string]string, err error) {\n\n\t// read csv file\n\tcsvfile, err := os.Open(filePath)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(err.Error())\n\t}\n\n\tdefer csvfile.Close()\n\n\treader := csv.NewReader(csvfile)\n\n\trawCSVdata, err := reader.ReadAll()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(err.Error())\n\t}\n\n\theader := []string{} // holds first row (header)\n\tfor lineNum, record := range rawCSVdata {\n\n\t\t// for first row, build the headers in the slice\n\t\tif lineNum == 0 {\n\t\t\tfor i := 0; i < len(record); i++ {\n\t\t\t\theader = append(header, strings.TrimSpace(record[i]))\n\t\t\t}\n\t\t} else {\n\t\t\t// for each cell, map[string]string key=header value=value\n\t\t\tline := map[string]string{}\n\t\t\tfor i := 0; i < len(record); i++ {\n\t\t\t\tline[header[i]] = record[i]\n\t\t\t}\n\t\t\treturnMap = append(returnMap, line)\n\t\t}\n\t}\n\n\tfor i := range returnMap {\n\t\tfmt.Printf(\"%v \\n\", returnMap[i])\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "36cc20672f47a09886b84de385722932", "score": "0.50283", "text": "func LoadMap(path string) ([][]bool, error) {\n\tgrid := [][]bool{}\n\n\tfile, err := os.Open(path)\n if err != nil {\n\t\treturn grid, errors.New(\"Could not open the file\")\n }\n defer file.Close()\n\n scanner := bufio.NewScanner(file)\n\n\tscanner.Scan()\n\tline := scanner.Text()\n\tif line != \"type octile\" {\n\t\tmsg := fmt.Sprintf(\"Bad first line \\\"%s\\\"\", line)\n\t\treturn grid, errors.New(msg)\n\t}\n\n\tscanner.Scan()\n\tline = scanner.Text()\n\tsplits := strings.Split(line, \" \")\n\tif len(splits) != 2 || splits[0] != \"height\" {\n\t\tmsg := fmt.Sprintf(\"Bad second line \\\"%s\\\"\", line)\n\t\treturn grid, errors.New(msg)\n\t}\n\theight, err := strconv.Atoi(splits[1])\n\tif err != nil {\n\t\tmsg := fmt.Sprintf(\"Non-int height \\\"%s\\\"\", splits[1])\n\t\treturn grid, errors.New(msg)\n\t}\n\t\n\tscanner.Scan()\n\tline = scanner.Text()\n\tsplits = strings.Split(line, \" \")\n\tif len(splits) != 2 || splits[0] != \"width\" {\n\t\tmsg := fmt.Sprintf(\"Bad third line \\\"%s\\\"\", line)\n\t\treturn grid, errors.New(msg)\n\t}\n\twidth, err := strconv.Atoi(splits[1])\n\tif err != nil {\n\t\tmsg := fmt.Sprintf(\"Non-int width \\\"%s\\\"\", splits[1])\n\t\treturn grid, errors.New(msg)\n\t}\n\n\tscanner.Scan()\n\tline = scanner.Text()\n\tif line != \"map\" {\n\t\tmsg := fmt.Sprintf(\"Bad fourth line \\\"%s\\\"\", line)\n\t\treturn grid, errors.New(msg)\n\t}\n\n\tgrid = make([][]bool, height)\n\tfor row := 0; row < height; row++ {\n\t\tgrid[row] = make([]bool, width)\n }\n\n\trow := 0\n\tlineNumber := 5 // We have already read some lines\n for scanner.Scan() {\n\t\tif row >= height {\n\t\t\tpanic(\"Height mismatch in map file\")\n\t\t}\n\t\tline := scanner.Text()\n\t\tif len(line) != width {\n\t\t\tpanic(\"Width mismatch in map file\")\n\t\t}\n\t\tfor i, r := range(line) {\n\t\t\tvar b bool\n\t\t\tswitch r {\n\t\t\t\tcase '.': // open space\n\t\t\t\t\tb = false\n\t\t\t\t\tbreak\n\t\t\t\tcase '@': // 
wall\n\t\t\t\t\tb = true\n\t\t\t\t\tbreak\n\t\t\t\tdefault:\n\t\t\t\t\tmsg := fmt.Sprintf(\"Bad rune '%c' on line %d\", r, lineNumber)\n\t\t\t\t\treturn grid, errors.New(msg)\n\t\t\t}\n\t\t\tgrid[row][i] = b\n\t\t}\n\t\trow++\n }\n\tif height != row {\n\t\tpanic(\"Height mismatch in map file\")\n\t}\n\treturn grid, nil\n}", "title": "" }, { "docid": "58da0d32373f99fd05f2eb72ff03edc1", "score": "0.5022217", "text": "func Open(filename string) (*MmappedFile, error) {\n f, err := os.Open(filename)\n if err != nil {\n return nil, err\n }\n defer f.Close()\n fi, err := f.Stat()\n if err != nil {\n return nil, err\n }\n \n size := fi.Size()\n if size == 0 {\n return &MmappedFile{}, nil\n }\n if size < 0 {\n return nil, fmt.Errorf(\"mmap: file %q has negative size\", filename)\n }\n if size != int64(int(size)) {\n return nil, fmt.Errorf(\"mmap: file %q is too large\", filename)\n }\n\n data, err := syscall.Mmap(int(f.Fd()), 0, int(size), syscall.PROT_READ, syscall.MAP_SHARED)\n if err != nil {\n return nil, err\n }\n r := &MmappedFile{data}\n runtime.SetFinalizer(r, (*MmappedFile).Close)\n return r, nil\n}", "title": "" }, { "docid": "4524eebf71193e3af592fdb84b9b52a5", "score": "0.50217766", "text": "func LoadFromFile(languageCode string, filePath string) *lgo.OperationResult {\n\tdata, err := ioutil.ReadFile(filePath)\n\tif err != nil {\n\t\treturn lgo.NewFailureWithReturnObject(err)\n\t}\n\tvar keyValues map[string]string\n\terr = json.Unmarshal(data, &keyValues)\n\tif err != nil {\n\t\treturn lgo.NewFailureWithReturnObject(err)\n\t}\n\tlocalizations[languageCode] = keyValues\n\treturn lgo.NewSuccess(nil)\n}", "title": "" }, { "docid": "f5fa92669a10a7e59e5618229c415bb9", "score": "0.50203186", "text": "func ReadFromFile(path string) (org map[string]interface{}, err error) {\n\tdata, err := ioutil.ReadFile(\"./data/\" + path)\n\tif err != nil {\n\t\treturn\n\t}\n\tj, err := yaml.YAMLToJSON(data)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = json.Unmarshal(j, &org)\n\treturn\n}", "title": "" }, { "docid": "3f3da1ae7d849cf754b9fdf622a48022", "score": "0.5013532", "text": "func (doop *Doop) getDbMappingFile() string {\n\treturn strings.Join([]string{doop.homeDir, DOOP_MAPPING_FILE}, string(os.PathSeparator))\n}", "title": "" }, { "docid": "db06ce889e01e67fe0e3052fcead4bdf", "score": "0.50051224", "text": "func (t *testImpl) load(m Map) {\n}", "title": "" }, { "docid": "d39c97e0db4364e3f14395406864973d", "score": "0.50013804", "text": "func loadMappings() error {\n\tv := config.AtPath(\"api\", \"proxyMappings\").AsStringMap()\n\tmtx.Lock()\n\tdefer mtx.Unlock()\n\tmappings = v\n\n\tlog.Infof(\"Loaded %v hostname mappings from config service: %#v\", len(v), mappings)\n\n\treturn nil\n}", "title": "" }, { "docid": "ffe70248ca743204582903952291f8ae", "score": "0.49971065", "text": "func (l Loader) Load(c interface{}) error {\n\t// Set default values\n\tdefaults.SetDefaults(c)\n\n\t// Expand config paths\n\tloadPaths := []string{}\n\n\tfor _, configPath := range l.configPaths {\n\t\t// Interpret shell globs\n\t\texpandedPaths, err := filepath.Glob(configPath)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to expand configuration \"+\n\t\t\t\t\"path \\\"%s\\\" glob: %s\", configPath,\n\t\t\t\terr.Error())\n\t\t}\n\n\t\tfor _, expandedPath := range expandedPaths {\n\t\t\t// Check not directory\n\t\t\tfi, err := os.Stat(expandedPath)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"failed to stat \"+\n\t\t\t\t\t\"configuration path \\\"%s\\\": %s\",\n\t\t\t\t\texpandedPath, 
err.Error())\n\t\t\t}\n\n\t\t\tif fi.IsDir() {\n\t\t\t\treturn fmt.Errorf(\"configuration path \"+\n\t\t\t\t\t\"\\\"%s\\\" is a directory, cannot be\",\n\t\t\t\t\texpandedPath)\n\t\t\t}\n\n\t\t\t// Not directory, add\n\t\t\tloadPaths = append(loadPaths, expandedPath)\n\t\t}\n\t}\n\n\t// Try to load all files in loadPaths\n\tfor _, loadPath := range loadPaths {\n\t\t// Check if MapDecoder exists for file extension\n\t\tdecoder, ok := l.formats[filepath.Ext(loadPath)]\n\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Use MapDecoder if exists\n\t\t// Open file\n\t\tloadFile, err := os.Open(loadPath)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error opening configuration \"+\n\t\t\t\t\"file \\\"%s\\\": %s\", loadPath, err.Error())\n\t\t}\n\n\t\t// Call MapDecoder\n\t\tloadMap := map[string]interface{}{}\n\n\t\tif err = decoder.Decode(loadFile, &loadMap); err != nil {\n\t\t\treturn fmt.Errorf(\"error decoding \\\"%s\\\": %s\",\n\t\t\t\tloadPath, err.Error())\n\t\t}\n\n\t\t// Put map into struct\n\t\tif err = mapstructure.Decode(loadMap, c); err != nil {\n\t\t\treturn fmt.Errorf(\"error putting decoded map \"+\n\t\t\t\t\"for \\\"%s\\\" into configuration struct: %s\",\n\t\t\t\tloadPath, err.Error())\n\t\t}\n\t}\n\n\t// Validate configuration struct\n\tif err := l.validate.Struct(c); err != nil {\n\t\treturn fmt.Errorf(\"failed to validate configuration \"+\n\t\t\t\"struct: %s\", err.Error())\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "84f8d5fa157eaa4e9a6d452449ec7b83", "score": "0.49963692", "text": "func (s *SourceMap) ReadFrom(r io.Reader) (n int64, err error) {\n\trr := bufio.NewReader(r)\n\n\tb := make([]byte, 26)\n\tnn, err := io.ReadFull(rr, b)\n\tn += int64(nn)\n\tif err != nil {\n\t\treturn n, err\n\t}\n\n\tif len(b) < 16 || bytes.Compare(b[0:4], []byte(sourceMapSignature)) != 0 {\n\t\treturn n, errors.New(\"invalid source map format\")\n\t}\n\tif b[4] != versionMajor || b[5] != versionMinor {\n\t\treturn n, errors.New(\"invalid source map version\")\n\t}\n\n\ts.Origin = binary.LittleEndian.Uint16(b[6:8])\n\ts.Size = binary.LittleEndian.Uint32(b[8:12])\n\ts.CRC = binary.LittleEndian.Uint32(b[12:16])\n\tfileCount := int(binary.LittleEndian.Uint16(b[16:18]))\n\tlineCount := int(binary.LittleEndian.Uint32(b[18:22]))\n\texportCount := int(binary.LittleEndian.Uint32(b[22:26]))\n\n\ts.Files = make([]string, fileCount)\n\tfor i := 0; i < fileCount; i++ {\n\t\tfile, err := rr.ReadString(0)\n\t\tn += int64(len(file))\n\t\tif err != nil {\n\t\t\treturn n, err\n\t\t}\n\t\ts.Files[i] = file[:len(file)-1]\n\t}\n\n\ts.Lines = make([]SourceLine, 0, lineCount)\n\tif lineCount > 0 {\n\t\tvar line SourceLine\n\t\tfor i := 0; i < lineCount; i++ {\n\t\t\tvar nn int\n\t\t\tline, nn, err = decodeSourceLine(rr, line)\n\t\t\tn += int64(nn)\n\t\t\tif err != nil {\n\t\t\t\treturn n, err\n\t\t\t}\n\t\t\ts.Lines = append(s.Lines, line)\n\t\t}\n\t}\n\n\ts.Exports = make([]Export, exportCount)\n\tfor i := 0; i < exportCount; i++ {\n\t\tlabel, err := rr.ReadString(0)\n\t\tn += int64(len(label))\n\t\tif err != nil {\n\t\t\treturn n, err\n\t\t}\n\t\ts.Exports[i].Label = label[:len(label)-1]\n\n\t\tnn, err = io.ReadFull(rr, b[:2])\n\t\tn += int64(nn)\n\t\tif err != nil {\n\t\t\treturn n, err\n\t\t}\n\t\ts.Exports[i].Address = binary.LittleEndian.Uint16(b[0:2])\n\t}\n\n\treturn n, nil\n}", "title": "" }, { "docid": "2b387690db7c9ac76dd96604c688f871", "score": "0.49953848", "text": "func LoadURLMapFromDB() (map[string]string, error) {\n\tvar pathsToUrls map[string]string\n\t// try to open the db\n\tdb, err := 
prepareDB()\n\tif err == nil {\n\t\tpathsToUrls, err = getDBValues(db)\n\t}\n\tdefer db.Close()\n\treturn pathsToUrls, err\n}", "title": "" }, { "docid": "44100f0a74073a5ea74e23a8e80d41ac", "score": "0.49933368", "text": "func (c *Chunk) readXMap() (e error) {\n\te = c.s.FromFile(c.xFile, &c.xMap)\n\treturn\n}", "title": "" }, { "docid": "de0c384c24bbd7030feb3019734f4a2c", "score": "0.49918306", "text": "func LoadMap(c interface{}, vars map[string]interface{}, conf Conf) error {\n\tconf.EnvDisable = true\n\tconf.FlagDisable = true\n\treturn LoadWithMap(c, vars, conf)\n}", "title": "" }, { "docid": "8413ed6f845e8fcfe23aba69c5102c04", "score": "0.49872956", "text": "func loadGraphmap() (*ebpf.CollectionSpec, error) {\n\treader := bytes.NewReader(_GraphmapBytes)\n\tspec, err := ebpf.LoadCollectionSpecFromReader(reader)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"can't load graphmap: %w\", err)\n\t}\n\n\treturn spec, err\n}", "title": "" }, { "docid": "5520669a753f4057e26854977e4f2bfe", "score": "0.4987037", "text": "func (c *Loader) LoadFromTomlFile(f string) error {\n\t_, err := toml.DecodeFile(f, c.confPtr)\n\treturn err\n}", "title": "" }, { "docid": "57d002ba36b7df9a3d88936d8438c3e6", "score": "0.49852365", "text": "func (f *fileStore) Load() error {\n\tf.mux.Lock()\n\tdefer f.mux.Unlock()\n\n\tif _, err := os.Stat(f.path); os.IsNotExist(err) {\n\t\t// if not exists, return empty slice\n\t\treturn nil\n\t}\n\n\tdata, err := ioutil.ReadFile(f.path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tcache := make(map[string]model.LBPolicy, 0)\n\terr = json.Unmarshal(data, &cache)\n\tif err != nil {\n\t\treturn err\n\t}\n\tf.cache = cache\n\treturn nil\n}", "title": "" }, { "docid": "e1fdee9493fca66b27a9caca11c6eb14", "score": "0.49803233", "text": "func (db *Db) mmap(minsz int) error {\n\tdb.mmaplock.Lock()\n\tdefer db.mmaplock.Unlock()\n\n\tinfo, err := db.file.Stat()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"mmap stat error: %s\", err)\n\t} else if int(info.Size()) < db.pageSize*2 {\n\t\treturn fmt.Errorf(\"file size too small\")\n\t}\n\n\t// Ensure the size is at least the minimum size.\n\tvar size = int(info.Size())\n\tif size < minsz {\n\t\tsize = minsz\n\t}\n\tsize, err = db.mmapSize(size)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Dereference all mmap references before unmapping.\n\tif db.rwtx != nil {\n\t\tdb.rwtx.root.dereference()\n\t}\n\n\t// Unmap existing data before continuing.\n\tif err := db.munmap(); err != nil {\n\t\treturn err\n\t}\n\n\t// Memory-map the data file as a byte slice.\n\tif err := mmap(db, size); err != nil {\n\t\treturn err\n\t}\n\n\t// Save references to the meta pages.\n\tdb.meta0 = db.page(0).meta()\n\tdb.meta1 = db.page(1).meta()\n\n\t// Validate the meta pages. We only return an error if both meta pages fail\n\t// validation, since meta0 failing validation means that it wasn't saved\n\t// properly -- but we can recover using meta1. And vice-versa.\n\terr0 := db.meta0.validate()\n\terr1 := db.meta1.validate()\n\tif err0 != nil && err1 != nil {\n\t\treturn err0\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "f2c51865f134bb34d8b172f61fb97e02", "score": "0.49757195", "text": "func ReadMap(rd io.Reader) (m Map, err error) {\n\t// Just in case of programming mistake. 
Not intentionally used.\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\t\t\terr = errors.New(fmt.Sprint(r))\n\t\t}\n\t}()\n\n\treturn decodeMap(rd, \"\", true)\n}", "title": "" }, { "docid": "bc8ff54d5b4f835071f926afb963cd91", "score": "0.49716735", "text": "func (p *PersistenceAPI) Load() ([]byte, error) {\n\treturn ioutil.ReadFile(p.filePath)\n}", "title": "" }, { "docid": "e2090488d2039f556c00b347c2577ab0", "score": "0.49701706", "text": "func OpenMappedFile(path string, used int) *MappedFile {\n\tfile, path, size := OpenFile(path + \".dat\")\n\tif size == 0 {\n\t\tsize = resize(file.Fd(), MIN_MMAP)\n\t}\n\tdata := Mmap(file, 0, size)\n\treturn &MappedFile{\n\t\tpath: path + \".dat\",\n\t\tfile: file,\n\t\tsize: size,\n\t\tused: used,\n\t\tdata: data,\n\t}\n}", "title": "" }, { "docid": "b5ac69983f1d4e2f7d074cff2f89e5b8", "score": "0.4968598", "text": "func LoadFunctionMap() FunctionMap {\n\tfm := make(FunctionMap)\n\n\tfm.AddFunction(\"keyMatch\", util.KeyMatchFunc)\n\tfm.AddFunction(\"keyMatch2\", util.KeyMatch2Func)\n\tfm.AddFunction(\"regexMatch\", util.RegexMatchFunc)\n\tfm.AddFunction(\"ipMatch\", util.IPMatchFunc)\n\n\treturn fm\n}", "title": "" }, { "docid": "9fdfdbeb596596b25e90a1eba2217f8b", "score": "0.49561334", "text": "func NewMap(ctx context.Context, path string) (m *pb.Map, err error) {\n\tf, err := os.Open(path)\n\tif err != nil {\n\t\terr = errors.Wrap(err, \"failed to load\")\n\t\treturn\n\t}\n\tdefer f.Close()\n\tm = &pb.Map{}\n\td := tmx.NewDecoder(f)\n\terr = d.Decode(m)\n\tif err != nil {\n\t\terr = errors.Wrap(err, \"failed to marshal\")\n\t\treturn\n\t}\n\treturn\n}", "title": "" }, { "docid": "8ab74e7da9b36a4ebf23890f7627f7e3", "score": "0.49321058", "text": "func parseRepoMapping(path string) (map[repoMappingKey]string, error) {\n\tr, err := os.Open(path)\n\tif err != nil {\n\t\t// The repo mapping manifest only exists with Bzlmod, so it's not an\n\t\t// error if it's missing. Since any repository name not contained in the\n\t\t// mapping is assumed to be already canonical, an empty map is\n\t\t// equivalent to not applying any mapping.\n\t\treturn nil, nil\n\t}\n\tdefer r.Close()\n\n\t// Each line of the repository mapping manifest has the form:\n\t// canonical name of source repo,apparent name of target repo,target repo runfiles directory\n\t// https://cs.opensource.google/bazel/bazel/+/1b073ac0a719a09c9b2d1a52680517ab22dc971e:src/main/java/com/google/devtools/build/lib/analysis/RepoMappingManifestAction.java;l=117\n\ts := bufio.NewScanner(r)\n\trepoMapping := make(map[repoMappingKey]string)\n\tfor s.Scan() {\n\t\tfields := strings.SplitN(s.Text(), \",\", 3)\n\t\tif len(fields) != 3 {\n\t\t\treturn nil, fmt.Errorf(\"runfiles: bad repo mapping line %q in file %s\", s.Text(), path)\n\t\t}\n\t\trepoMapping[repoMappingKey{fields[0], fields[1]}] = fields[2]\n\t}\n\n\tif err = s.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"runfiles: error parsing repo mapping file %s: %w\", path, err)\n\t}\n\n\treturn repoMapping, nil\n}", "title": "" }, { "docid": "a3435d2a370ea6cc5f72b61bb23f4d41", "score": "0.49258962", "text": "func ParseFromFile(path string) (map[interface{}]interface{}, error) {\n\tdata, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\tlog.Fatalf(\"error: %v\", err)\n\t}\n\treturn ParseFromString(string(data))\n}", "title": "" }, { "docid": "931311752ea46cdd614cd138dca99d57", "score": "0.49173287", "text": "func (i *SearchIndex) loadFromFile() (*types.Index, error) {\n\t// an index file is not required. 
Its ok if it does not exist.\n\tif _, err := os.Stat(i.indexFilepath); err != nil {\n\t\treturn nil, err\n\t}\n\n\tbytes, err := ioutil.ReadFile(i.indexFilepath)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to read index file from %q. File corrupt?: %v\", i.indexFilepath, err)\n\t}\n\n\tindex := &types.Index{}\n\tif len(bytes) == 0 {\n\t\treturn index, nil\n\t}\n\n\terr = yaml.Unmarshal(bytes, &index)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"could not unmarshal index file with path '%s': %v\", i.indexFilepath, err)\n\t}\n\treturn index, nil\n}", "title": "" }, { "docid": "078328a86e9807fb5d2d6e375541f275", "score": "0.49170455", "text": "func Load(relativePathToFile string) (err error) {\n\tEnv.emap = map[string]interface{}{}\n\tEnv.raw = []byte{}\n\n\tdata, err := ioutil.ReadFile(relativePathToFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tEnv.raw = data\n\terr = json.Unmarshal(Env.raw, &Env.emap)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t//Check for mappings, error if they're not present\n\t_, ok := Env.emap[MAPPING_KEY_NAME]\n\tif !ok {\n\t\terrorMsg := fmt.Sprintf(\"JSON Malformed. Missing top level property named '%v' to determine projectId\", MAPPING_KEY_NAME)\n\t\treturn errors.New(errorMsg)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "0fbf60e9614dda10ceba27070f5bd371", "score": "0.49155578", "text": "func Map(f *os.File, offset int64, len int, prot Prot, flags MapFlag) (Mmap, error) {\n\treturn syscall.Mmap(int(f.Fd()), offset, len, int(prot), int(flags))\n}", "title": "" }, { "docid": "eb93d4829d88398335b000eece117d44", "score": "0.49108914", "text": "func (e *engine) loadAllRecords() <-chan *loader {\n\t// initialize the channels to return the keys and blocks on\n\tldr := make(chan *loader)\n\tgo func() {\n\t\tvar o, k int\n\t\t// start iterating through mapped file reigon one page at a time\n\t\tfor o < len(e.data) {\n\t\t\t// checking for non-empty page\n\t\t\tif !bytes.Equal(e.data[o:o+page], empty) {\n\t\t\t\t// if e.data[o+maxKey-1] != 0x00 {\n\t\t\t\t// found one; return key and block offset\n\t\t\t\tldr <- &loader{e.data[o : o+maxKey], o / page}\n\t\t\t}\n\t\t\tk++\n\t\t\to = k * page\n\t\t}\n\t\tclose(ldr)\n\t}()\n\treturn ldr\n}", "title": "" }, { "docid": "b507054bd765b8743f9c568d9ea453a3", "score": "0.49105874", "text": "func parseFileMapping(fileMappingStr string) (FileMapping, error) {\n\tsplitString := strings.Split(fileMappingStr, \":\")\n\n\tif len(splitString) != 2 {\n\t\treturn FileMapping{}, fmt.Errorf(\"invalid file mapping string: '%s'\", fileMappingStr)\n\t}\n\n\tmatchPattern := splitString[0]\n\tdestPath := splitString[1]\n\n\treturn FileMapping{matchPattern, destPath}, nil\n}", "title": "" }, { "docid": "1e54f07b5ac1fb01d6d137deaf4ffde6", "score": "0.49072328", "text": "func (l *configLoader) LoadRaw() (map[interface{}]interface{}, error) {\n\t// What path should we use\n\tconfigPath := l.ConfigPath()\n\t_, err := os.Stat(configPath)\n\tif err != nil {\n\t\treturn nil, errors.Errorf(\"Couldn't load '%s': %v\", configPath, err)\n\t}\n\n\tfileContent, err := ioutil.ReadFile(configPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\trawMap := map[interface{}]interface{}{}\n\terr = yaml.Unmarshal(fileContent, &rawMap)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn rawMap, nil\n}", "title": "" }, { "docid": "3a15276d5bfea28f7dd08e19ea06d7fd", "score": "0.48998946", "text": "func ReadJSONAsMapFile(configFileLocation string, stringLocator string) string {\n\tbuff, err := ioutil.ReadFile(configFileLocation)\n\tif err 
!= nil {\n\t\tlog.Fatal(\"error:\", err)\n\t}\n\tvar jsonString = string(buff)\n\tjsonMap, _ := objects.NewMapFromJSON(jsonString)\n\tvar keyValue = jsonMap.Get(stringLocator).(string)\n\treturn keyValue\n}", "title": "" }, { "docid": "1fea393929b23593e5ce40de0f344302", "score": "0.48951715", "text": "func Import2() error {\n var locDataList map[string]int32\n locDataList = make(map[string]int32)\n \n // Read Blocks\n fmt.Println(\"Read Blocks\")\n readBlocks2(locDataList)\n \n // Open DB\n fmt.Println(\"Insert into Database\")\n fillDatabase2(locDataList)\n \n return nil;\n}", "title": "" }, { "docid": "6812b81e8ff6fe1c04dc2baff79568c8", "score": "0.48940873", "text": "func mmap(sl *SkipList, sz int) (data uintptr, err error) {\n\t// Map the data file to memory.\n\tb, err := syscall.Mmap(int(sl.file.Fd()), 0, sz, syscall.PROT_READ|syscall.PROT_WRITE, syscall.MAP_SHARED)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\t// Advise the kernel that the mmap is accessed randomly.\n\tif err := syscall.Madvise(b, syscall.MADV_RANDOM); err != nil {\n\t\treturn 0, fmt.Errorf(\"madvise: %s\", err)\n\t}\n\n\n\treturn uintptr(unsafe.Pointer(&b[0])), nil\n}", "title": "" }, { "docid": "adfdedb722e8f39f6a2777949f76dafb", "score": "0.4884117", "text": "func LoadEmployeesFromFile(empFilePath string) (map[string]Employee, error) {\n\tvar employees Employees\n\n\tempFile, err := os.Open(empFilePath)\n\tif err != nil {\n\t\tlog.Fatalf(\"Error in reading employee json file from the path %v \\n\", empFilePath)\n\t}\n\tdefer empFile.Close()\n\tbyteValue, err := ioutil.ReadAll(empFile)\n\tjson.Unmarshal(byteValue, &employees)\n\t//To load into a generic map interface\n\t// var result map[string]interface{}\n\t// json.Unmarshal([]byte(byteValue), &result)\n\tempMap := make(map[string]Employee)\n\tfor i, v := range employees.Employees {\n\t\tid := employees.Employees[i].Id\n\t\tempMap[id] = v\n\t}\n\treturn empMap, err\n}", "title": "" }, { "docid": "54062a094d4e3ebfcf6ccde2a38dbf41", "score": "0.48813298", "text": "func ReadMap(r io.Reader, val interface{}) error {\n\tv := reflect.Indirect(reflect.ValueOf(val))\n\treturn ReadMapReflect(r, v)\n}", "title": "" }, { "docid": "c8b0a9de21661ab416495e2e0ffa9a24", "score": "0.48799068", "text": "func (f *FilePersister) Load() ([]*Proxy, error) {\n\tbproxy, err := ioutil.ReadFile(f.FilePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar proxies = make([]*Proxy, 0)\n\n\terr = json.Unmarshal(bproxy, &proxies)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn proxies, nil\n}", "title": "" }, { "docid": "2c6ed7ae3380f932f097bdee639d7303", "score": "0.4877037", "text": "func LoadWithFuncMap(opt Options) (map[string]*template.Template, error) {\n\tbasePath = opt.Directory\n\texts = opt.Extensions\n\treturn loadTemplates(opt.Funcs)\n}", "title": "" }, { "docid": "82aca184e307c15e5eab39297eab6250", "score": "0.4864992", "text": "func (d *Dataset) Map(name string, mapperId gio.MapperId) *Dataset {\n\tret, step := add1ShardTo1Step(d)\n\tstep.Name = name + \".Map\"\n\tstep.IsPipe = false\n\tstep.IsGoCode = true\n\n\tex, _ := os.Executable()\n\n\tvar args []string\n\targs = append(args, ex)\n\t// args = append(args, os.Args[1:]...) 
// empty string in an arg can fail the execution\n\targs = append(args, \"-gleam.mapper=\"+string(mapperId))\n\tcommandLine := strings.Join(args, \" \")\n\t// println(\"args:\", commandLine)\n\tstep.Command = script.NewShellScript().Pipe(commandLine).GetCommand()\n\treturn ret\n}", "title": "" }, { "docid": "521ea760b216e77803df9cf810cf891b", "score": "0.48623747", "text": "func loadKeysFromFile(fn string) (openpgp.EntityList, error) {\n\t// use an intermediary bytes.Reader to support key import from\n\t// stdin for the seek operation below\n\tdata, err := ioutil.ReadFile(fn)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tbuf := bytes.NewReader(data)\n\n\tif entities, err := openpgp.ReadKeyRing(buf); err == nil {\n\t\treturn entities, nil\n\t}\n\n\t// cannot load keys from file, perhaps it's ascii armored?\n\t// rewind and try again\n\tif _, err := buf.Seek(0, io.SeekStart); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn openpgp.ReadArmoredKeyRing(buf)\n}", "title": "" } ]
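Many of the loader snippets above share the same read-the-file-then-unmarshal-into-a-map shape. Purely as an illustration of that common pattern — the file name, the JSON encoding, and the map[string]string types are assumptions for this sketch, not taken from any single passage — a minimal self-contained Go example:

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// loadStringMap mirrors the pattern used by several snippets above:
// read the whole file, decode it into a map, and return that map.
func loadStringMap(path string) (map[string]string, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	m := make(map[string]string)
	if err := json.Unmarshal(data, &m); err != nil {
		return nil, err
	}
	return m, nil
}

func main() {
	// "config.json" is a placeholder path used only for the example.
	m, err := loadStringMap("config.json")
	if err != nil {
		fmt.Println("load failed:", err)
		return
	}
	fmt.Println(m)
}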
324dba3ba2c361cefce52ebe1d8626c7
mc_blockNumber: Returns the number of most recent block. Parameters: none. Returns: QUANTITY - integer of the current block number the client is on.
[ { "docid": "3dfd677369d2a1bfe52abfc5f435f347", "score": "0.7977864", "text": "func (rpcCli *RpcClient) MC_blockNumber() (int64, error) {\n\n\tpointer, err := rpcCli.netServeHandler(MC_blockNumber, nil)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn pointer.ToInt()\n}", "title": "" } ]
[ { "docid": "a61f761ef76cc261117ffa832565008b", "score": "0.7736278", "text": "func (e *EVMBackend) BlockNumber() (hexutil.Uint64, error) {\n\t// NOTE: using 0 as min and max height returns the blockchain info up to the latest block.\n\tinfo, err := e.clientCtx.Client.BlockchainInfo(e.ctx, 0, 0)\n\tif err != nil {\n\t\treturn hexutil.Uint64(0), err\n\t}\n\n\treturn hexutil.Uint64(info.LastHeight), nil\n}", "title": "" }, { "docid": "02c1b0cf1e48d5a4284a960ac3869df0", "score": "0.74183315", "text": "func (e *EthereumClients) BlockNumber(ctx context.Context) (uint64, error) {\n\treturn e.DefaultClient.BlockNumber(ctx)\n}", "title": "" }, { "docid": "c23ea2f8945fbb145adf64c6aec3f3c7", "score": "0.74030745", "text": "func (block *Block) GetNumber() uint64 {\n\treturn block.Num\n}", "title": "" }, { "docid": "f03a083dfcfa0dad38ba980bb2cd61cd", "score": "0.73934895", "text": "func (self *block) BlockNum() uint64 {\n\treturn self.blockNum\n}", "title": "" }, { "docid": "e94bb1e7e027d506d68437c71db8e172", "score": "0.73622584", "text": "func (_L2OutputOracle *L2OutputOracleCallerSession) LatestBlockNumber() (*big.Int, error) {\n\treturn _L2OutputOracle.Contract.LatestBlockNumber(&_L2OutputOracle.CallOpts)\n}", "title": "" }, { "docid": "a9074a86e8644760a583bfc62fe69ac8", "score": "0.7362005", "text": "func (e *EthereumClient) BlockNumber(ctx context.Context) (uint64, error) {\n\tbn, err := e.Client.BlockNumber(ctx)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn bn, nil\n}", "title": "" }, { "docid": "8c9d55bb67de92da46bed62628c9cd39", "score": "0.73350006", "text": "func (c *client) BlockNumber(ctx context.Context) (*big.Int, error) {\n\tvar r string\n\terr := c.rpc.CallContext(ctx, &r, \"eth_blockNumber\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn hexutil.DecodeBig(r)\n}", "title": "" }, { "docid": "5f9d0e12ad9e7a351091830b4a351181", "score": "0.73284906", "text": "func (_Comtroller *ComtrollerSession) GetBlockNumber() (*big.Int, error) {\n\treturn _Comtroller.Contract.GetBlockNumber(&_Comtroller.CallOpts)\n}", "title": "" }, { "docid": "8ae578afe1efffa396e5bd46421480ed", "score": "0.7325827", "text": "func (api *API) BlockNumber() (uint64, error) {\n\tvar res string\n\tif err := api.Request(\"eth_blockNumber\", nil, &res); err != nil {\n\t\treturn 0, err\n\t}\n\treturn new(big.Int).SetBytes(common.FromHex(res)).Uint64(), nil\n}", "title": "" }, { "docid": "20080b19108db0910b45927b9b250d24", "score": "0.73148936", "text": "func (ws *WatchService) GetCurrentBlockNumber() *big.Int {\n\treturn new(big.Int).SetUint64(ws.GetBlockNumber())\n}", "title": "" }, { "docid": "28b5ce44affe146230e93907dfde7ea2", "score": "0.7280816", "text": "func (_Comtroller *ComtrollerCallerSession) GetBlockNumber() (*big.Int, error) {\n\treturn _Comtroller.Contract.GetBlockNumber(&_Comtroller.CallOpts)\n}", "title": "" }, { "docid": "cb8d9d7b1df468c438fe123e70c205ec", "score": "0.7249624", "text": "func (client Client) LatestBlockNum() (*big.Int, error) {\n\tvar hexStr string\n\tif err := client.rpc.Call(&hexStr, \"eth_blockNumber\"); err != nil {\n\t\treturn nil, fmt.Errorf(\"rpc: %s\", err)\n\t}\n\n\thexStr = utils.RemoveHexPrefix(hexStr)\n\n\t// pad if hex length is odd\n\tif len(hexStr)%2 != 0 {\n\t\thexStr = \"0\" + hexStr\n\t}\n\n\thexBytes, err := hex.DecodeString(hexStr)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"hex: %s\", err)\n\t}\n\n\treturn new(big.Int).SetBytes(hexBytes), nil\n}", "title": "" }, { "docid": "6072d12ccdf210cc4f3c8c9299bae782", "score": "0.72360593", "text": "func 
(_L2OutputOracle *L2OutputOracleSession) LatestBlockNumber() (*big.Int, error) {\n\treturn _L2OutputOracle.Contract.LatestBlockNumber(&_L2OutputOracle.CallOpts)\n}", "title": "" }, { "docid": "1fff8f6c05d1790b3f6771ca96ff3ca6", "score": "0.72352374", "text": "func (_L2OutputOracle *L2OutputOracleCaller) LatestBlockNumber(opts *bind.CallOpts) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _L2OutputOracle.contract.Call(opts, &out, \"latestBlockNumber\")\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "title": "" }, { "docid": "7041b770d0992e0e52447e2e929cc572", "score": "0.71208256", "text": "func (ws *WatchService) GetBlockNumber() uint64 {\n\tws.mu.RLock()\n\tdefer ws.mu.RUnlock()\n\n\treturn ws.blkNum\n}", "title": "" }, { "docid": "a6d01510c53ec3e698ff7a7162ed63b8", "score": "0.7097633", "text": "func (b *BlockchainBlock) Number() models.Number {\n\treturn b.blk.Number\n}", "title": "" }, { "docid": "1fe737416fa8940f69fb4c902d0f843a", "score": "0.6992104", "text": "func GetBlockNum() string {\n\tblock, err := Client.BlockByNumber(context.Background(), nil)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\treturn block.Number().String()\n\n}", "title": "" }, { "docid": "ad027e411e7cad531622ca663942862c", "score": "0.69676626", "text": "func GetBlockNumber(phi PurseHandlerInterface) (blockNum int64, err error) {\n\treturn phi.getBlockNumber()\n}", "title": "" }, { "docid": "1ae7e20380f2b8820a5ca0b9aef9017d", "score": "0.6964669", "text": "func (t *TransactionCount) GetBlockNumber() int64 {\n\treturn t.BlockNumber\n}", "title": "" }, { "docid": "e6da9c62106f91717d9fd3622ea33cde", "score": "0.690743", "text": "func (khc *KeyHeaderChain) GetBlockNumber(hash common.Hash) *uint64 {\n\tif cached, ok := khc.numberCache.Get(hash); ok {\n\t\tnumber := cached.(uint64)\n\t\treturn &number\n\t}\n\tnumber := rawdb.ReadKeyHeaderNumber(khc.chainDb, hash)\n\tif number != nil {\n\t\tkhc.numberCache.Add(hash, *number)\n\t}\n\treturn number\n}", "title": "" }, { "docid": "20d897db597592ae214fe842d2c06a56", "score": "0.68853176", "text": "func (e *ContractHistoryEventCompleteStateChange) GetBlockNumber() int64 {\n\treturn e.BlockNumber\n}", "title": "" }, { "docid": "e105bb139f0e80f6e63efea7f331a974", "score": "0.6878285", "text": "func (_Comtroller *ComtrollerCaller) GetBlockNumber(opts *bind.CallOpts) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _Comtroller.contract.Call(opts, &out, \"getBlockNumber\")\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "title": "" }, { "docid": "7aee2a52d011971050994eaab86ef239", "score": "0.6860886", "text": "func (sdk *BcosSDK) GetBlockNumber() *RPCResult {\n\truntime.GC()\n\tdebug.FreeOSMemory()\n\n\tvar raw string\n\tif err := sdk.backend.CallContext(context.TODO(), &raw, \"getBlockNumber\", sdk.backend.groupID); err != nil {\n\t\treturn toRPCResult(\"\", err)\n\t}\n\tblockNumber, err := strconv.ParseInt(raw, 0, 64)\n\tif err != nil {\n\n\t\treturn toRPCResult(\"\", fmt.Errorf(\"parse block number failed, err: %v\", err))\n\t}\n\treturn toRPCResult(strconv.FormatInt(blockNumber, 10), err)\n}", "title": "" }, { "docid": "c467b2ec5e247de33e674ed4a639ce3f", "score": "0.68536025", "text": "func GetLatestBlockNumber(c *rpc.Client) (*big.Int, error) {\n\tvar number string\n\n\terr := c.Call(&number, \"eth_blockNumber\")\n\tif err != nil {\n\t\treturn nil, 
err\n\t}\n\n\treturn hexutil.DecodeBig(number)\n\n}", "title": "" }, { "docid": "3b34ca7316aaf2e3dadf9604dd49ca98", "score": "0.6833449", "text": "func (e *ContractNewChannelStateChange) GetBlockNumber() int64 {\n\treturn e.BlockNumber\n}", "title": "" }, { "docid": "f70c6c33ddf66de2a31adc0c1549c3f7", "score": "0.68162775", "text": "func GetBlockNumber(db DatabaseReader, hash math.Hash) uint64 {\n\tdata, _ := db.Get(append(blockHashPrefix, hash.Bytes()...))\n\tif len(data) != 8 {\n\t\treturn MissingNumber\n\t}\n\treturn binary.BigEndian.Uint64(data)\n}", "title": "" }, { "docid": "7b5da099c0e5ae0b5e287748641f40a4", "score": "0.6759478", "text": "func (e *ContractTokenAddedStateChange) GetBlockNumber() int64 {\n\treturn e.BlockNumber\n}", "title": "" }, { "docid": "10c59f230718e634f7b1984473df67ef", "score": "0.67098296", "text": "func (ec *Client) BlockNumberAt(ctx context.Context) (*big.Int, error) {\n\tvar result hexutil.Big\n\terr := ec.c.CallContext(ctx, &result, \"platon_blockNumber\")\n\treturn (*big.Int)(&result), err\n}", "title": "" }, { "docid": "0287fa8a6a92c775cdc351268753a0f0", "score": "0.6595752", "text": "func (e *ContractUnlockStateChange) GetBlockNumber() int64 {\n\treturn e.BlockNumber\n}", "title": "" }, { "docid": "9cea4108a65d038e7e55a88c7bc71209", "score": "0.6595269", "text": "func (bcn *BlockChainNode) GetBlockNum() uint32 {\n\tbcn.generalMutex.RLock()\n\tdefer bcn.generalMutex.RUnlock()\n\n\treturn bcn.Block.BlockCount\n}", "title": "" }, { "docid": "5639a3d63ad7cfe25eac031f29f24bd4", "score": "0.65821147", "text": "func (e *ContractClosedStateChange) GetBlockNumber() int64 {\n\treturn e.ClosedBlock\n}", "title": "" }, { "docid": "a9ad6509a89889d18e8f44361a782da1", "score": "0.6560306", "text": "func (ws *WatchService) updateBlockNumber() {\n\thead, err := ws.client.HeaderByNumber(context.Background(), nil)\n\tif err != nil {\n\t\tlog.Traceln(\"cannot fetch on-chain block number:\", err)\n\t\treturn\n\t}\n\n\tblkNum := head.Number.Uint64()\n\tvar topBlkNum uint64\n\tws.mu.Lock()\n\tif blkNum > ws.blkNum {\n\t\tws.blkNum = blkNum\n\t}\n\ttopBlkNum = ws.blkNum\n\tws.mu.Unlock()\n\tlog.Tracef(\"top block #: %d, on-chain #: %d\", topBlkNum, blkNum)\n}", "title": "" }, { "docid": "5cd588636e401765dd088baefd142ba7", "score": "0.6509774", "text": "func (_TellorGetters *TellorGettersCallerSession) GetMinedBlockNum(_requestId *big.Int, _timestamp *big.Int) (*big.Int, error) {\n\treturn _TellorGetters.Contract.GetMinedBlockNum(&_TellorGetters.CallOpts, _requestId, _timestamp)\n}", "title": "" }, { "docid": "e42aa1298edb409b9932fe834a476a11", "score": "0.6500484", "text": "func (e *ContractSecretRevealOnChainStateChange) GetBlockNumber() int64 {\n\treturn e.BlockNumber\n}", "title": "" }, { "docid": "895d95f70bf61734d3407729f5d80425", "score": "0.6480469", "text": "func (e *ContractPunishedStateChange) GetBlockNumber() int64 {\n\treturn e.BlockNumber\n}", "title": "" }, { "docid": "8659e474ed278fee695b85d161beadeb", "score": "0.64778775", "text": "func (e *ContractBalanceProofUpdatedStateChange) GetBlockNumber() int64 {\n\treturn e.BlockNumber\n}", "title": "" }, { "docid": "f43c4a088d45458d6f910d0e03308f1c", "score": "0.6471706", "text": "func (_TellorGetters *TellorGettersSession) GetMinedBlockNum(_requestId *big.Int, _timestamp *big.Int) (*big.Int, error) {\n\treturn _TellorGetters.Contract.GetMinedBlockNum(&_TellorGetters.CallOpts, _requestId, _timestamp)\n}", "title": "" }, { "docid": "110b86bb8fb4288d866faf512cd41e1e", "score": "0.64629024", "text": "func (rpc *UseRPC) 
UseBlockNumber() (int, error) {\n\tvar response string\n\tif err := rpc.call(\"eth_blockNumber\", &response); err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn ParseInt(response)\n}", "title": "" }, { "docid": "81d784ad2b36b2d7de68f404e659708c", "score": "0.6444659", "text": "func GetBlockNumber(\n\tethNodeUrl string, ctx context.Context) (int64, error) {\n\n\tcli, err := rpc.DialContext(ctx, ethNodeUrl)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tdefer cli.Close()\n\n\tvar result *hexutil.Big\n\terr = cli.CallContext(ctx, &result, \"eth_blockNumber\")\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn result.ToInt().Int64(), nil\n}", "title": "" }, { "docid": "a602cd7d7e51c567e1532c025cc1f13e", "score": "0.64428115", "text": "func (_L2OutputOracle *L2OutputOracleCallerSession) StartingBlockNumber() (*big.Int, error) {\n\treturn _L2OutputOracle.Contract.StartingBlockNumber(&_L2OutputOracle.CallOpts)\n}", "title": "" }, { "docid": "7e676c74c7d00e66ff3c478b852eda38", "score": "0.64424103", "text": "func (rpcCli *RpcClient) MC_getUncleCountByBlockNumber(number string) (int64, error) {\n\n\tpointer, err := rpcCli.netServeHandler(MC_getUncleCountByBlockNumber, []string{number})\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn pointer.ToInt()\n}", "title": "" }, { "docid": "c1aae41d2518fd9f05066af35f584e60", "score": "0.64237714", "text": "func (_TellorGetters *TellorGettersCaller) GetMinedBlockNum(opts *bind.CallOpts, _requestId *big.Int, _timestamp *big.Int) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _TellorGetters.contract.Call(opts, &out, \"getMinedBlockNum\", _requestId, _timestamp)\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "title": "" }, { "docid": "b7549046ad9f45c6bcbfe2626568b107", "score": "0.640515", "text": "func (e *ContractSettledStateChange) GetBlockNumber() int64 {\n\treturn e.SettledBlock\n}", "title": "" }, { "docid": "410fc60dbb69a106e7fab719498d7989", "score": "0.6403593", "text": "func (_L2OutputOracle *L2OutputOracleCallerSession) NextBlockNumber() (*big.Int, error) {\n\treturn _L2OutputOracle.Contract.NextBlockNumber(&_L2OutputOracle.CallOpts)\n}", "title": "" }, { "docid": "d9430a6cc241e161c1974e53df9f5d3e", "score": "0.63965446", "text": "func (e *ContractCooperativeSettledStateChange) GetBlockNumber() int64 {\n\treturn e.SettledBlock\n}", "title": "" }, { "docid": "e22071b12ff3377c2f15d6235b576cc0", "score": "0.63862425", "text": "func (_L2OutputOracle *L2OutputOracleSession) StartingBlockNumber() (*big.Int, error) {\n\treturn _L2OutputOracle.Contract.StartingBlockNumber(&_L2OutputOracle.CallOpts)\n}", "title": "" }, { "docid": "0875f8a1f902637d3e9730bf3c01d659", "score": "0.6369814", "text": "func (_ITellor *ITellorCallerSession) GetMinedBlockNum(_requestId *big.Int, _timestamp *big.Int) (*big.Int, error) {\n\treturn _ITellor.Contract.GetMinedBlockNum(&_ITellor.CallOpts, _requestId, _timestamp)\n}", "title": "" }, { "docid": "44f348b6cabb800f06d7b0ac9f238cef", "score": "0.63677794", "text": "func (rpcCli *RpcClient) SCS_getBlockNumber(dappAddress string) (int64, error) {\n\n\tpointer, err := rpcCli.netServeHandler(SCS_getBlockNumber, []string{dappAddress})\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn pointer.ToInt()\n}", "title": "" }, { "docid": "886bb2e65066edb1415f2718d1d44cfb", "score": "0.6366974", "text": "func ReadBlockNumber(db *leveldb.DB, hash ibft.Hash) *uint64 {\n\tdata, _ := db.Get(blockNumberKey(hash), nil)\n\tif 
len(data) != 8 {\n\t\treturn nil\n\t}\n\tnumber := binary.BigEndian.Uint64(data)\n\treturn &number\n}", "title": "" }, { "docid": "4c37fd235b4a1f2d9498c119f9e611f2", "score": "0.6356692", "text": "func (block *Block) BlockNo() BlockNo {\n\treturn block.GetHeader().GetBlockNo()\n}", "title": "" }, { "docid": "688bf6fd6c28de50280b0f84f7d5c727", "score": "0.6353351", "text": "func (e *ContractBalanceStateChange) GetBlockNumber() int64 {\n\treturn e.BlockNumber\n}", "title": "" }, { "docid": "9dd9668d7db91f4b1f30f5cd7e66e316", "score": "0.6346193", "text": "func (_Extension *ExtensionCaller) GetMinedBlockNum(opts *bind.CallOpts, _requestId *big.Int, _timestamp *big.Int) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _Extension.contract.Call(opts, &out, \"getMinedBlockNum\", _requestId, _timestamp)\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "title": "" }, { "docid": "871cc2566a9b3a61ca817df30e5d68d6", "score": "0.63415766", "text": "func (_Extension *ExtensionCallerSession) GetMinedBlockNum(_requestId *big.Int, _timestamp *big.Int) (*big.Int, error) {\n\treturn _Extension.Contract.GetMinedBlockNum(&_Extension.CallOpts, _requestId, _timestamp)\n}", "title": "" }, { "docid": "34d22e54ca80a6b3749fb31c925eff3d", "score": "0.6317934", "text": "func (_ITellor *ITellorSession) GetMinedBlockNum(_requestId *big.Int, _timestamp *big.Int) (*big.Int, error) {\n\treturn _ITellor.Contract.GetMinedBlockNum(&_ITellor.CallOpts, _requestId, _timestamp)\n}", "title": "" }, { "docid": "eb97cc7822950a15fd64a6e3f4dfeae7", "score": "0.6310639", "text": "func (_ITellor *ITellorCaller) GetMinedBlockNum(opts *bind.CallOpts, _requestId *big.Int, _timestamp *big.Int) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _ITellor.contract.Call(opts, &out, \"getMinedBlockNum\", _requestId, _timestamp)\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "title": "" }, { "docid": "60aadb911bd2d266404b7a50520aca53", "score": "0.63105017", "text": "func (db *PublicRecord) GetBlockCount() int64 {\n\tcmd := `SELECT max(height) FROM blocks`\n\trow := db.conn.QueryRow(cmd)\n\n\tvar height uint64\n\terr := row.Scan(&height)\n\tif err != nil {\n\t\treturn 0\n\t}\n\treturn int64(height)\n}", "title": "" }, { "docid": "9d53ac041b8cfe9dc2b5f7f5b2d9dff9", "score": "0.62918174", "text": "func (btc *BlockTimeCounter) Block(t time.Time) int {\r\n\treturn int((t.Sub(btc.start) - 1) / btc.duration)\r\n}", "title": "" }, { "docid": "585abd62ddb200351cd322d4af78ea7f", "score": "0.6261169", "text": "func (rpcCli *RpcClient) MC_getBlockByNumber(number types.ComplexIntParameter, objFlag bool) (*requestData.Block, error) {\n\n\tpointer, err := rpcCli.netServeHandler(MC_getBlockByNumber, []interface{}{number.ToHex(), objFlag})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn pointer.ToBlock(objFlag)\n}", "title": "" }, { "docid": "e4765237888919835a230fdc47cba025", "score": "0.624963", "text": "func (_Extension *ExtensionSession) GetMinedBlockNum(_requestId *big.Int, _timestamp *big.Int) (*big.Int, error) {\n\treturn _Extension.Contract.GetMinedBlockNum(&_Extension.CallOpts, _requestId, _timestamp)\n}", "title": "" }, { "docid": "7b92ba7d9b3c04025fae4daffc50bcc0", "score": "0.6245225", "text": "func (rpcCli *RpcClient) MC_getBlockTransactionCountByNumber(number string) (int64, error) {\n\n\tpointer, err := 
rpcCli.netServeHandler(MC_getBlockTransactionCountByNumber, []string{number})\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn pointer.ToInt()\n}", "title": "" }, { "docid": "946df14a80eb78352620be29652fc990", "score": "0.6243029", "text": "func getBlockNumber(content []byte) int {\n blockNumber := (len(content) / BlockSize)\n if (len(content) % BlockSize != 0) {\n blockNumber += 1\n }\n return blockNumber\n}", "title": "" }, { "docid": "0db8864cb69dcacfb558ede9c1676f2d", "score": "0.6214218", "text": "func (_L2OutputOracle *L2OutputOracleCaller) StartingBlockNumber(opts *bind.CallOpts) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _L2OutputOracle.contract.Call(opts, &out, \"startingBlockNumber\")\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "title": "" }, { "docid": "72ab2325d05fa5a9efad643626e29c1d", "score": "0.61939406", "text": "func (_L2OutputOracle *L2OutputOracleCaller) NextBlockNumber(opts *bind.CallOpts) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _L2OutputOracle.contract.Call(opts, &out, \"nextBlockNumber\")\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "title": "" }, { "docid": "1bce96aae8b68563fc762e070d6e6685", "score": "0.61916006", "text": "func (e *Etherscan) GetBlockNumber() (*big.Int, error) {\n\turl := fmt.Sprintf(\"https://%s/api?module=proxy&action=eth_blockNumber&apikey=%s\",\n\t\te.Domain, e.APIKey,\n\t)\n\tres, err := req.Get(url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar result blockNumberResponse\n\terr = res.ToJSON(&result)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif len(result.Result) == 0 {\n\t\treturn nil, errors.New(\"get block number failed\")\n\t}\n\n\tblockNumber, _ := new(big.Int).SetString(result.Result, 0)\n\n\treturn blockNumber, nil\n}", "title": "" }, { "docid": "1116526fedc9a5cbd01e2489a527d2d1", "score": "0.61858916", "text": "func (q *QueueProtocol) GetHighestBlockNum(param *types.ReqNil) (*types.ReplyBlockHeight, error) {\n\tmsg, err := q.send(blockchainKey, types.EventHighestBlock, param)\n\tif err != nil {\n\t\tlog.Error(\"ClosePeer\", \"Error\", err.Error())\n\t\treturn nil, err\n\t}\n\n\tif reply, ok := msg.GetData().(*types.ReplyBlockHeight); ok {\n\t\treturn reply, nil\n\t}\n\treturn nil, types.ErrInvalidParam\n}", "title": "" }, { "docid": "a6ad90cd7b0705fe479fd817908c4dc0", "score": "0.6176284", "text": "func (_L2OutputOracle *L2OutputOracleSession) NextBlockNumber() (*big.Int, error) {\n\treturn _L2OutputOracle.Contract.NextBlockNumber(&_L2OutputOracle.CallOpts)\n}", "title": "" }, { "docid": "664d5ff793d73eb423c4bd93f8c23f16", "score": "0.6159577", "text": "func (e *ContractChannelWithdrawStateChange) GetBlockNumber() int64 {\n\treturn e.BlockNumber\n}", "title": "" }, { "docid": "6bb64f2d1d995979c0ebedef876d2133", "score": "0.6109955", "text": "func EthBlockNumber(ctx context.Context, trans Trans) (*big.Int, error) {\n\tvar out *HexInt\n\terr := trans.Call(ctx, &out, \"eth_blockNumber\")\n\treturn (*big.Int)(out), errors.Wrap(err, `error in \"eth_blockNumber\"`)\n}", "title": "" }, { "docid": "8c5dd492d4c53c0db8c79955132a29f4", "score": "0.61029935", "text": "func (db *Database) LastBlockHeight() (int64, error) {\n\tvar height int64\n\terr := db.QueryRow(\"SELECT coalesce(MAX(height),0) AS height FROM block;\").Scan(&height)\n\treturn height, err\n}", "title": "" }, { "docid": 
"2e84087f4bc1a57e3b775f55fb06998f", "score": "0.6093937", "text": "func (lbu *LatestBlockUpdate) SetBlockNumber(u uint32) *LatestBlockUpdate {\n\tlbu.mutation.ResetBlockNumber()\n\tlbu.mutation.SetBlockNumber(u)\n\treturn lbu\n}", "title": "" }, { "docid": "e6f37151c16ffef38edc52b3179710fc", "score": "0.60809094", "text": "func (lbuo *LatestBlockUpdateOne) SetBlockNumber(u uint32) *LatestBlockUpdateOne {\n\tlbuo.mutation.ResetBlockNumber()\n\tlbuo.mutation.SetBlockNumber(u)\n\treturn lbuo\n}", "title": "" }, { "docid": "66b638304d2073eae1c01a6f6c6516e3", "score": "0.6034128", "text": "func (lbu *LatestBlockUpdate) AddBlockNumber(u uint32) *LatestBlockUpdate {\n\tlbu.mutation.AddBlockNumber(u)\n\treturn lbu\n}", "title": "" }, { "docid": "468db8571608e2b556ed3cb84489f219", "score": "0.59936243", "text": "func (lbuo *LatestBlockUpdateOne) AddBlockNumber(u uint32) *LatestBlockUpdateOne {\n\tlbuo.mutation.AddBlockNumber(u)\n\treturn lbuo\n}", "title": "" }, { "docid": "b4419a87e5039832e41b2fe0e151c9a5", "score": "0.5926033", "text": "func (db *MongoDbBridge) LastKnownBlock() (uint64, error) {\n\t// prep search options\n\topt := options.FindOne()\n\topt.SetSort(bson.D{{fiTransactionBlock, -1}})\n\topt.SetProjection(bson.D{{fiTransactionBlock, true}})\n\n\t// get the collection for account transactions\n\tcol := db.client.Database(db.dbName).Collection(coTransactions)\n\tres := col.FindOne(context.Background(), bson.D{}, opt)\n\tif res.Err() != nil {\n\t\t// may be no block at all\n\t\tif res.Err() == mongo.ErrNoDocuments {\n\t\t\tdb.log.Info(\"no blocks found in database\")\n\t\t\treturn 0, nil\n\t\t}\n\n\t\t// log issue\n\t\tdb.log.Error(\"can not get the top block\")\n\t\treturn 0, res.Err()\n\t}\n\n\t// get the actual value\n\tvar tx struct {\n\t\tBlock uint64 `bson:\"blk\"`\n\t}\n\n\t// get the data\n\terr := res.Decode(&tx)\n\tif err != nil {\n\t\tdb.log.Error(\"can not decode the top block\")\n\t\treturn 0, res.Err()\n\t}\n\n\treturn tx.Block, nil\n}", "title": "" }, { "docid": "fbda9ea707d3e15109dc2081b4d6570c", "score": "0.58354485", "text": "func (d *Dao) BlockTime(c context.Context, mid int64) (blockTime int64, err error) {\n\tinfo, err := d.memberRPC.BlockInfo(c, &blockmodel.RPCArgInfo{MID: mid})\n\tif err != nil {\n\t\terr = errors.Wrapf(err, \"%v\", mid)\n\t\treturn\n\t}\n\tif info.EndTime > 0 {\n\t\tblockTime = info.EndTime\n\t}\n\treturn\n}", "title": "" }, { "docid": "fbcb51cbe7e32f393d7bfe98612bcc3f", "score": "0.5809627", "text": "func get_last_block(client *rpcclient.Client) string {\n\tblockChainInfo, err := client.GetBlockChainInfo()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\treturn blockChainInfo.BestBlockHash\n}", "title": "" }, { "docid": "0ba5d3d78c066e80e89c2a8fa9fb3720", "score": "0.57929116", "text": "func (campain *Campain) MineBlock() {\n\n\tcmd := &CmdGetLatestBlockNumber{}\n\tcmd.Init()\n\ttask := NewClientTask(campain.chainName, cmd)\n\tgoworker.AddTask(task)\n}", "title": "" }, { "docid": "539c9a6282e908e70ebf833145579942", "score": "0.57893", "text": "func BlockLevel(header externalapi.BlockHeader, maxBlockLevel int) int {\n\t// Genesis is defined to be the root of all blocks at all levels, so we define it to be the maximal\n\t// block level.\n\tif len(header.DirectParents()) == 0 {\n\t\treturn maxBlockLevel\n\t}\n\n\tproofOfWorkValue := NewState(header.ToMutable()).CalculateProofOfWorkValue()\n\tlevel := maxBlockLevel - proofOfWorkValue.BitLen()\n\t// If the block has a level lower than genesis make it zero.\n\tif level < 0 {\n\t\tlevel = 0\n\t}\n\treturn 
level\n}", "title": "" }, { "docid": "31014013c6c4614c6e7005c9829d5d73", "score": "0.57873136", "text": "func (s *store) LastCommittedBlockHeight() (uint64, error) {\n\tif s.isEmpty() {\n\t\treturn 0, nil\n\t}\n\treturn s.getLastCommittedBlock() + 1, nil\n}", "title": "" }, { "docid": "4ffec40b7a0b284ec37709bda2419df0", "score": "0.5771567", "text": "func (ds *Datastore) LatestBlock() (b Block, err error) {\n\t// run query\n\tbs := []Block{}\n\t_, err = datastore.NewQuery(\"Block\").\n\t\tLimit(1).\n\t\tOrder(\"-Height\").\n\t\tGetAll(ds.ctx, &bs)\n\n\tif err != nil {\n\t\treturn\n\t}\n\n\tif len(bs) == 0 {\n\t\treturn b, ErrNoBlocks\n\t}\n\n\tb = bs[0]\n\treturn\n}", "title": "" }, { "docid": "d51fbbb6218ee538196303eb3b40e65f", "score": "0.57654214", "text": "func (rpcCli *RpcClient) MC_getBlockTransactionCountByHash(blockHash string) (int64, error) {\n\n\tpointer, err := rpcCli.netServeHandler(MC_getBlockTransactionCountByHash, []string{blockHash})\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn pointer.ToInt()\n}", "title": "" }, { "docid": "d3ea4852e5f25fc32e090db8c22f2068", "score": "0.5759934", "text": "func (app *BaseApp) LastBlockHeight() int64 {\n\treturn app.cms.LastCommitID().Version\n}", "title": "" }, { "docid": "d3ea4852e5f25fc32e090db8c22f2068", "score": "0.5759934", "text": "func (app *BaseApp) LastBlockHeight() int64 {\n\treturn app.cms.LastCommitID().Version\n}", "title": "" }, { "docid": "79d0fda46076cef5aae93df21a7a7b48", "score": "0.57489973", "text": "func (_RootChain *RootChainSession) CurrentChildBlock() (*big.Int, error) {\n\treturn _RootChain.Contract.CurrentChildBlock(&_RootChain.CallOpts)\n}", "title": "" }, { "docid": "d44583a348fe1b88eb906578b6319863", "score": "0.5737258", "text": "func (a BlockNumberArgs) NumberOrLatest() rpc.BlockNumberOrHash {\n\treturn a.NumberOr(rpc.BlockNumberOrHashWithNumber(rpc.LatestBlockNumber))\n}", "title": "" }, { "docid": "f927d2732f1435d47cae48c3793fef47", "score": "0.5730027", "text": "func (_Vtoken *VtokenCallerSession) AccrualBlockNumber() (*big.Int, error) {\n\treturn _Vtoken.Contract.AccrualBlockNumber(&_Vtoken.CallOpts)\n}", "title": "" }, { "docid": "4510423d00e1e992b3517735eaa1241c", "score": "0.57287014", "text": "func (_MergedMinerValidator *MergedMinerValidatorCallerSession) LastBlockSet() (*big.Int, error) {\n\treturn _MergedMinerValidator.Contract.LastBlockSet(&_MergedMinerValidator.CallOpts)\n}", "title": "" }, { "docid": "461f06c10f5d8fa7868d4577b0cb1004", "score": "0.5716218", "text": "func LatestBlock(b *blockchain, rw http.ResponseWriter) {\n\tvar blocks []*Block\n\t// for _, v := range Blocks(b) {\n\t// \th := fmt.Sprintf(\"%s\", v.Hash[0:7]) + \"...\"\n\t// \tv.Hash = h\n\t// \tif len(v.PrevHash) > 7 {\n\t// \t\tph := fmt.Sprintf(\"%s\", v.PrevHash[0:7]) + \"...\"\n\t// \t\tv.PrevHash = ph\n\t// \t}\n\t// \tblocks = append(blocks, v)\n\t// }\n\tblocks = Blocks(b)\n\tif len(blocks) > 6 {\n\t\tblocks = blocks[0:6]\n\t}\n\tutils.HandleErr(json.NewEncoder(rw).Encode(blocks))\n}", "title": "" }, { "docid": "442cc7737761cae910c7f66d518175f8", "score": "0.57160896", "text": "func (_RootChain *RootChainCallerSession) CurrentChildBlock() (*big.Int, error) {\n\treturn _RootChain.Contract.CurrentChildBlock(&_RootChain.CallOpts)\n}", "title": "" }, { "docid": "0c34bced2f4f48be642ee95868b53a46", "score": "0.57089674", "text": "func GetNetworkBlock(cfg *config.Config, c client.Client) string {\n\tvar networkHeight string\n\tq := client.NewQuery(\"SELECT last(block_height) FROM heimdall_network_latest_block\", 
cfg.InfluxDB.Database, \"\")\n\tif response, err := c.Query(q); err == nil && response.Error() == nil {\n\t\tfor _, r := range response.Results {\n\t\t\tif len(r.Series) != 0 {\n\t\t\t\tfor idx, col := range r.Series[0].Columns {\n\t\t\t\t\tif col == \"last\" {\n\t\t\t\t\t\theightValue := r.Series[0].Values[0][idx]\n\t\t\t\t\t\tnetworkHeight = fmt.Sprintf(\"%v\", heightValue)\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn networkHeight\n}", "title": "" }, { "docid": "e28a0745b6138f0874f0a267e8a5d062", "score": "0.5698672", "text": "func (_MergedMinerValidator *MergedMinerValidatorSession) LastBlockSet() (*big.Int, error) {\n\treturn _MergedMinerValidator.Contract.LastBlockSet(&_MergedMinerValidator.CallOpts)\n}", "title": "" }, { "docid": "a4e34a03a3dc9c7808699340e88a66e0", "score": "0.5695377", "text": "func (pb *Block) MaxTime() int64 { return pb.meta.MaxTime }", "title": "" }, { "docid": "26bda7f6dc935060ce3b4665026e57e1", "score": "0.5692111", "text": "func (lc *LocalChain) GetBlockCount() (int64, error) {\n\tvar count int64\n\tfor _, header := range lc.headers {\n\t\tif header.Height > count {\n\t\t\tcount = header.Height\n\t\t}\n\t}\n\treturn count, nil\n}", "title": "" }, { "docid": "670e474b39be15678fcee68ba29eff98", "score": "0.56828815", "text": "func (self CellRef) Block() int {\n\treturn blockForCell(self.Row, self.Col)\n}", "title": "" }, { "docid": "1d83d506c315346f4c61c1f250928d5f", "score": "0.56736135", "text": "func (_ArbSys *ArbSysSession) BlockUpperBound() (*big.Int, error) {\n\treturn _ArbSys.Contract.BlockUpperBound(&_ArbSys.CallOpts)\n}", "title": "" }, { "docid": "7500874e74c2ae74da189ba5a856a783", "score": "0.5669078", "text": "func (self *block) BlockId() BlockId {\n\treturn self.blockId\n}", "title": "" }, { "docid": "49cc96808d03290161e1bc7029ed0e5e", "score": "0.5668155", "text": "func (b *Blockchain) LastBlock() (*primitives.Block, error) {\n\thash, err := b.chain.GetBlock(b.chain.Height())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tblock, err := b.db.GetBlockForHash(*hash)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn block, nil\n}", "title": "" }, { "docid": "509637086d92880f35108de48ea5895e", "score": "0.5668018", "text": "func (b *BloQue) NumBlocks() int {\n\treturn b.blocks.Len()\n}", "title": "" }, { "docid": "e477d265eb2ceca95f92d8e4503558e5", "score": "0.5652803", "text": "func (tx *Transaction) PrevBlock() *big.Int {\n\treturn tx.data.PrevBlock\n}", "title": "" }, { "docid": "5cb8b62d1cb402615dbfd66f4dba7fc6", "score": "0.5621065", "text": "func (self CellSlice) Block() int {\n\tif len(self) == 0 {\n\t\treturn 0\n\t}\n\treturn self[0].Block()\n}", "title": "" } ]
46c0083764a6a954da3c4151fbab2101
SetExtensionAttribute10 sets the extensionAttribute10 property value. Tenth customizable extension attribute.
[ { "docid": "9200b0cd2c50ab168f75bb8c919d982b", "score": "0.875377", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute10(value *string)() {\n m.extensionAttribute10 = value\n}", "title": "" } ]
[ { "docid": "02382837d13ebc025a8b15e4eda89e6c", "score": "0.72704536", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute9(value *string)() {\n m.extensionAttribute9 = value\n}", "title": "" }, { "docid": "669e884663a0ba5219b4db366342b6a8", "score": "0.71606594", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute11(value *string)() {\n m.extensionAttribute11 = value\n}", "title": "" }, { "docid": "c8278553edfe3c2cc6073670a0ed5eb9", "score": "0.68469065", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute15(value *string)() {\n m.extensionAttribute15 = value\n}", "title": "" }, { "docid": "f77d54de42fbd2d9a7e92e0b40da9a06", "score": "0.64344573", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute12(value *string)() {\n m.extensionAttribute12 = value\n}", "title": "" }, { "docid": "dba92d56a3dd4f807dc9de935e44a70c", "score": "0.6431559", "text": "func (m *OnPremisesExtensionAttributes) GetExtensionAttribute10()(*string) {\n return m.extensionAttribute10\n}", "title": "" }, { "docid": "3ec8656e78b5309c185bf2cffefe5b66", "score": "0.6284866", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute8(value *string)() {\n m.extensionAttribute8 = value\n}", "title": "" }, { "docid": "8f1e7397feec7ede37ee5c8285a7f160", "score": "0.62746793", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute14(value *string)() {\n m.extensionAttribute14 = value\n}", "title": "" }, { "docid": "d033c521c20b986ec61fcb614e10ccb3", "score": "0.5991872", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute13(value *string)() {\n m.extensionAttribute13 = value\n}", "title": "" }, { "docid": "f2ed9f95a0498750b989f0be54de14e1", "score": "0.5849955", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute7(value *string)() {\n m.extensionAttribute7 = value\n}", "title": "" }, { "docid": "dad7aa676a038fac3c3bedea73bdbe45", "score": "0.5833132", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute6(value *string)() {\n m.extensionAttribute6 = value\n}", "title": "" }, { "docid": "e5b82988e63afdf3a7ddab4e05e466f3", "score": "0.57690215", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute5(value *string)() {\n m.extensionAttribute5 = value\n}", "title": "" }, { "docid": "643582e9ef714f885a860ed486f5ae93", "score": "0.54991674", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute1(value *string)() {\n m.extensionAttribute1 = value\n}", "title": "" }, { "docid": "a542ef36c2a250d5be5b992749f99462", "score": "0.5413822", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute3(value *string)() {\n m.extensionAttribute3 = value\n}", "title": "" }, { "docid": "aacf9e07ea9f718d1c13f78183f132a8", "score": "0.5400845", "text": "func (ec *EventContextV03) SetExtension(name string, value interface{}) error {\n\tif ec.Extensions == nil {\n\t\tec.Extensions = make(map[string]interface{})\n\t}\n\n\tif _, ok := specV03Attributes[strings.ToLower(name)]; ok {\n\t\treturn fmt.Errorf(\"bad key %q: CloudEvents spec attribute MUST NOT be overwritten by extension\", name)\n\t}\n\n\tif value == nil {\n\t\tdelete(ec.Extensions, name)\n\t\tif len(ec.Extensions) == 0 {\n\t\t\tec.Extensions = nil\n\t\t}\n\t\treturn nil\n\t} else {\n\t\tv, err := types.Validate(value)\n\t\tif err == nil {\n\t\t\tec.Extensions[name] = v\n\t\t}\n\t\treturn err\n\t}\n}", "title": "" }, { "docid": "ed28d49205f98ffbfcdae07474a7ad43", "score": "0.53614867", 
"text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute4(value *string)() {\n m.extensionAttribute4 = value\n}", "title": "" }, { "docid": "76ea94601e077d0a65f37d70b700c8f2", "score": "0.52224714", "text": "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute2(value *string)() {\n m.extensionAttribute2 = value\n}", "title": "" }, { "docid": "275ed29da3956149d34cde5c966097b4", "score": "0.5146633", "text": "func (m *OnPremisesExtensionAttributes) GetExtensionAttribute9()(*string) {\n return m.extensionAttribute9\n}", "title": "" }, { "docid": "2889cbdd9c57a38cf010b47f5220afb9", "score": "0.49097067", "text": "func (m *OnPremisesExtensionAttributes) GetExtensionAttribute11()(*string) {\n return m.extensionAttribute11\n}", "title": "" }, { "docid": "6df2b55235ba1b8e629e914ed9678bb8", "score": "0.48436967", "text": "func (mtr *Dppdpp1intspareMetrics) SetSpare_10(val metrics.Counter) error {\n\tmtr.metrics.SetCounter(val, mtr.getOffset(\"Spare_10\"))\n\treturn nil\n}", "title": "" }, { "docid": "b50eb0ee87bf4ce114c341fd936b923c", "score": "0.48240042", "text": "func (c *Clac) Pow10() error {\n\treturn c.applyFloat(1, func(vals []value.Value) (value.Value, error) {\n\t\treturn binary(value.Int(10), \"**\", vals[0])\n\t})\n}", "title": "" }, { "docid": "5a7aec23e91fc94967c6ca06934a866c", "score": "0.47989792", "text": "func (mtr *Dprdpr1intspareMetrics) SetSpare_10(val metrics.Counter) error {\n\tmtr.metrics.SetCounter(val, mtr.getOffset(\"Spare_10\"))\n\treturn nil\n}", "title": "" }, { "docid": "092b4375b3ba0aab47a8df96f43c0683", "score": "0.4739132", "text": "func (mtr *Dppdpp0intspareMetrics) SetSpare_10(val metrics.Counter) error {\n\tmtr.metrics.SetCounter(val, mtr.getOffset(\"Spare_10\"))\n\treturn nil\n}", "title": "" }, { "docid": "e03cb964a920cf138d28ff134c96429e", "score": "0.4716781", "text": "func (mtr *Dprdpr0intspareMetrics) SetSpare_10(val metrics.Counter) error {\n\tmtr.metrics.SetCounter(val, mtr.getOffset(\"Spare_10\"))\n\treturn nil\n}", "title": "" }, { "docid": "e098c2b73ecab35ae442a5fe29560db4", "score": "0.46557274", "text": "func (m *OnPremisesExtensionAttributes) GetExtensionAttribute15()(*string) {\n return m.extensionAttribute15\n}", "title": "" }, { "docid": "6bb0d50da9a6784ee523b24bee577e6f", "score": "0.46462882", "text": "func Pow10(in *big.Float) *big.Float {\n\treturn Pow(in, 10)\n}", "title": "" }, { "docid": "ab7a8a7cebe4f24b5182c1fd91fb8957", "score": "0.4632557", "text": "func (m *User) SetOnPremisesExtensionAttributes(value OnPremisesExtensionAttributesable)() {\n m.onPremisesExtensionAttributes = value\n}", "title": "" }, { "docid": "19163d210702abfc2955ef7b6157b775", "score": "0.44789803", "text": "func (m *Application) SetExtensionProperties(value []ExtensionPropertyable)() {\n m.extensionProperties = value\n}", "title": "" }, { "docid": "2452d52a10830f484cb967b4b82bc84d", "score": "0.43429846", "text": "func (m *OnPremisesExtensionAttributes) GetExtensionAttribute8()(*string) {\n return m.extensionAttribute8\n}", "title": "" }, { "docid": "0539ce927b5720cde7c83bcb9c05b079", "score": "0.42923352", "text": "func (m *Group) SetExtensions(value []Extensionable)() {\n m.extensions = value\n}", "title": "" }, { "docid": "7eec0ca9fe9bc8dcfa31ea558ebf7809", "score": "0.428436", "text": "func (m *OnPremisesExtensionAttributes) GetExtensionAttribute12()(*string) {\n return m.extensionAttribute12\n}", "title": "" }, { "docid": "b7eb630a2298b13a8efbbb61992d0b62", "score": "0.42829484", "text": "func registerModelModel10Flags(depth int, 
cmdPrefix string, cmd *cobra.Command) error {\n\n\tif err := registerModel10Aggregation(depth, cmdPrefix, cmd); err != nil {\n\t\treturn err\n\t}\n\n\tif err := registerModel10Items(depth, cmdPrefix, cmd); err != nil {\n\t\treturn err\n\t}\n\n\tif err := registerModel10Pagination(depth, cmdPrefix, cmd); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "ed4ce55deabb79526a66e03fba935bbd", "score": "0.42412382", "text": "func D10() int {\n\treturn D(10) //nolint\n}", "title": "" }, { "docid": "ab0ebf2e24b34b378d78379f3379cb4e", "score": "0.42098728", "text": "func (m *AndroidWorkProfileGeneralDeviceConfiguration) SetWorkProfilePasswordMinNumericCharacters(value *int32)() {\n m.workProfilePasswordMinNumericCharacters = value\n}", "title": "" }, { "docid": "8a9dfef767df70f50dd6c4c4c3afccbe", "score": "0.42085978", "text": "func (m *OnPremisesExtensionAttributes) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"extensionAttribute1\", m.GetExtensionAttribute1())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute10\", m.GetExtensionAttribute10())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute11\", m.GetExtensionAttribute11())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute12\", m.GetExtensionAttribute12())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute13\", m.GetExtensionAttribute13())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute14\", m.GetExtensionAttribute14())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute15\", m.GetExtensionAttribute15())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute2\", m.GetExtensionAttribute2())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute3\", m.GetExtensionAttribute3())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute4\", m.GetExtensionAttribute4())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute5\", m.GetExtensionAttribute5())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute6\", m.GetExtensionAttribute6())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute7\", m.GetExtensionAttribute7())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute8\", m.GetExtensionAttribute8())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"extensionAttribute9\", m.GetExtensionAttribute9())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "title": "" }, { "docid": "e40e830b79cad2fdda8c2408d4355003", "score": "0.41694945", "text": "func BenchmarkUpdateMetricsAgg10(b *testing.B) {\n\tvar m SampleMetric\n\tmst, err := newMetricSetTypeOf(m)\n\tif err != nil {\n\t\tb.Error(err)\n\t}\n\tb.ResetTimer()\n\tfor i := 0; i < b.N; i++ {\n\t\tm.IntVal += 5\n\t\tm.FloatVal += float64(i) * 0.1\n\t\tif i%10 
== 9 {\n\t\t\tmst.update(m)\n\t\t\tm = SampleMetric{}\n\t\t}\n\t}\n\tmst.update(m)\n}", "title": "" }, { "docid": "3608252cbdd07b03a263b0de3a74c435", "score": "0.4166904", "text": "func schemaChangeV9ToV10(config *Config, tx *buntdb.Tx) error {\n\tprefix := \"account.registered.time \"\n\tvar accounts, times []string\n\ttx.AscendGreaterOrEqual(\"\", prefix, func(key, value string) bool {\n\t\tif !strings.HasPrefix(key, prefix) {\n\t\t\treturn false\n\t\t}\n\t\taccount := strings.TrimPrefix(key, prefix)\n\t\taccounts = append(accounts, account)\n\t\ttimes = append(times, value)\n\t\treturn true\n\t})\n\tfor i, account := range accounts {\n\t\ttime, err := strconv.ParseInt(times[i], 10, 64)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"corrupt registration time entry for %s: %v\\n\", account, err)\n\t\t\tcontinue\n\t\t}\n\t\ttime = time * 1000000000\n\t\ttx.Set(prefix+account, strconv.FormatInt(time, 10), nil)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "53c41ec3b6df03f97f8331ca145f2c58", "score": "0.41478363", "text": "func M10(dateTime []time.Time, data []float64) (higherActivity float64, onsetHigherActivity time.Time, err error) {\n\thigherActivity, onsetHigherActivity, err = HigherActivity(10, dateTime, data)\n\treturn\n}", "title": "" }, { "docid": "35f0896898d8beeefce56454e863551a", "score": "0.4144387", "text": "func SetExtNic(nic string) {\n\tsetValue(\"environment\", \"extnic\", nic)\n}", "title": "" }, { "docid": "60861abf7de884ed5ff294ff1f6e3da1", "score": "0.4141451", "text": "func (mr *MockBigInterfaceMockRecorder) Foo10(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Foo10\", reflect.TypeOf((*MockBigInterface)(nil).Foo10), arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n}", "title": "" }, { "docid": "45c2b7d3683d2e98c868f1a3580fba20", "score": "0.41390318", "text": "func (rr *OPT) SetExtendedRcode(v uint16) {\n\trr.Hdr.Ttl = rr.Hdr.Ttl&0x00FFFFFF | uint32(v>>4)<<24\n}", "title": "" }, { "docid": "c0f882cd1706ce67a40e4948ecb03179", "score": "0.41368613", "text": "func (m *User) SetExtensions(value []Extensionable)() {\n m.extensions = value\n}", "title": "" }, { "docid": "dadc6d6450a4a227f72f35355e5c26c3", "score": "0.41334358", "text": "func (m *OnPremisesExtensionAttributes) GetExtensionAttribute14()(*string) {\n return m.extensionAttribute14\n}", "title": "" }, { "docid": "1c42f667f2a309d03d84eaab3c08f67f", "score": "0.4121651", "text": "func (in *iNode) setAttributes(\n\tsize *uint64,\n\tmode *os.FileMode,\n\tmtime *time.Time) {\n\t// TODO\n}", "title": "" }, { "docid": "8de6e7987cb9ead9ee7da2142a69e28b", "score": "0.41181728", "text": "func (d *Device) Inc10Percent() error {\n\tcurrent, err := d.internal.Current()\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar want uint\n\tif d.max < 10 {\n\t\twant = current + 1\n\t} else {\n\t\twant = current + d.max/10\n\t}\n\tif want > d.max {\n\t\twant = d.max\n\t}\n\treturn d.internal.Set(want)\n}", "title": "" }, { "docid": "ca0b6b32e54ea4930a334d54b52fb322", "score": "0.41157073", "text": "func (u *__FileStorage_Updater) Extension(newVal string) *__FileStorage_Updater {\n\tu.updates[\"extension = ? 
\"] = newVal\n\treturn u\n}", "title": "" }, { "docid": "d1e0b03ad236f44161ece92647acf7f5", "score": "0.4101532", "text": "func (m *OnPremisesExtensionAttributes) SetAdditionalData(value map[string]interface{})() {\n m.additionalData = value\n}", "title": "" }, { "docid": "3f7f5304450d8cc56bf928c9d80e6acf", "score": "0.40818074", "text": "func (n *Node) SetTreeSize(ctx context.Context, ts uint64) (err error) {\n\treturn n.SetXattrString(ctx, prefixes.TreesizeAttr, strconv.FormatUint(ts, 10))\n}", "title": "" }, { "docid": "370b950da057b7f75b6598462c56a3e4", "score": "0.40771565", "text": "func Base10(v any) *Value10 {\n\tswitch t := v.(type) {\n\tcase nil:\n\t\treturn new(Value10)\n\tcase *Value10:\n\t\treturn t\n\tcase int:\n\t\tv := int64(t)\n\t\treturn (*Value10)(&v)\n\tcase int64:\n\t\treturn (*Value10)(&t)\n\tcase *int64:\n\t\treturn (*Value10)(t)\n\tdefault:\n\t\tpanic(\"invalid flag initializer\")\n\t}\n}", "title": "" }, { "docid": "a1dcc052e2394869a441eb35d4d7a3ff", "score": "0.40364185", "text": "func (m *OnPremisesExtensionAttributes) GetExtensionAttribute13()(*string) {\n return m.extensionAttribute13\n}", "title": "" }, { "docid": "888d0a0c857dac73fa3ba300e121423a", "score": "0.40229067", "text": "func (m *OnPremisesExtensionAttributes) GetExtensionAttribute5()(*string) {\n return m.extensionAttribute5\n}", "title": "" }, { "docid": "4c6581145134c7cff896a8ebb58bd6ac", "score": "0.3999557", "text": "func (m *AdministrativeUnit) SetExtensions(value []Extensionable)() {\n m.extensions = value\n}", "title": "" }, { "docid": "aa6d5c57bae7462ff0f1d3be5ad99b3b", "score": "0.39954773", "text": "func (m *AndroidWorkProfileGeneralDeviceConfiguration) SetWorkProfilePasswordMinSymbolCharacters(value *int32)() {\n m.workProfilePasswordMinSymbolCharacters = value\n}", "title": "" }, { "docid": "2550c4f4350f69169ded8544bf345089", "score": "0.39952862", "text": "func (m *ParameterMutator) Extension(name string, value interface{}) *ParameterMutator {\n\tif m.proxy.extensions == nil {\n\t\tm.proxy.extensions = Extensions{}\n\t}\n\tm.proxy.extensions[name] = value\n\treturn m\n}", "title": "" }, { "docid": "e54e1e968d1bbf2b76aa15871127232d", "score": "0.39923188", "text": "func (c *WeightedCache) SetVerbose(verbose bool) {\n\tc.verbose = verbose\n}", "title": "" }, { "docid": "1e994e9e64f1b4f9ded500fcb98e78b5", "score": "0.3991953", "text": "func (m *SecuritySchemeMutator) Extension(name string, value interface{}) *SecuritySchemeMutator {\n\tif m.proxy.extensions == nil {\n\t\tm.proxy.extensions = Extensions{}\n\t}\n\tm.proxy.extensions[name] = value\n\treturn m\n}", "title": "" }, { "docid": "a87c28f76b782054ac09aba995d25319", "score": "0.39858335", "text": "func (gb *GeneratorBuilder) ExcludeCheckDigit10(exclude bool) *GeneratorBuilder {\n\tgb.exclCheckDigit10 = exclude\n\treturn gb\n}", "title": "" }, { "docid": "450cc72699c472f0ede1e61deb9d07c9", "score": "0.3944835", "text": "func (m *MacOSMinimumOperatingSystem) SetV1010(value *bool)() {\n err := m.GetBackingStore().Set(\"v10_10\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "c31cae7b1e76c7071f76f437096772b6", "score": "0.39441288", "text": "func (k Keeper) SetExtTransactionCount(ctx sdk.Context, count uint64) {\n\tstore := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.ExtTransactionCountKey))\n\tbyteKey := types.KeyPrefix(types.ExtTransactionCountKey)\n\tbz := []byte(strconv.FormatUint(count, 10))\n\tstore.Set(byteKey, bz)\n}", "title": "" }, { "docid": "e1e729583ce8eff73ee323abfea7ad7e", 
"score": "0.39194226", "text": "func (n *Node) SetTreeSize(ts uint64) (err error) {\n\treturn xattr.Set(n.InternalPath(), xattrs.TreesizeAttr, []byte(strconv.FormatUint(ts, 10)))\n}", "title": "" }, { "docid": "391d154caa13c9a8866b3679dfccd65c", "score": "0.390633", "text": "func ExampleReplaceExtension() {\n\tconst f = \"foo.bar.txt\"\n\tfmt.Println(ReplaceExtension(f, \".bin\"))\n\t// Output: foo.bar.bin\n}", "title": "" }, { "docid": "97e6ac6fbae2051d105c353cd81905ef", "score": "0.38876438", "text": "func (mtr *Dprdpr1intspareMetrics) SetSpare_9(val metrics.Counter) error {\n\tmtr.metrics.SetCounter(val, mtr.getOffset(\"Spare_9\"))\n\treturn nil\n}", "title": "" }, { "docid": "af30b125c2602d30017a2de3cf0e14f0", "score": "0.38786173", "text": "func (mtr *Dppdpp1intspareMetrics) SetSpare_9(val metrics.Counter) error {\n\tmtr.metrics.SetCounter(val, mtr.getOffset(\"Spare_9\"))\n\treturn nil\n}", "title": "" }, { "docid": "4cee3d2b775f9a9d85a7f3151db07b7d", "score": "0.38672656", "text": "func (s *server) SetExtraTags(tags []string) {\n\ts.extraTags = tags\n}", "title": "" }, { "docid": "ae7fc735889b1efef33af47dc4b0fc8e", "score": "0.38648343", "text": "func Log10(x float64) float64 {\n\tif haveArchLog10 {\n\t\treturn archLog10(x)\n\t}\n\treturn log10(x)\n}", "title": "" }, { "docid": "bea00a4de431d74ae8aa463f704e6bd8", "score": "0.3859516", "text": "func (c *Compiler) RegisterExtension(name string, meta *Schema, ext ExtCompiler) {\n\tc.extensions[name] = extension{meta, ext}\n}", "title": "" }, { "docid": "19d22afcfaef2f39f0752e05a6b5dacd", "score": "0.38592404", "text": "func (m *AndroidWorkProfileGeneralDeviceConfiguration) SetWorkProfilePasswordMinLetterCharacters(value *int32)() {\n m.workProfilePasswordMinLetterCharacters = value\n}", "title": "" }, { "docid": "6e0c8627626c241aae08a71aee7e612e", "score": "0.3856423", "text": "func (f genHelperEncoder) EncExtension(v interface{}, xfFn *extTypeTagFn) {\n\tf.e.e.EncodeExt(v, xfFn.rt, xfFn.tag, xfFn.ext)\n}", "title": "" }, { "docid": "647a9aac6fa45e4aa506df674d448f11", "score": "0.38515812", "text": "func (mtr *Dprdpr0intspareMetrics) SetSpare_9(val metrics.Counter) error {\n\tmtr.metrics.SetCounter(val, mtr.getOffset(\"Spare_9\"))\n\treturn nil\n}", "title": "" }, { "docid": "f3545427207771b9ffc1505c49d4da3a", "score": "0.38483867", "text": "func (m *OnPremisesExtensionAttributes) GetExtensionAttribute7()(*string) {\n return m.extensionAttribute7\n}", "title": "" }, { "docid": "60805390da4c3ca8924f64ec3cbd6544", "score": "0.38477036", "text": "func (m *OnPremisesExtensionAttributes) GetExtensionAttribute6()(*string) {\n return m.extensionAttribute6\n}", "title": "" }, { "docid": "c25906d56ce1da262816610e6c1bc7e0", "score": "0.38456613", "text": "func SetChainlinkAPIPageSize(nodes []client.Chainlink, pageSize int) {\n\tfor _, n := range nodes {\n\t\tn.SetPageSize(pageSize)\n\t}\n}", "title": "" }, { "docid": "7d5d866d22175970cc5ab86eea3d8efc", "score": "0.38321015", "text": "func (m *ItemsMutator) Extension(name string, value interface{}) *ItemsMutator {\n\tif m.proxy.extensions == nil {\n\t\tm.proxy.extensions = Extensions{}\n\t}\n\tm.proxy.extensions[name] = value\n\treturn m\n}", "title": "" }, { "docid": "a72b9c1e1d7fdeab1a8d776afdcbef13", "score": "0.3821001", "text": "func (mtr *Dppdpp0intspareMetrics) SetSpare_9(val metrics.Counter) error {\n\tmtr.metrics.SetCounter(val, mtr.getOffset(\"Spare_9\"))\n\treturn nil\n}", "title": "" }, { "docid": "942409c6c34d08f0ac1e65c052c8ea01", "score": "0.38171187", "text": "func (client *Client) 
ModifyIntranetAttributeWithOptions(request *ModifyIntranetAttributeRequest, runtime *util.RuntimeOptions) (_result *ModifyIntranetAttributeResponse, _err error) {\n\t_err = util.ValidateModel(request)\n\tif _err != nil {\n\t\treturn _result, _err\n\t}\n\tquery := map[string]interface{}{}\n\tif !tea.BoolValue(util.IsUnset(request.BandWidth)) {\n\t\tquery[\"BandWidth\"] = request.BandWidth\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.InstanceId)) {\n\t\tquery[\"InstanceId\"] = request.InstanceId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.NodeId)) {\n\t\tquery[\"NodeId\"] = request.NodeId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.OwnerAccount)) {\n\t\tquery[\"OwnerAccount\"] = request.OwnerAccount\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.OwnerId)) {\n\t\tquery[\"OwnerId\"] = request.OwnerId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.ResourceOwnerAccount)) {\n\t\tquery[\"ResourceOwnerAccount\"] = request.ResourceOwnerAccount\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.ResourceOwnerId)) {\n\t\tquery[\"ResourceOwnerId\"] = request.ResourceOwnerId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.SecurityToken)) {\n\t\tquery[\"SecurityToken\"] = request.SecurityToken\n\t}\n\n\treq := &openapi.OpenApiRequest{\n\t\tQuery: openapiutil.Query(query),\n\t}\n\tparams := &openapi.Params{\n\t\tAction: tea.String(\"ModifyIntranetAttribute\"),\n\t\tVersion: tea.String(\"2015-01-01\"),\n\t\tProtocol: tea.String(\"HTTPS\"),\n\t\tPathname: tea.String(\"/\"),\n\t\tMethod: tea.String(\"POST\"),\n\t\tAuthType: tea.String(\"AK\"),\n\t\tStyle: tea.String(\"RPC\"),\n\t\tReqBodyType: tea.String(\"formData\"),\n\t\tBodyType: tea.String(\"json\"),\n\t}\n\t_result = &ModifyIntranetAttributeResponse{}\n\t_body, _err := client.CallApi(params, req, runtime)\n\tif _err != nil {\n\t\treturn _result, _err\n\t}\n\t_err = tea.Convert(_body, &_result)\n\treturn _result, _err\n}", "title": "" }, { "docid": "6aa2c330f366fd4b60cc6ab3a2d6e3c8", "score": "0.3809094", "text": "func SetCapabilityAttributeComplexForAllInstances(kv *api.KV, deploymentID, nodeName, capabilityName, attributeName string, attributeValue interface{}) error {\n\tids, err := GetNodeInstancesIds(kv, deploymentID, nodeName)\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, errGrp, store := consulutil.WithContext(context.Background())\n\tfor _, instanceName := range ids {\n\t\tattrPath := path.Join(consulutil.DeploymentKVPrefix, deploymentID, \"topology/instances\", nodeName, instanceName, \"capabilities\", capabilityName, \"attributes\", attributeName)\n\t\tstoreComplexType(store, attrPath, attributeValue)\n\t}\n\treturn errGrp.Wait()\n}", "title": "" }, { "docid": "f41f9c5089032ef9ba6e68b1386303f1", "score": "0.3806878", "text": "func (n *Node) Setxattr(ctx context.Context, attr string, data []byte, flags uint32) syscall.Errno {\n\trn := n.rootNode()\n\tflags = uint32(filterXattrSetFlags(int(flags)))\n\n\t// ACLs are passed through without encryption\n\tif isAcl(attr) {\n\t\t// result of setting an acl depends on the user doing it\n\t\tvar context *fuse.Context\n\t\tif rn.args.PreserveOwner {\n\t\t\tcontext = toFuseCtx(ctx)\n\t\t}\n\t\treturn n.setXAttr(context, attr, data, flags)\n\t}\n\n\tcAttr, err := rn.encryptXattrName(attr)\n\tif err != nil {\n\t\treturn syscall.EINVAL\n\t}\n\tcData := rn.encryptXattrValue(data)\n\treturn n.setXAttr(nil, cAttr, cData, flags)\n}", "title": "" }, { "docid": "fbd36c32e5c18075a7d2e27005d89338", "score": "0.37937152", "text": "func (p *Processor) RegisterExtension(id string, extension 
ProcessorExtension) {\n\tp.extensions[id] = extension\n}", "title": "" }, { "docid": "18c87eda2397ffac5952032f4b7cd7e1", "score": "0.37811255", "text": "func attrModifier(attribute int) int {\n\treturn (attribute - 10) / 2\n}", "title": "" }, { "docid": "5bc58529afee448eb6f953fa0533af17", "score": "0.37667012", "text": "func (e *Extension) UnmarshalJSON(data []byte) error {\n\tif e == nil {\n\t\treturn errors.New(\"openrtb.Extension: UnmarshalJSON on nil pointer\")\n\t}\n\t*e = append((*e)[0:0], data...)\n\treturn nil\n}", "title": "" }, { "docid": "08ea56ea8bde3a1d4e148b1e0701552e", "score": "0.3755329", "text": "func (o *VulnerabilitiesRequest) SetExtended(v bool) {\n\to.Extended = &v\n}", "title": "" }, { "docid": "52afbcbc2e9e0db7d683ffe8f82b7806", "score": "0.37441817", "text": "func (m *MockBigInterface) Foo10(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7 bool) Bar {\n\tif m.FnFoo10 != nil {\n\t\treturn m.FnFoo10(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\t}\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Foo10\", arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\tret0, _ := ret[0].(Bar)\n\treturn ret0\n}", "title": "" }, { "docid": "d4f8d744d4942a01d7635fe60803d89b", "score": "0.37372133", "text": "func (u *testUtil) SetVerbose(value bool) {\n\tu.verbose = value\n}", "title": "" }, { "docid": "06f0643b7ce7bc8a8f67f4cf7995ee99", "score": "0.37353432", "text": "func (m pProcessElementAttrs) Extend(fnct func(m.UserSet, *etree.Element, map[string]*models.FieldInfo) map[string]interface{}) pProcessElementAttrs {\n\treturn pProcessElementAttrs{\n\t\tMethod: m.Method.Extend(fnct),\n\t}\n}", "title": "" }, { "docid": "2629ae0a384d2323a09e1b73e6e79b34", "score": "0.37261394", "text": "func (o *TicketStatisticParams) SetExtensionCategory(extensionCategory *string) {\n\to.ExtensionCategory = extensionCategory\n}", "title": "" }, { "docid": "3e7c4073f6b4dce7eba130ffcdecc6be", "score": "0.3723625", "text": "func Log10(arg float64) float64 {\n\treturn math.Log10(arg)\n}", "title": "" }, { "docid": "e7bd33ed54e297b3b82c0b0d92eeda3c", "score": "0.37180272", "text": "func (c *Cuckoo) SetNumericKeySize(size int) {\n\tswitch size {\n\tcase 4:\n\t\tc.buf.b = c.buf.base[0:4]\n\tcase 8:\n\t\tc.buf.b = c.buf.base[0:8]\n\tdefault:\n\t\tpanic(\"SetNumericKeySize\")\n\t}\n\tc.NumericKeySize = size\n}", "title": "" }, { "docid": "53b7effa5f1b4e7e26d9e35d33d33a0f", "score": "0.3716827", "text": "func Log10(tk obj.Token, args []oop.VarDef) oop.Val {\n\tval := args[0].Val\n\tif val.Type != oop.Int && val.Type != oop.Float {\n\t\tval.Data = 0.0\n\t}\n\treturn oop.Val{Data: math.Log10(val.Data.(float64)), Type: oop.Float}\n}", "title": "" }, { "docid": "3213ff33aa92073ed70ea219a10514be", "score": "0.36901167", "text": "func (l *Loader) SetAttrSpecial(i Sym, v bool) {\n\tif v {\n\t\tl.attrSpecial[i] = struct{}{}\n\t} else {\n\t\tdelete(l.attrSpecial, i)\n\t}\n}", "title": "" }, { "docid": "7fd25ebd787589226b7581c2ace25fa0", "score": "0.3684439", "text": "func (o *object) SetAttr(i uint8, val bool) {\n\tmask := byte(1 << (7 - i%8))\n\tif val {\n\t\to.Attributes[i/8] |= mask\n\t} else {\n\t\to.Attributes[i/8] &^= mask\n\t}\n}", "title": "" }, { "docid": "87b826999bc132c7c45f3557e375881d", "score": "0.368108", "text": "func (lm LinksManager) AutoAssignExtension(url *url.URL, t resource.Type) bool {\n\treturn true\n}", "title": "" }, { "docid": "6542bbaaee6f3d2628a1d0860c0dd96e", "score": "0.36810112", "text": "func (adaType *AdaSuperType) SetFormatLength(x uint32) {\n}", "title": "" }, { "docid": 
"e07656ba573c2226677ccb7b45ab90b7", "score": "0.36771294", "text": "func Parse10(s string) (int64, error) { return parse(s, units10) }", "title": "" }, { "docid": "fbcf89ff4c54d412cdde6f3c019047c7", "score": "0.36613885", "text": "func (p *Problem) Extend(key string, value interface{}) error {\n\n\tif _, reserved := ReservedKeys[strings.ToLower(key)]; reserved {\n\t\treturn ErrExtensionKeyIsReserved\n\t}\n\n\t_, keyFound := p.Extension(key)\n\tif !keyFound {\n\t\tp.extensionKeys = append(p.extensionKeys, key)\n\t}\n\n\tif value != nil {\n\t\tp.extensions[key] = value\n\t} else {\n\n\t\tdelete(p.extensions, key)\n\n\t\tfor x := 0; x < len(p.extensionKeys); {\n\n\t\t\tif strings.EqualFold(key, p.extensionKeys[x]) {\n\t\t\t\tp.extensionKeys = append(p.extensionKeys[:x], p.extensionKeys[x+1:]...)\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tx++\n\n\t\t}\n\n\t}\n\n\treturn nil\n\n}", "title": "" }, { "docid": "8eade349c05839bcac75cdc6e05fe790", "score": "0.36552215", "text": "func (m *Messenger) EnableChatExtension(homeURL HomeURL) error {\n\twrap := map[string]interface{}{\n\t\t\"home_url\": homeURL,\n\t}\n\tdata, err := json.Marshal(wrap)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq, err := http.NewRequest(\"POST\", MessengerProfileURL, bytes.NewBuffer(data))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\treq.URL.RawQuery = \"access_token=\" + m.token\n\n\tclient := &http.Client{}\n\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer resp.Body.Close()\n\n\treturn checkFacebookError(resp.Body)\n}", "title": "" }, { "docid": "c3b2e87978a518a7674e4efba2756bbd", "score": "0.36536583", "text": "func addAttribute(buf *bytes.Buffer, attrType uint16, data interface{}, dataSize int) {\n\tattr := syscall.RtAttr{\n\t\tLen: syscall.SizeofRtAttr,\n\t\tType: attrType,\n\t}\n\tattr.Len += uint16(dataSize)\n\tbinary.Write(buf, Endian, attr)\n\tswitch data := data.(type) {\n\tcase string:\n\t\tbinary.Write(buf, Endian, []byte(data))\n\t\tbuf.WriteByte(0) // terminate\n\tdefault:\n\t\tbinary.Write(buf, Endian, data)\n\t}\n\tfor i := 0; i < padding(int(attr.Len), syscall.NLMSG_ALIGNTO); i++ {\n\t\tbuf.WriteByte(0)\n\t}\n}", "title": "" }, { "docid": "133f4992fe5bf9d46be313a132e25467", "score": "0.36512193", "text": "func (m QuotaQuotaThresholdsExtended) MarshalJSON() ([]byte, error) {\n\t_parts := make([][]byte, 0, 2)\n\n\taO0, err := swag.WriteJSON(m.QuotaQuotaThresholds)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t_parts = append(_parts, aO0)\n\n\tvar dataAO1 struct {\n\t\tAdvisoryExceeded bool `json:\"advisory_exceeded,omitempty\"`\n\n\t\tAdvisoryLastExceeded int64 `json:\"advisory_last_exceeded,omitempty\"`\n\n\t\tHardExceeded bool `json:\"hard_exceeded,omitempty\"`\n\n\t\tHardLastExceeded int64 `json:\"hard_last_exceeded,omitempty\"`\n\n\t\tSoftExceeded bool `json:\"soft_exceeded,omitempty\"`\n\n\t\tSoftLastExceeded int64 `json:\"soft_last_exceeded,omitempty\"`\n\t}\n\n\tdataAO1.AdvisoryExceeded = m.AdvisoryExceeded\n\n\tdataAO1.AdvisoryLastExceeded = m.AdvisoryLastExceeded\n\n\tdataAO1.HardExceeded = m.HardExceeded\n\n\tdataAO1.HardLastExceeded = m.HardLastExceeded\n\n\tdataAO1.SoftExceeded = m.SoftExceeded\n\n\tdataAO1.SoftLastExceeded = m.SoftLastExceeded\n\n\tjsonDataAO1, errAO1 := swag.WriteJSON(dataAO1)\n\tif errAO1 != nil {\n\t\treturn nil, errAO1\n\t}\n\t_parts = append(_parts, jsonDataAO1)\n\n\treturn swag.ConcatJSON(_parts...), nil\n}", "title": "" }, { "docid": "f0ceb6a04903a9e363eacb9a785d48ef", "score": 
"0.3650555", "text": "func (i *InLine) AppendExtensions(extensions []Extension) {\n\ti.Extensions = append(i.Extensions, extensions...)\n}", "title": "" }, { "docid": "bd2fd6052b02e3dbdf57965636d18cd1", "score": "0.36442596", "text": "func (r *Record) SetExtra(data M) *Record {\n\tr.Extra = data\n\treturn r\n}", "title": "" }, { "docid": "06a2b47bb8b2a4a85ad0bd5b5fd51849", "score": "0.36355424", "text": "func WithVerbose(verbose bool) RuntimeOption {\n\treturn func(options *RuntimeOptions) {\n\t\toptions.Verbose = verbose\n\t}\n}", "title": "" }, { "docid": "943dd732a9d2935252afcfccda83c5f2", "score": "0.36327577", "text": "func (k *Item) SetMatchLimit(matchLimit MatchLimit) {\n\tif matchLimit != MatchLimitDefault {\n\t\tk.attr[MatchLimitKey] = matchTypeRef[matchLimit]\n\t} else {\n\t\tdelete(k.attr, MatchLimitKey)\n\t}\n}", "title": "" } ]
006518a110d6986251dccdc16506213e
Removes the container with the given id. Returns true if the item was removed. Returns false if the item was not found.
[ { "docid": "96b9fdb87eea34b6884a7b5197987b3d", "score": "0.64689714", "text": "func (srv *ScheduableService) RemoveContainer(id uint64) bool {\n\terr := srv.repo.Remove(id)\n\tif err != nil {\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" } ]
[ { "docid": "0ba5980224db648d590253ce5b97bc1a", "score": "0.69458866", "text": "func (module *Cart) Remove(id string) bool {\n\n\tif _, exists := module.items[id]; exists {\n\n\t\tdelete(module.items, id)\n\t\tmodule.storage.Save(module.items)\n\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "c171ae2aed812733f4c6f18001e9ec7e", "score": "0.6877627", "text": "func Remove(id int) bool {\n\tindex := -1\n\n\tfor i, product := range store {\n\t\tif product.ID == id {\n\t\t\tindex = i\n\t\t}\n\t}\n\n\tif index != -1 {\n\t\tstore = append(store[:index], store[index+1:]...)\n\t}\n\n\t// Returns true if item was found & removed\n\treturn index != -1\n}", "title": "" }, { "docid": "0215f2e57c42c97b65fea3b8e38de2c7", "score": "0.68010545", "text": "func (o *Objects) Remove(id string) bool {\n\tidx, ok := o.Contains(id)\n\tif !ok {\n\t\treturn false\n\t}\n\n\to.mutex.Lock()\n\tdefer o.mutex.Unlock()\n\to.ids = append(o.ids[:idx], o.ids[idx+1:]...)\n\n\treturn true\n}", "title": "" }, { "docid": "8fc90175c5633b5e9d78961f9b097d55", "score": "0.6564211", "text": "func (items Items) Remove(id string) (Items, bool) {\n\tindex := items.find(id)\n\tif index == -1 {\n\t\treturn items, false\n\t}\n\n\treturn append(items[:index], items[index+1:]...), true\n}", "title": "" }, { "docid": "04d90bf386871018e17aa4b4761edf17", "score": "0.62456167", "text": "func (c *inMemoryContainerStore) Remove(id string) {\n\tc.Lock()\n\tdelete(c.s, id)\n\tfor pos, container := range c.c {\n\t\tif container.ID == id {\n\t\t\tc.c = append(c.c[0:pos], c.c[pos+1:]...)\n\t\t\tbreak\n\t\t}\n\t}\n\tc.Unlock()\n}", "title": "" }, { "docid": "b837821524db48fa6bd9177d23258d7b", "score": "0.6056843", "text": "func (store *BaseStore) Remove(id uint, m interface{}) (bool, error) {\n\terr := store.database.Where(\"id = ?\", id).Delete(m).Error\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\treturn true, nil\n}", "title": "" }, { "docid": "be0aee44884b4da51390247b2b03867f", "score": "0.6029238", "text": "func (q *QoS12) Remove(id uint16) bool {\n\tq.Lock()\n\tif qPub, ok := q.lookup[id]; ok {\n\t\tq.l.Remove(qPub)\n\t\tdelete(q.lookup, id)\n\t\tq.Unlock()\n\t\treturn true\n\t}\n\tq.Unlock()\n\treturn false\n}", "title": "" }, { "docid": "7beda1c0c950c735a93b808a22161411", "score": "0.5962145", "text": "func (cm *ContainerManager) RemoveByID(id string) error {\n\tcontainer, ok := cm.Containers[id]\n\tif !ok {\n\t\treturn fmt.Errorf(\"no container with id %s exists\", id)\n\t}\n\n\tcm.Syncer.Remove(container)\n\tdelete(cm.Containers, id)\n\treturn nil\n}", "title": "" }, { "docid": "098fc7e605bf8181906e854be4f1ede0", "score": "0.58982337", "text": "func (c *ContainerService) Remove(id string) error {\n\treturn c.do(\"DELETE\", fmt.Sprintf(\"/containers/%s\", id), nil, nil)\n}", "title": "" }, { "docid": "5adf2888b606e1712bc495571c6ee08f", "score": "0.5826113", "text": "func (ce CEDocker) Remove(id string) (err error) {\n\treturn ce.cli.ContainerRemove(ce.ctx, id, types.ContainerRemoveOptions{})\n}", "title": "" }, { "docid": "0ec46d40dea3affbfc15a2a0d40b62f9", "score": "0.57792354", "text": "func (q *QoS2Part2) Remove(id uint16) bool {\n\tq.Lock()\n\tif qPub, ok := q.lookup[id]; ok {\n\t\tq.l.Remove(qPub)\n\t\tdelete(q.lookup, id)\n\t\tq.Unlock()\n\t\treturn true\n\t}\n\tq.Unlock()\n\treturn false\n}", "title": "" }, { "docid": "5c320b02cb157871c12189d539879f9b", "score": "0.5774847", "text": "func removeContainer(\n\tlogger *log.Entry,\n\tclient client.DockerClient,\n\tcontainerID string,\n) (bool, error) 
{\n\tlogger.Debug(\"Removing container\")\n\terr := client.RemoveContainer(docker.RemoveContainerOptions{\n\t\tID: containerID,\n\t\tRemoveVolumes: true,\n\t\tForce: true,\n\t})\n\tswitch err.(type) {\n\tcase *docker.NoSuchContainer:\n\t\treturn false, nil\n\tcase nil:\n\t\treturn true, nil\n\t}\n\tlogger.WithFields(log.Fields{\"container\": containerID}).Warnf(\n\t\t\"Failed to remove container: %s\", err)\n\treturn false, err\n}", "title": "" }, { "docid": "4c3e8fed77e69660247750e14bddbfad", "score": "0.57392925", "text": "func removeContainerByID(cntx context.Context, contID string, cli *client.Client) {\n\n\t// TODO: Weigh the advantages of using the `Force: true` flag here\n\terr := cli.ContainerRemove(cntx, contID, types.ContainerRemoveOptions{Force: true})\n\tif err != nil {\n\t\tfmt.Println(\"ERROR: can't remove container\")\n\t}\n}", "title": "" }, { "docid": "dd93edf33dd646d252c1c03a7ab5caf2", "score": "0.5702924", "text": "func (set *HashSet) Remove(item interface{}) bool {\n\tif _, exists:= set.items[item]; !exists {\n\t\treturn false\n\t}\n\tdelete(set.items, item)\n\treturn true\n}", "title": "" }, { "docid": "5ebf528cfe0b899c4708d0b4b5a88860", "score": "0.5680598", "text": "func (s *Set[T]) Remove(item T) bool {\n\tif _, exists := s.items[item]; !exists {\n\t\treturn false\n\t}\n\n\tdelete(s.items, item)\n\treturn true\n}", "title": "" }, { "docid": "ab4bed3fed1e5b5b5b1dc8d9551dacc1", "score": "0.5605798", "text": "func (s *droneStorage) Remove(ctx context.Context, droneID string) bool {\n\ts.mx.Lock()\n\tdefer s.mx.Unlock()\n\n\tfor i, d := range s.m {\n\t\tif d.ID == droneID {\n\t\t\ts.m = append(s.m[:i], s.m[i+1:]...)\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "ba601b09c64a6539ca0738de55c661ae", "score": "0.55996305", "text": "func (c *Container) Del(key any) bool {\n\tif c.HasKey(key) {\n\t\tdelete(c.Data, key)\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "36e63b72834864cbae571031a5c15b6b", "score": "0.5573604", "text": "func (c ContainerID) Remove() error {\n\tif Debug || c == \"nil\" {\n\t\treturn nil\n\t}\n\treturn runDockerCommand(\"docker\", \"rm\", \"-v\", string(c)).Run()\n}", "title": "" }, { "docid": "4880e549e809071a71591aed7dfc3299", "score": "0.5570932", "text": "func (r *relayItems) Delete(id uint32) (relayItem, bool) {\n\tr.Lock()\n\titem, ok := r.items[id]\n\tif !ok {\n\t\tr.Unlock()\n\t\tr.logger.WithFields(LogField{\"id\", id}).Warn(\"Attempted to delete non-existent relay item.\")\n\t\treturn item, false\n\t}\n\tdelete(r.items, id)\n\tif item.tomb {\n\t\tr.tombs--\n\t}\n\tr.Unlock()\n\n\titem.timeout.Release()\n\treturn item, !item.tomb\n}", "title": "" }, { "docid": "cb18ce9976ce49cae7fefd3db705911d", "score": "0.55546856", "text": "func (d *Driver) Remove(id string) error {\n\treturn containerfs.EnsureRemoveAll(d.dir(id))\n}", "title": "" }, { "docid": "f9774ec19aa656852fca07e6c9186dd0", "score": "0.55129474", "text": "func (set PodSet) Remove(podID podmodel.ID) bool {\n\tif _, exists := set[podID]; exists {\n\t\tdelete(set, podID)\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "a9842ddf1e0b7df662a7ffcf63744abc", "score": "0.54985785", "text": "func (a *Agent) removeContainer(ID string) {\n\tdata, ok := a.containers[ID]\n\tif ok {\n\t\tlog.Println(\"remove container\", data.name)\n\t\tdelete(a.containers, ID)\n\t}\n\terr := os.Remove(path.Join(containersDataDir, ID))\n\tif err != nil {\n\t\tlog.Println(\"Error removing container data directory: \", err)\n\t}\n}", 
"title": "" }, { "docid": "81db700f7906ccdb354036916f4fad41", "score": "0.5495181", "text": "func (ssh *sshcmds) Remove(id string) error {\r\n\treturn ssh.mikrotik.Remove(ssh.path+\"/remove\", id)\r\n}", "title": "" }, { "docid": "9059eb2ffded0176c995c39012a9e697", "score": "0.5464728", "text": "func (c *cmd) Remove(id string) error {\r\n\treturn c.mikrotik.Remove(c.path+\"/remove\", id)\r\n}", "title": "" }, { "docid": "36638a31e147c25319f50a7f01b164fc", "score": "0.5459999", "text": "func (c ContainerID) Remove() (err error) {\n\tif !Debug {\n\t\terr = exec.Command(\"docker\", \"rm\", \"-v\", string(c)).Run()\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"Error removing %s: %v\", c, err)\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "e662315e76a1c3306f646892df68bceb", "score": "0.5437819", "text": "func (s *Set) Remove(id string) {\n\tdelete(s.data, id)\n}", "title": "" }, { "docid": "5711e2de31409e950c36aa17cfabe05f", "score": "0.5401195", "text": "func (mik *Mikrotik) Remove(apipath, id string) error {\r\n\t_, err := mik.RunArgs(apipath, \"=.id=\"+id)\r\n\treturn err\r\n}", "title": "" }, { "docid": "081f39e646ffb707b1c6c26accf36c4f", "score": "0.53713435", "text": "func (s *Sandbox) removeContainer(containerID string) error {\n\tif s == nil {\n\t\treturn types.ErrNeedSandbox\n\t}\n\n\tif containerID == \"\" {\n\t\treturn types.ErrNeedContainerID\n\t}\n\n\tif _, ok := s.containers[containerID]; !ok {\n\t\treturn errors.Wrapf(types.ErrNoSuchContainer, \"Could not remove the container %q from the sandbox %q containers list\",\n\t\t\tcontainerID, s.id)\n\t}\n\n\tdelete(s.containers, containerID)\n\n\treturn nil\n}", "title": "" }, { "docid": "3400636f16f28ed913f88938875f5ce7", "score": "0.5358665", "text": "func (k *Keychain) Remove(key []byte) (bool, error) {\n\tk.mtx.Lock()\n\n\tv, found := k.entries.Search(key)\n\tif found {\n\t\tentry := v.(*data.Entry)\n\t\tif entry.ValueSize != -1 {\n\t\t\tdefer k.mtx.Unlock()\n\n\t\t\tif err := k.appendItemDelete(key); err != nil {\n\t\t\t\treturn false, err\n\t\t\t}\n\n\t\t\tentry.ValueSize = -1\n\t\t\treturn true, nil\n\t\t}\n\t}\n\n\tk.mtx.Unlock()\n\treturn false, nil\n}", "title": "" }, { "docid": "b7b09fe75ca1ae172deb25527e501082", "score": "0.5354611", "text": "func (trID *TableIDtoMatchIDStore) Remove(id int64) {\n\ttrID.RLock()\n\tdefer trID.RUnlock()\n\tdelete(trID.data, id)\n\treturn\n}", "title": "" }, { "docid": "66d34697665746b371b1d2efcf82970c", "score": "0.5344376", "text": "func StopContainerById(id string, done chan bool, logger *log.Logger) {\n\tif err := cli.ContainerStop(context.Background(), id, nil); err != nil {\n\t\tpanic(err)\n\t}\n\tif err := cli.ContainerRemove(context.Background(), id, types.ContainerRemoveOptions{}); err != nil {\n\t\tpanic(err)\n\t}\n\tfor {\n\t\tlogger.Printf(message.LOG_WAITING_STOP)\n\t\ttime.Sleep(500 * time.Millisecond)\n\t\tif stillRunning := containerRunningById(id); stillRunning == false {\n\t\t\tlogger.Printf(message.LOG_STOPPED)\n\t\t\tdone <- true\n\t\t\treturn\n\t\t}\n\t}\n}", "title": "" }, { "docid": "d6781f39f8b25cf6a7b2e95b0b16dee7", "score": "0.5336394", "text": "func ContainerRemove(id string) error {\n\treturn mgr.Do(func(ctx context.Context, cli *client.Client) (err error) {\n\t\topts := types.ContainerRemoveOptions{}\n\t\terr = cli.ContainerRemove(ctx, id, opts)\n\t\treturn\n\t})\n}", "title": "" }, { "docid": "03f32e89a085a77ac7201149c63d4b23", "score": "0.53259414", "text": "func (c *ContainerdContainerizer) ContainerRemove(id string) error {\n\t// create a new context 
with namespace\n\tctx := namespaces.WithNamespace(context.Background(), c.Namespace)\n\n\tcontainer, err:= c.Client.LoadContainer(ctx, id)\n\tif err != nil {\n\t\tlogger.GetInstance().Warn(\"get container from id failed\", zap.String(\"id\", id), zap.Error(err))\n\t\treturn err\n\t}\n\n\t//delete container\n\tif err:= container.Delete(ctx, containerd.WithSnapshotCleanup); err != nil {\n\t\tlogger.GetInstance().Error(\"delete container by id failed\", zap.String(\"id\", id), zap.Error(err))\n\t\treturn err\n\t}\n\n\tlogger.GetInstance().Info(\"container deleted \", zap.String(\"id\", id))\n\treturn nil\n}", "title": "" }, { "docid": "b9f3deace2ab057de4a13b05d4f2dbf3", "score": "0.5324125", "text": "func (m *manager) Remove(ctx context.Context, id string) error {\n\treturn m.service.Remove(ctx, id)\n}", "title": "" }, { "docid": "a8be1ae70f2f25cca855cca8db7cce86", "score": "0.5305802", "text": "func (repo *TodoRepository) DeleteById(id *string) (bool, error) {\n\tdatabase, err := datastore.NewDatabase()\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tresult, err := database.Exec(\"DELETE FROM todos WHERE id=?\", id)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\trowsAffected , err := result.RowsAffected()\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\treturn rowsAffected > 0, nil\n}", "title": "" }, { "docid": "d607d6a70c28480285ed219b08c969f0", "score": "0.52815133", "text": "func removeContainerByIDHandler(rw http.ResponseWriter, req *http.Request) {\n\tvars := mux.Vars(req)\n\tcontainerID := vars[\"id\"]\n\n\tcontainer, err := getContainer(containerID)\n\tif err != nil {\n\t\trequests.ErrorJSON(rw, http.StatusBadRequest, requests.StatusFailed, err.Error())\n\t\treturn\n\t}\n\n\t// In separate routines, reset the agent and recycle the instance.\n\tif env != \"testing\" {\n\t\tgo func() {\n\t\t\tlog.Println(\"calling delancey delete\")\n\t\t\terr := delancey.Delete(container)\n\t\t\tif err != nil {\n\t\t\t\t// TODO: handle error\n\t\t\t\tfmt.Println(err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tlog.Println(\"successfully deleted\")\n\n\t\t\tlog.Println(\"putting instance in collection\")\n\t\t\terr = db.Set(schemas.InstancesCollection, container.Instance.ID, container.Instance)\n\t\t\tif err != nil {\n\t\t\t\t// TODO: handle error\n\t\t\t\tfmt.Println(err)\n\t\t\t}\n\t\t\tlog.Println(\"successfully stored instance\")\n\t\t}()\n\t}\n\n\tdb.Delete(schemas.ContainersCollection, container.ID)\n\trenderer.JSON(rw, http.StatusOK, map[string]string{\n\t\t\"status\": requests.StatusRemoved,\n\t})\n}", "title": "" }, { "docid": "394baffb26fd5a79b1cfaf0dc5bf2bad", "score": "0.52360284", "text": "func Remove(id string) {\n\t// Lock the mutex\n\tmutex.Lock()\n\tdefer mutex.Unlock()\n\n\t// Try to get the session from the cached sessions map\n\ts, ok := sessions[id]\n\tif ok {\n\t\t// Set the valid flag to false, to be sure, this\n\t\t// session won't be saved to the database\n\t\ts.valid = false\n\t}\n\n\t// Delete the session from the map\n\tdelete(sessions, id)\n\n\t// Remove the session also from the database\n\tremoveSessionFromDB(id)\n}", "title": "" }, { "docid": "ab07b42e5f43b77c68cc82df7e732dd0", "score": "0.5228808", "text": "func (c *DockerContainerizer) ContainerRemove(id string) error {\n\treturn c.Client.RemoveContainer(docker.RemoveContainerOptions{\n\t\tID: id,\n\t})\n}", "title": "" }, { "docid": "0c2b5509d5735c9f1a518a63c7711cc5", "score": "0.522509", "text": "func (t *DynamicSet[T]) Remove(id DynamicSetItemID) {\n\tdelete(t.items, id)\n}", "title": "" }, { "docid": 
"9df209f92e2e35742d6fc20e46a85e69", "score": "0.5222172", "text": "func DeleteItem(ID string) bool {\n\tlist := GetItemList()\n\tfor idx, _ := range list {\n\t\tif(list[idx].ID == ID){\n\t\t\tlist = append(list[:idx], list[idx+1:]...)\n\t\t\twriteItem(list)\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "d79addb10d8bf83fd27046477d1440cd", "score": "0.5208316", "text": "func Remove(id string) error {\n\treturn collection().Remove(bson.M{\"_id\": id})\n}", "title": "" }, { "docid": "690ccb4c8a6d31101bd65d60c07f5d37", "score": "0.5180654", "text": "func (registry *Registry) Delete(id string) error {\n\tregistry.Lock()\n\tdefer registry.Unlock()\n\n\t_, exists := registry.items[id]\n\tif !exists {\n\t\treturn errors.New(\"Registry delete error\")\n\t}\n\n\tdelete(registry.items, id)\n\treturn nil\n}", "title": "" }, { "docid": "5fd59c0045884454ba23234a5bee35af", "score": "0.51769346", "text": "func (s *Store) Delete(id string) {\n\ts.lock.Lock()\n\tdefer s.lock.Unlock()\n\tid, err := s.idIndex.Get(id)\n\tif err != nil {\n\t\t// Note: The idIndex.Delete and delete doesn't handle truncated index.\n\t\t// So we need to return if there are error.\n\t\treturn\n\t}\n\tc := s.containers[id]\n\tif c.IO != nil {\n\t\tc.IO.Close()\n\t}\n\ts.labels.Release(c.ProcessLabel)\n\ts.idIndex.Delete(id)\n\tdelete(s.containers, id)\n}", "title": "" }, { "docid": "18a893ebbf65465ac2abe4ef2a28fb18", "score": "0.5172114", "text": "func Remove(ctx context.Context, nameOrID string, force, volumes *bool) error {\n\tconn, err := bindings.GetClient(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tparams := url.Values{}\n\tif force != nil {\n\t\tparams.Set(\"force\", strconv.FormatBool(*force))\n\t}\n\tif volumes != nil {\n\t\tparams.Set(\"vols\", strconv.FormatBool(*volumes))\n\t}\n\tresponse, err := conn.DoRequest(nil, http.MethodDelete, \"/containers/%s\", params, nameOrID)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn response.Process(nil)\n}", "title": "" }, { "docid": "ab8e42e654e7eda5210c6445567b9f06", "score": "0.5170239", "text": "func (c *Collection) Delete(id string) (bool, error) {\n\tt := c.pool.NewTransaction()\n\tdeleted := false\n\tt.Delete(c, id, &deleted)\n\tif err := t.Exec(); err != nil {\n\t\treturn deleted, err\n\t}\n\treturn deleted, nil\n}", "title": "" }, { "docid": "cc5af8493b23d0635be5a563d252fd07", "score": "0.51224726", "text": "func (m *BasicTempRepoManager) Remove(id string) error {\n\tm.lck.Lock()\n\tdefer m.lck.Unlock()\n\tentry, ok := m.entries[id]\n\tif !ok {\n\t\treturn nil\n\t}\n\tif err := os.RemoveAll(entry.path); err != nil {\n\t\treturn err\n\t}\n\tdelete(m.entries, id)\n\treturn nil\n}", "title": "" }, { "docid": "ea01653f83ba37c4f5ec2707904898aa", "score": "0.51179343", "text": "func (o *MeshLibrary) RemoveItem(id int64) {\n\tlog.Println(\"Calling MeshLibrary.RemoveItem()\")\n\n\t// Build out the method's arguments\n\tgoArguments := make([]reflect.Value, 1, 1)\n\tgoArguments[0] = reflect.ValueOf(id)\n\n\t// Call the parent method.\n\n\to.callParentMethod(o.baseClass(), \"remove_item\", goArguments, \"\")\n\n\tlog.Println(\" Function successfully completed.\")\n\n}", "title": "" }, { "docid": "b6268857b25bcbb0b7dae43adae86860", "score": "0.5115436", "text": "func (r *Room) Remove(obj Objecter) bool {\n\tvar i int\n\tfor i = 0; i < len(r.inventory); i++ {\n\t\tif obj == r.inventory[i] {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif i == len(r.inventory) {\n\t\treturn false\n\t}\n\n\tcopy(r.inventory[i:], r.inventory[i+1:])\n\tr.inventory[len(r.inventory)-1] = 
nil\n\tr.inventory = r.inventory[:len(r.inventory)-1]\n\treturn true\n}", "title": "" }, { "docid": "d6300040caca95274d9c8eb5b7ed72c4", "score": "0.51107115", "text": "func (c *ObjCache) Del(k string) bool {\n\tc.mu.Lock()\n\titem, ok := c.items[k]\n\tif ok {\n\t\tc.itemCount = c.itemCount - 1\n\t\tdelete(c.items, k)\n\t\tc.list.Remove(item)\n\t}\n\tc.mu.Unlock()\n\treturn ok\n}", "title": "" }, { "docid": "9f97d5d62db110a965805913dbe6e043", "score": "0.5106851", "text": "func (set InterfaceSet) Remove(ifName string) bool {\n\tif _, exists := set[ifName]; exists {\n\t\tdelete(set, ifName)\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "b081c8978f11fb891d7fa8dcdd9dc716", "score": "0.51066536", "text": "func (r *SQLiteRepository) RemoveItemFromDB(itemID string) (bool, error) {\n\tif itemID == \"0\" {\n\t\treturn false, errors.New(\"item cannot be removed from warehouse, please check again later\")\n\t}\n\treturn true, nil\n}", "title": "" }, { "docid": "55ea5f648a0ccda6d9af753f4816e7e1", "score": "0.5103684", "text": "func (e *ERaft) IsIDRemoved(id uint64) bool {\n\treturn e.removed[types.ID(id)]\n}", "title": "" }, { "docid": "f0cb661a0d1bf3c60ede762cfe13283f", "score": "0.50950325", "text": "func Delete(id string) bool{\n\tbucket, _ := GetCluster().OpenBucket(\"UBookBucket\", \"\")\n\tvar n1qlParams []interface{}\n\tquery := gocb.NewN1qlQuery(\"DELETE FROM UBookBucket WHERE META(UBookBucket).id = $1\")\n\tn1qlParams = append(n1qlParams, id)\n\t_, err := bucket.ExecuteN1qlQuery(query, n1qlParams)\n\tif err!=nil{\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" }, { "docid": "373be14752591a88696f13a11ba2b04b", "score": "0.50849974", "text": "func (r *repo) RemoveSlide(ctx context.Context, slideID uint64) (bool, error) {\n\tquery := squirrel.Delete(tableName).\n\t\tWhere(squirrel.Eq{\"id\": slideID}).\n\t\tRunWith(r.db).\n\t\tPlaceholderFormat(squirrel.Dollar)\n\n\tresult, err := query.ExecContext(ctx)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\trowsAffected, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tif rowsAffected <= 0 {\n\t\treturn false, ErrSlideNotFound\n\t}\n\n\treturn true, err\n}", "title": "" }, { "docid": "2384303e50906786a3a3b123051962cf", "score": "0.5078058", "text": "func (s *LRUCache) Remove(key string) bool {\n\tif element, ok := s.items[key]; ok {\n\t\ts.removeNode(element)\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "273a2f537708067103e7738984a86fac", "score": "0.50569266", "text": "func (m *taskManager) Remove(containerID string) {\n\tm.mu.Lock()\n\tdefer m.mu.Unlock()\n\n\ttask, ok := m.tasks[containerID]\n\tif !ok {\n\t\t// the task was already removed or just never existed, no-op\n\t\treturn\n\t}\n\n\tm.remove(containerID, task)\n}", "title": "" }, { "docid": "3f09c870c5fc12913b53b4f6d2f59e02", "score": "0.50463575", "text": "func containerRunningById(id string) bool {\n\tcontainers := getContainers()\n\tfor _, container := range containers {\n\t\tif container.ID == id {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "c4cea6cd14a650dd13d9d26ca47bfd54", "score": "0.5040526", "text": "func (list *PushSubscriptions) Remove(subscriptionID string) bool {\n\tfor index, item := range list.Items {\n\t\tif item.ID() == subscriptionID {\n\t\t\tlist.Items = append(list.Items[:index], list.Items[index+1:]...)\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "a2a82b556d4f856c1312ebe24887ee7b", "score": "0.5039223", "text": 
"func (engine *Engine) RemoveContainer(containerid string) error {\n\n\tcontainer := engine.Container(containerid)\n\tif container == nil {\n\t\treturn fmt.Errorf(\"remove container %s not found\", ShortContainerID(containerid))\n\t}\n\n\t//rename engine container, append '-expel' suffix.\n\tif ret := strings.HasSuffix(container.Info.Name, \"-expel\"); !ret {\n\t\texpelName := strings.TrimPrefix(container.Info.Name, \"/\") + \"-\" + rand.UUID(true)[:8] + \"-expel\"\n\t\toperate := models.ContainerOperate{Action: \"rename\", Container: container.Info.ID, NewName: expelName}\n\t\tif err := engine.OperateContainer(operate); err != nil {\n\t\t\tlogger.ERROR(\"[#cluster#] engine %s container %s expel, rename error:%s\", engine.IP, ShortContainerID(container.Info.ID), err.Error())\n\t\t} else {\n\t\t\tlogger.WARN(\"[#cluster#] engine %s container %s expel, rename to %s.\", engine.IP, ShortContainerID(container.Info.ID), operate.NewName)\n\t\t}\n\t}\n\n\t//remove engine local metabase of container.\n\tengine.Lock()\n\tengine.configCache.RemoveContainerBaseConfig(container.MetaID(), containerid)\n\tdelete(engine.containers, containerid)\n\tengine.Unlock()\n\n\tdefer func() {\n\t\tengine.Lock()\n\t\tif _, ret := engine.containers[containerid]; ret {\n\t\t\tdelete(engine.containers, containerid)\n\t\t}\n\t\tengine.Unlock()\n\t}()\n\n\tif ret := engine.useRemovePool(container.Config); ret {\n\t\tengine.removePool.Lock()\n\t\tif _, ret := engine.removePool.containers[containerid]; !ret {\n\t\t\tengine.removePool.containers[containerid] = &RemoveContainer{\n\t\t\t\tmetaID: container.MetaID(),\n\t\t\t\tcontainerID: containerid,\n\t\t\t\ttimeStamp: time.Now().Unix(),\n\t\t\t\tfailCount: 0,\n\t\t\t}\n\t\t\tlogger.INFO(\"[#cluster#] engine %s container %s add to remove-delay pool.\", engine.IP, ShortContainerID(containerid))\n\t\t}\n\t\tengine.removePool.Unlock()\n\t} else {\n\t\tif err := engine.client.RemoveContainerRequest(context.Background(), containerid); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tlogger.INFO(\"[#cluster#] engine %s remove container %s\", engine.IP, ShortContainerID(containerid))\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "4b5c25a6a26b08cc9bcebe64da2c76b6", "score": "0.5038695", "text": "func (p *peers) RemovePeerByID(id string) bool {\n\tnewPeers := make([]Peer, 0, len(p.Peers))\n\tfound := false\n\tfor _, x := range p.Peers {\n\t\tif x.ID != id {\n\t\t\tnewPeers = append(newPeers, x)\n\t\t} else {\n\t\t\tfound = true\n\t\t}\n\t}\n\tp.Peers = newPeers\n\treturn found\n}", "title": "" }, { "docid": "f6da873c2aad453346f07418d11e45df", "score": "0.503792", "text": "func DelItemById(id int) error {\n\tif _, ok := DB[id]; !ok {\n\t\treturn errors.New(\"Item with that id not found\")\n\t}\n\tdelete(DB, id)\n\treturn nil\n}", "title": "" }, { "docid": "94bf72f32f35a41884797fb03cec0fbd", "score": "0.5035705", "text": "func ContainerExists(exec boil.Executor, id int) (bool, error) {\n\tvar exists bool\n\tsql := \"select exists(select 1 from \\\"containers\\\" where \\\"id\\\"=$1 limit 1)\"\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, id)\n\t}\n\n\trow := exec.QueryRow(sql, id)\n\n\terr := row.Scan(&exists)\n\tif err != nil {\n\t\treturn false, errors.Wrap(err, \"models: unable to check if containers exists\")\n\t}\n\n\treturn exists, nil\n}", "title": "" }, { "docid": "e468d85e8dcab39779048a44883f56b4", "score": "0.5031034", "text": "func RemoveRecord(_id int) bool {\n\tsqlCommand := fmt.Sprintf(\"DELETE FROM RECORDS WHERE 
id=%d\", _id)\n\tresult, err := mSQLDB.Exec(sqlCommand)\n\tif CheckErrorJustALittleBitAlarmed(err) {\n\t\treturn false\n\t}\n\n\trowsAffected, err2 := result.RowsAffected()\n\tif CheckErrorJustALittleBitAlarmed(err2) {\n\t\treturn false\n\t}\n\n\tif rowsAffected <= 0 {\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" }, { "docid": "0dc937a255405fe461addf98a02f9d47", "score": "0.50237507", "text": "func (ts *TableStore) Remove(id int64) {\n\tts.RLock()\n\tdefer ts.RUnlock()\n\tdelete(ts.data, id)\n\treturn\n}", "title": "" }, { "docid": "ec0759e6421a5189649080e6a91cf509", "score": "0.5012089", "text": "func (s *BulletStorage) Remove(id int) {\n\tif !s.vec[id].occupied {\n\t\tpanic(\"removeing already removed value\")\n\t}\n\n\ts.count--\n\ts.outdated = true\n\n\ts.freeIDs.BiInsert(id, gen.IntBiComp)\n\ts.vec[id].occupied = false\n}", "title": "" }, { "docid": "dcc12209907006c69cf1e8e4e3e19fd9", "score": "0.50081944", "text": "func (h *MinHash) Remove(adr mat.Point, id, group int) bool {\n\treturn h.Nodes[prj(adr.X, adr.Y, h.w)].Remove(id, group)\n}", "title": "" }, { "docid": "82b8c33e2d481044d32395be05a2df2e", "score": "0.5002169", "text": "func (m *eventManager) Remove(eventID int64) bool {\n\tif m == nil || atomic.LoadInt32(&m.started) == 0 {\n\t\treturn false\n\t}\n\tm.m.Lock()\n\tdefer m.m.Unlock()\n\tfor i := range m.events {\n\t\tif m.events[i].ID == eventID {\n\t\t\tm.events = append(m.events[:i], m.events[i+1:]...)\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "1d687d73a7b3e276aefc7dbc8136ef66", "score": "0.49967474", "text": "func (cm CarMap) Remove(id uuid.UUID) {\n\tdelete(cm, id)\n}", "title": "" }, { "docid": "1c6e2cdd489cf81bb6d8574014c7e2cb", "score": "0.49846992", "text": "func (c *Cache) Remove(name string) (wasWriting bool) {\n\tname = clean(name)\n\tc.mu.Lock()\n\titem := c.item[name]\n\tif item != nil {\n\t\tdelete(c.item, name)\n\t}\n\tc.mu.Unlock()\n\tif item == nil {\n\t\treturn false\n\t}\n\treturn item.remove(\"file deleted\")\n}", "title": "" }, { "docid": "f510c4ebbe34a6128dc05eee2198b243", "score": "0.4970162", "text": "func (mem *chunkedUploadProgressStorer) Remove(id string) error {\n\tdelete(mem.list, id)\n\treturn nil\n}", "title": "" }, { "docid": "878b4bf3f806dbb17d45df453a7cf659", "score": "0.49684837", "text": "func (trID *TableIDtoPlayerIDStore) Remove(id int64) {\n\ttrID.RLock()\n\tdefer trID.RUnlock()\n\tdelete(trID.data, id)\n\treturn\n}", "title": "" }, { "docid": "10db46603ddff299e9c8f3196cc1dd1d", "score": "0.49664158", "text": "func (d *Database) RemoveById(id uint) {\n\td.removeByTypedId(key.Type(id))\n}", "title": "" }, { "docid": "999dbdd4b0adf0c98492a5ff8f6580c9", "score": "0.496485", "text": "func (q *Queue) Remove(ctx context.Context, value interface{}) (bool, error) {\n\tif data, err := q.validateAndSerialize(value); err != nil {\n\t\treturn false, err\n\t} else {\n\t\trequest := codec.EncodeQueueRemoveRequest(q.name, data)\n\t\tif response, err := q.invokeOnPartition(ctx, request, q.partitionID); err != nil {\n\t\t\treturn false, nil\n\t\t} else {\n\t\t\treturn codec.DecodeQueueRemoveResponse(response), nil\n\t\t}\n\t}\n}", "title": "" }, { "docid": "2be2fc912c942c4104ed8d31b99bcee3", "score": "0.49592763", "text": "func Remove(id types.UUID) error {\n\tfor i, s := range sessions {\n\t\tif s.ID == id {\n\t\t\tsessions = append(sessions[:i], sessions[i+1:]...)\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn fmt.Errorf(\"Could not find session\")\n}", "title": "" }, { "docid": 
"23b5a6bebfedad8f357de4f7b4123d12", "score": "0.49446768", "text": "func (w *ThreadSafe) Remove(c cid.Cid, ses uint64) bool {\n\tw.lk.Lock()\n\tdefer w.lk.Unlock()\n\te, ok := w.set[c]\n\tif !ok {\n\t\treturn false\n\t}\n\n\tdelete(e.SesTrk, ses)\n\tif len(e.SesTrk) == 0 {\n\t\tdelete(w.set, c)\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "0784e3fd51ad4aad82ad04a826648250", "score": "0.4918735", "text": "func (lc *MockLostController) RemoveById(ctx context.Context, id int) (int, error) {\n\t_, err := lc.GetById(context.Background(), id)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tfor index, lost := range lc.Losts {\n\t\tif lost.Id == id {\n\t\t\tpictureId := lost.PictureId\n\t\t\tif index == len(lc.Losts)-1 {\n\t\t\t\tlc.Losts = lc.Losts[:id]\n\t\t\t} else {\n\t\t\t\tlc.Losts = append(lc.Losts[:id], lc.Losts[(id+1):]...)\n\t\t\t}\n\t\t\treturn pictureId, nil\n\t\t}\n\t}\n\treturn 0, errs.LostNotFound\n}", "title": "" }, { "docid": "2afc7250d881336749788b6213478fde", "score": "0.49169683", "text": "func (queue *Queue) Delete(cfg *Config, id string) bool {\n\tclient := cfg.RiakConnection()\n\tbucket, err := client.NewBucketType(\"messages\", queue.Name)\n\tif err == nil {\n\t\terr = bucket.Delete(id)\n\t\tif err == nil {\n\t\t\tdefer decrementMessageCount(cfg.Stats.Client, queue.Name, 1)\n\t\t\treturn true\n\t\t}\n\t}\n\n\t// if we got here we're borked\n\t// TODO stats cleanup? Possibility that this gets us out of sync\n\tlogrus.Error(err)\n\treturn false\n}", "title": "" }, { "docid": "4b04d2c8ae5dfa2fea244a1c4176abdb", "score": "0.4916546", "text": "func (t *Tile) Remove(obj ObjectLike) bool {\n\tt.zone.lock()\n\tdefer t.zone.unlock()\n\n\tif t.remove(obj) {\n\t\tt.zone.notifyRemove(t, obj)\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "0acbab936937482777a7dfea271e510b", "score": "0.49141595", "text": "func (t *Tree) Remove(b gfx.Boundable) bool {\n\tt.Lock()\n\tdefer t.Unlock()\n\n\t// Find in the node map and delete it.\n\tn, ok := t.nodeByObject[b]\n\tif !ok {\n\t\treturn false\n\t}\n\tdelete(t.nodeByObject, b)\n\n\tvar oct int\n\tfor oct = 0; oct < 9; oct++ {\n\t\tfor index, o := range n.objects[oct] {\n\t\t\tif o.b == b {\n\t\t\t\t// This is the object.\n\t\t\t\tn.objects[oct][index] = nil\n\t\t\t\tn.objects[oct] = append(n.objects[oct][:index], n.objects[oct][index+1:]...)\n\t\t\t\tt.numObjects--\n\t\t\t\tt.decimate(n)\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\n\tpanic(\"Failed to remove object.\")\n}", "title": "" }, { "docid": "3362f946f7720b0005538383ad3b189c", "score": "0.49050426", "text": "func RemoveCategory(_id int) bool {\n\n\tsqlCommand := fmt.Sprintf(\"DELETE FROM RECORDS WHERE category_id=%d\", _id)\n\tresult, _ := mSQLDB.Exec(sqlCommand)\n\n\tsqlCommand = fmt.Sprintf(\"DELETE FROM CATEGORIES WHERE id=%d\", _id)\n\tresult, err := mSQLDB.Exec(sqlCommand)\n\tif CheckErrorJustALittleBitAlarmed(err) {\n\t\treturn false\n\t}\n\n\trowsAffected, err2 := result.RowsAffected()\n\tif CheckErrorJustALittleBitAlarmed(err2) {\n\t\treturn false\n\t}\n\n\tif rowsAffected <= 0 {\n\t\treturn false\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "295db4da068dc2adce966a754f295d0c", "score": "0.4897604", "text": "func Del(id string) error {\n\treturn getC().Del(id)\n}", "title": "" }, { "docid": "106a93d184abec9c1b3b923063ca42b8", "score": "0.4893495", "text": "func (this RegistryContainerEntry) Remove(zkc zk.ZK) error {\n\tregkey, _, err := RegistryKeyValue(KContainer, this)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// We use this 
regkey to figure out the parent node:\n\tcontainers_path := filepath.Dir(regkey)\n\tglog.V(50).Infoln(\"Looking for containers in\", containers_path, \"for entry\", this.ContainerId)\n\n\tparent_node, err := zkc.Get(containers_path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tmatches, err := parent_node.FilterChildrenRecursive(func(z *zk.Node) bool {\n\t\thost, _ := ParseHostPort(filepath.Base(z.GetPath()))\n\t\t// This is a filter function\n\t\treturn host != this.ContainerId || !z.IsLeaf()\n\t})\n\t// Delete the matches\n\tfor _, match := range matches {\n\t\terr = zkc.Delete(match.GetPath())\n\t\tif err != nil {\n\t\t\tglog.Warningln(\"Error de-registering\", match.GetPath(), err)\n\t\t\treturn err\n\t\t}\n\t\tglog.Infoln(\"De-registered\", match.GetPath(), err)\n\t}\n\n\t// After the children have been removed, check the parent again\n\tparent_node, err = zkc.Get(containers_path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif parent_node.Stats.NumChildren == 0 {\n\t\tglog.Infoln(\"No children under\", containers_path, \"removing parent.\")\n\t\terr = zkc.Delete(containers_path)\n\t\tif err != nil {\n\t\t\tglog.Warningln(\"Error removing parent node\", containers_path)\n\t\t\treturn err\n\t\t} else {\n\t\t\tglog.Infoln(\"Removed parent node\", containers_path)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "576ad2c4d83573379d82c323c3388378", "score": "0.48926944", "text": "func (e *fileExtractor) removeContainer(ctx context.Context, containerID string) error {\n\treturn e.client.RemoveContainer(ctx, docker.RemoveContainerOptions{\n\t\tID: containerID,\n\t})\n}", "title": "" }, { "docid": "c2d669c0c3ef32bc6e84c68fa851f250", "score": "0.48861104", "text": "func (s *SQLState) HasContainer(id string) (bool, error) {\n\tconst query = \"SELECT 1 FROM containers WHERE Id=?;\"\n\n\tif id == \"\" {\n\t\treturn false, ErrEmptyID\n\t}\n\n\tif !s.valid {\n\t\treturn false, ErrDBClosed\n\t}\n\n\trow := s.db.QueryRow(query, id)\n\n\tvar check int\n\terr := row.Scan(&check)\n\tif err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn false, nil\n\t\t}\n\n\t\treturn false, errors.Wrapf(err, \"error questing database for existence of container %s\", id)\n\t} else if check != 1 {\n\t\treturn false, errors.Wrapf(ErrInternal, \"check digit for HasContainer query incorrect\")\n\t}\n\n\treturn true, nil\n}", "title": "" }, { "docid": "dfd2810c56a205a56b8f7a868f4fbe17", "score": "0.48829016", "text": "func (s *InMemoryState) HasContainer(id string) (bool, error) {\n\tif id == \"\" {\n\t\treturn false, define.ErrEmptyID\n\t}\n\n\tctr, ok := s.containers[id]\n\tif !ok || (s.namespace != \"\" && s.namespace != ctr.config.Namespace) {\n\t\treturn false, nil\n\t}\n\n\treturn true, nil\n}", "title": "" }, { "docid": "a00d1c1f01cece108000bed22865383c", "score": "0.48798946", "text": "func (c *Client) Delete(id string) error {\n\titem, err := c.fetchItem(id)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// TODO: if CollectionType, check if empty\n\tif item.Type == rmtool.CollectionType {\n\t\terr = c.checkEmpty(item.ID)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\twrap := make([]uploadItem, 1)\n\twrap[0] = item.toUpload()\n\tresult := make([]Item, 0)\n\tc.storageRequest(\"PUT\", epDelete, wrap, result)\n\n\tif len(result) != 1 {\n\t\treturn fmt.Errorf(\"got unexpected number of items (%v)\", len(result))\n\t}\n\ti := result[0]\n\n\t// A successful response can still include errors\n\treturn i.Err()\n}", "title": "" }, { "docid": "659f5d656aca322db0b088f803abfce1", "score": "0.48765546", "text": 
"func (c *Calcium) RemoveContainer(ids []string) (chan *types.RemoveContainerMessage, error) {\n\tch := make(chan *types.RemoveContainerMessage)\n\n\tcontainers, err := c.GetContainers(ids)\n\tif err != nil {\n\t\treturn ch, err\n\t}\n\n\tgo func() {\n\t\twg := sync.WaitGroup{}\n\t\twg.Add(len(containers))\n\n\t\tfor _, container := range containers {\n\t\t\tgo func(container *types.Container) {\n\t\t\t\tdefer wg.Done()\n\n\t\t\t\tsuccess := true\n\t\t\t\tmessage := \"success\"\n\n\t\t\t\tif err := removeOneContainer(container); err != nil {\n\t\t\t\t\tsuccess = false\n\t\t\t\t\tmessage = err.Error()\n\t\t\t\t}\n\t\t\t\tch <- &types.RemoveContainerMessage{\n\t\t\t\t\tContainerID: container.ID,\n\t\t\t\t\tSuccess: success,\n\t\t\t\t\tMessage: message,\n\t\t\t\t}\n\t\t\t}(container)\n\t\t}\n\n\t\twg.Wait()\n\t\tclose(ch)\n\t}()\n\n\treturn ch, nil\n\n}", "title": "" }, { "docid": "1ea9389deb30fccd54d044c1c3283859", "score": "0.48672476", "text": "func Remove(containerId string, force bool) {\n\n\tctx := context.Background()\n\n\tcli, cliErr := client.NewEnvClient()\n\tif cliErr != nil {\n\t\tpanic(cliErr)\n\t}\n\n\tremOpt := types.ContainerRemoveOptions{\n\t\tRemoveVolumes: true,\n\t\tRemoveLinks: true,\n\t\tForce: force,\n\t}\n\n\tremErr := cli.ContainerRemove(ctx, containerId, remOpt)\n\tif remErr != nil {\n\t\tpanic(remErr)\n\t}\n}", "title": "" }, { "docid": "06226d8f4838d922b9fb7b9ff9929711", "score": "0.48663986", "text": "func DD_removeRect(rect *DD_rectT, id interface{}, root **DD_nodeT) bool {\n\tvar reInsertList *DD_listNodeT\n\n\tif !DD_removeRectRec(rect, id, *root, &reInsertList) {\n\t\t// Found and deleted a data item\n\t\t// Reinsert any branches from eliminated nodes\n\t\tfor reInsertList != nil {\n\t\t\ttempNode := reInsertList.node\n\n\t\t\tfor index := 0; index < tempNode.count; index++ {\n\t\t\t\t// TODO go over this code. should I use (tempNode->m_level - 1)?\n\t\t\t\tDD_insertRect(&tempNode.branch[index], root, tempNode.level)\n\t\t\t}\n\t\t\treInsertList = reInsertList.next\n\t\t}\n\n\t\t// Check for redundant root (not leaf, 1 child) and eliminate TODO replace\n\t\t// if with while? 
In case there is a whole branch of redundant roots...\n\t\tif (*root).count == 1 && (*root).isInternalNode() {\n\t\t\ttempNode := (*root).branch[0].child\n\t\t\t*root = tempNode\n\t\t}\n\t\treturn false\n\t} else {\n\t\treturn true\n\t}\n}", "title": "" }, { "docid": "c682b34058b69bbcdffff8c80ccf5484", "score": "0.48660055", "text": "func (crls *ContivRuleLists) RemoveByIdx(idx int) bool {\n\tif idx < crls.numItems {\n\t\truleList := crls.ruleLists[idx]\n\t\tif idx < crls.numItems-1 {\n\t\t\tcopy(crls.ruleLists[idx:], crls.ruleLists[idx+1:])\n\t\t}\n\t\tcrls.numItems--\n\t\tcrls.ruleLists[crls.numItems] = nil\n\t\tdelete(crls.byID, ruleList.ID)\n\t\tfor iface := range ruleList.Interfaces {\n\t\t\tdelete(crls.byInterface, iface)\n\t\t}\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "9d72097b7a57c00c191109142d4eeb97", "score": "0.48477423", "text": "func (s *Store) Remove(iid ffs.APIID, c cid.Cid) error {\n\tscs, err := s.get(c)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"getting current storage configs for cid: %s\", err)\n\t}\n\tidx := -1\n\tfor i := range scs {\n\t\tif scs[i].IID == iid {\n\t\t\tidx = i\n\t\t\tbreak\n\t\t}\n\t}\n\tif idx != -1 {\n\t\tscs[idx] = scs[len(scs)-1]\n\t\tscs = scs[:len(scs)-1]\n\t}\n\tif err := s.persist(c, scs); err != nil {\n\t\treturn fmt.Errorf(\"persisting updated storage configs for cid: %s\", err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e8a2ad6b528849d9748b6327e66a4059", "score": "0.4846363", "text": "func Remove(id string) error {\n\treturn gWallets.remove(id)\n}", "title": "" }, { "docid": "8e54718d73d1d6053a3e15382af834d8", "score": "0.48463187", "text": "func (c *Cursor) Remove(v uint64) (changed bool, err error) {\n\thi, lo := highbits(v), lowbits(v)\n\n\t// Move cursor to the key of the container.\n\t// Exit if container does not exist.\n\tif exact, err := c.Seek(hi); err != nil {\n\t\treturn false, err\n\t} else if !exact {\n\t\treturn false, nil\n\t}\n\n\t// If the container exists and bit is not set then update the page.\n\telem := &c.stack.elems[c.stack.top]\n\tleafPage, _, err := c.tx.readPage(elem.pgno)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\tcell := readLeafCell(leafPage, elem.index)\n\n\tswitch cell.Type {\n\tcase ContainerTypeArray:\n\t\t// Exit if value does not exists in array container.\n\t\ta := toArray16(cell.Data)\n\t\ti, ok := arrayIndex(a, lo)\n\t\tif !ok {\n\t\t\treturn false, nil\n\t\t} else if len(a) == 1 {\n\t\t\treturn true, c.deleteLeafCell(cell.Key)\n\t\t}\n\n\t\t// Copy container data and remove new value.\n\t\tother := c.array[:len(a)-1]\n\t\tcopy(other[:i], a[:i])\n\t\tcopy(other[i:], a[i+1:])\n\t\treturn true, c.putLeafCell(leafCell{Key: cell.Key, Type: ContainerTypeArray, ElemN: len(other), BitN: cell.BitN - 1, Data: fromArray16(other)})\n\n\tcase ContainerTypeRLE:\n\t\tr := toInterval16(cell.Data)\n\t\ti, contains := roaring.BinSearchRuns(lo, r)\n\t\tif !contains {\n\t\t\treturn false, nil\n\t\t}\n\t\t// INVAR: lo is in run[i]\n\t\tcopy(c.rle[:], r)\n\t\truns := c.rle[:len(r)]\n\n\t\tif lo == runs[i].Last && lo == runs[i].Start {\n\t\t\truns = append(runs[:i], runs[i+1:]...)\n\t\t} else if lo == runs[i].Last {\n\t\t\truns[i].Last--\n\t\t} else if lo == c.rle[i].Start {\n\t\t\truns[i].Start++\n\t\t} else if lo > runs[i].Start {\n\t\t\t// INVAR: Start < lo < Last.\n\t\t\t// We remove lo, so split into two runs:\n\t\t\tlast := runs[i].Last\n\t\t\truns[i].Last = lo - 1\n\t\t\t// INVAR: runs[:i] is correct, but still need to insert the new interval at i+1.\n\t\t\truns = 
append(runs, roaring.Interval16{})\n\t\t\t// copy the tail first\n\t\t\tcopy(runs[i+2:], runs[i+1:])\n\t\t\t// overwrite with the new interval.\n\t\t\truns[i+1] = roaring.Interval16{Start: lo + 1, Last: last}\n\t\t}\n\t\tif len(runs) == 0 {\n\t\t\treturn true, c.deleteLeafCell(cell.Key)\n\t\t}\n\t\treturn true, c.putLeafCell(leafCell{Key: cell.Key, Type: ContainerTypeRLE, ElemN: len(runs), BitN: cell.BitN - 1, Data: fromInterval16(runs)})\n\n\tcase ContainerTypeBitmapPtr:\n\t\tpgno, bm, err := c.tx.leafCellBitmap(toPgno(cell.Data))\n\t\tif err != nil {\n\t\t\treturn false, errors.Wrap(err, \"cursor.add\")\n\t\t}\n\t\ta := cloneArray64(bm)\n\t\tif a[lo/64]&(1<<uint64(lo%64)) == 0 {\n\t\t\t// not present.\n\t\t\treturn false, nil\n\t\t}\n\n\t\t// clear the bit\n\t\ta[lo/64] &^= 1 << uint64(lo%64)\n\t\tcell.BitN--\n\n\t\tif cell.BitN == 0 {\n\t\t\tif err := c.tx.freePgno(pgno); err != nil {\n\t\t\t\treturn false, err\n\t\t\t}\n\t\t\treturn true, c.deleteLeafCell(cell.Key)\n\t\t}\n\n\t\t// shrink if we've gotten small.\n\t\tif cell.BitN <= ArrayMaxSize {\n\t\t\tcbm := roaring.NewContainerBitmap(cell.BitN, a)\n\t\t\t// convert to array\n\t\t\tcbm = roaring.Optimize(cbm)\n\n\t\t\tleafCell1 := ConvertToLeafArgs(cell.Key, cbm)\n\t\t\t// ConvertToLeafArgs returns leafCell1 with BitN and ElemN updated.\n\t\t\treturn true, c.putLeafCell(leafCell1)\n\t\t}\n\n\t\t// rewrite page, still as a bitmap.\n\t\tif err := c.tx.writeBitmapPage(pgno, fromArray64(a)); err != nil {\n\t\t\treturn false, err\n\t\t}\n\t\treturn true, c.putLeafCell(cell)\n\n\tdefault:\n\t\treturn false, fmt.Errorf(\"rbf.Cursor.Add(): invalid container type: %d\", cell.Type)\n\t}\n}", "title": "" }, { "docid": "4b6e78a3a54bd11e12ae960bc2648979", "score": "0.48461795", "text": "func (c *container) remove(vm *VirtualMachine) {\n\tif c.id == \"\" {\n\t\treturn\n\t}\n\n\targs := [][]string{\n\t\t{\"rm\", \"-v\", \"-f\", c.id},\n\t\t{\"volume\", \"rm\", \"-f\", c.name},\n\t}\n\n\tfor i := range args {\n\t\tcmd := exec.Command(\"docker\", args[i]...)\n\t\terr := cmd.Run()\n\t\tif err != nil {\n\t\t\tlog.Printf(\"%s %s: %s\", vm.Name, cmd.Args, err)\n\t\t}\n\t}\n\n\tc.id = \"\"\n}", "title": "" }, { "docid": "def27afd7b9daed93608b9a48d9157e5", "score": "0.48342484", "text": "func (m *taskManager) remove(containerID string, task *Task) {\n\tdelete(m.tasks, containerID)\n\ttask.cancel()\n}", "title": "" }, { "docid": "791f7fb3bc382044c2d979bb488f15d6", "score": "0.4813352", "text": "func (room *Room) Remove(id string) error {\n\troom.mutex.Lock()\n\tdefer room.mutex.Unlock()\n\n\tif client, ok := room.clients[id]; ok {\n\t\terr := client.Close()\n\t\tif err != nil {\n\t\t\treturn nil\n\t\t}\n\t\tdelete(room.clients, client.ID)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "ad78a54bc6e1cc70e24e1ab4d1ed3140", "score": "0.48058614", "text": "func (d *stiDocker) RemoveContainer(id string) error {\n\topts := docker.RemoveContainerOptions{\n\t\tID: id,\n\t\tRemoveVolumes: true,\n\t\tForce: true,\n\t}\n\treturn d.client.RemoveContainer(opts)\n}", "title": "" }, { "docid": "8a0de63517e3d16812ee96ea804afe9c", "score": "0.47886452", "text": "func (c *Canvas) Delete(it Item) {\n\tremoved := false\n\tc.Atomically(func(flush FlushFunc) {\n\t\tvar next *list.Element\n\t\tfor e := c.items.Front(); e != nil; e = next {\n\t\t\tnext = e.Next()\n\t\t\tif e.Value.(Item) == it {\n\t\t\t\tc.items.Remove(e)\n\t\t\t\tflush(it.Bbox(), nil)\n\t\t\t\tremoved = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif removed {\n\t\t\tit.SetContainer(NullBacking())\n\t\t} else 
{\n\t\t\tlog.Printf(\"item %T not removed\", it)\n\t\t}\n\t})\n}", "title": "" }, { "docid": "f67e34745be930f2a3ca3096adea96d7", "score": "0.47794446", "text": "func (fc *FetchCache) Clear(id string) {\n\tfc.Lock(id)\n\tdefer fc.Unlock(id)\n\tif _, found := fc.items[id]; !found {\n\t\treturn\n\t}\n\n\tdelete(fc.items, id)\n}", "title": "" }, { "docid": "9f6a061c81c9039fc2b371967d18adcd", "score": "0.47765833", "text": "func (m *Map) DeleteID(id backend.ID) {\n\tm.m.Lock()\n\tdefer m.m.Unlock()\n\n\tpos, _, err := m.find(Blob{ID: id}, false)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tm.list = append(m.list[:pos], m.list[pos+1:]...)\n}", "title": "" } ]
9fccbbc8cd618ef70a987b3287043017
Specifies the maximum time to attempt to reconcile the cluster git repository source with the remote. Defaults to `600`.
[ { "docid": "3f71504e93a2047f8452a22fb29042a5", "score": "0.44608796", "text": "func (o FluxConfigurationGitRepositoryPtrOutput) TimeoutInSeconds() pulumi.IntPtrOutput {\n\treturn o.ApplyT(func(v *FluxConfigurationGitRepository) *int {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.TimeoutInSeconds\n\t}).(pulumi.IntPtrOutput)\n}", "title": "" } ]
[ { "docid": "f4949f62a8d067a5c2ac5a9c6c08cee1", "score": "0.5101107", "text": "func (j *DSGit) CalculateTimeToReset(ctx *Ctx, rateLimit, rateLimitReset int) (seconds int) {\n\tseconds = rateLimitReset\n\treturn\n}", "title": "" }, { "docid": "b6a55a4ac646a3f26a867666839abadd", "score": "0.50380546", "text": "func FetchMaxWait(timeout time.Duration) FetchOpt {\n\treturn func(req *pullRequest) error {\n\t\tif timeout <= 0 {\n\t\t\treturn fmt.Errorf(\"%w: timeout value must be greater than 0\", ErrInvalidOption)\n\t\t}\n\t\treq.Expires = timeout\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "3d9251fc54172582885d866db502184c", "score": "0.48413748", "text": "func testSyncWithMaxDuration(t *testing.T, cutoffMode fs.CutoffMode) {\n\tctx := context.Background()\n\tctx, ci := fs.AddConfig(ctx)\n\tif *fstest.RemoteName != \"\" {\n\t\tt.Skip(\"Skipping test on non local remote\")\n\t}\n\tr := fstest.NewRun(t)\n\n\tmaxDuration := 250 * time.Millisecond\n\tci.MaxDuration = maxDuration\n\tci.CutoffMode = cutoffMode\n\tci.CheckFirst = true\n\tci.OrderBy = \"size\"\n\tci.Transfers = 1\n\tci.Checkers = 1\n\tbytesPerSecond := 10 * 1024\n\taccounting.TokenBucket.SetBwLimit(fs.BwPair{Tx: fs.SizeSuffix(bytesPerSecond), Rx: fs.SizeSuffix(bytesPerSecond)})\n\tdefer accounting.TokenBucket.SetBwLimit(fs.BwPair{Tx: -1, Rx: -1})\n\n\t// write one small file which we expect to transfer and one big one which we don't\n\tfile1 := r.WriteFile(\"file1\", string(make([]byte, 16)), t1)\n\tfile2 := r.WriteFile(\"file2\", string(make([]byte, 50*1024)), t1)\n\tr.CheckLocalItems(t, file1, file2)\n\tr.CheckRemoteItems(t)\n\n\taccounting.GlobalStats().ResetCounters()\n\tstartTime := time.Now()\n\terr := Sync(ctx, r.Fremote, r.Flocal, false)\n\trequire.True(t, errors.Is(err, ErrorMaxDurationReached))\n\n\tif cutoffMode == fs.CutoffModeHard {\n\t\tr.CheckRemoteItems(t, file1)\n\t\tassert.Equal(t, int64(1), accounting.GlobalStats().GetTransfers())\n\t} else {\n\t\tr.CheckRemoteItems(t, file1, file2)\n\t\tassert.Equal(t, int64(2), accounting.GlobalStats().GetTransfers())\n\t}\n\n\telapsed := time.Since(startTime)\n\tconst maxTransferTime = 20 * time.Second\n\n\twhat := fmt.Sprintf(\"expecting elapsed time %v between %v and %v\", elapsed, maxDuration, maxTransferTime)\n\tassert.True(t, elapsed >= maxDuration, what)\n\tassert.True(t, elapsed < maxTransferTime, what)\n}", "title": "" }, { "docid": "4b11677779407aaa3a968cba9c778eca", "score": "0.4834178", "text": "func (o FluxConfigurationGitRepositoryOutput) TimeoutInSeconds() pulumi.IntPtrOutput {\n\treturn o.ApplyT(func(v FluxConfigurationGitRepository) *int { return v.TimeoutInSeconds }).(pulumi.IntPtrOutput)\n}", "title": "" }, { "docid": "001f0387f75f5d149a105e19cdee534c", "score": "0.4788926", "text": "func (c *Config) ReconnectTimeout(seconds uint) *Config {\n\tc.GetContext().ReconnectTimeout = strconv.FormatUint(uint64(seconds), 10)\n\treturn c\n}", "title": "" }, { "docid": "fc21809760dd1d58ff4dd5331ff5d369", "score": "0.47739926", "text": "func WithMaxConcurrentReconciles(max int) Option {\n\treturn func(r *Reconciler) error {\n\t\tif max < 1 {\n\t\t\treturn errors.New(\"maxConcurrentReconciles must be at least 1\")\n\t\t}\n\t\tr.maxConcurrentReconciles = max\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "fc21809760dd1d58ff4dd5331ff5d369", "score": "0.47739926", "text": "func WithMaxConcurrentReconciles(max int) Option {\n\treturn func(r *Reconciler) error {\n\t\tif max < 1 {\n\t\t\treturn errors.New(\"maxConcurrentReconciles must be at least 
1\")\n\t\t}\n\t\tr.maxConcurrentReconciles = max\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "6f0a236b89eaf9f69dfffeefe49b515f", "score": "0.470563", "text": "func (j *DSGitHub) CalculateTimeToReset(ctx *Ctx, rateLimit, rateLimitReset int) (seconds int) {\n\tseconds = rateLimitReset\n\treturn\n}", "title": "" }, { "docid": "6535cd24c2319f54cc1bd5dec7bcea43", "score": "0.46926916", "text": "func WithMaxReconnectTimes(n int) core.BuildOption {\n\treturn func(o interface{}) {\n\t\to.(*serverOptions).MaxReconnectTimes = n\n\t}\n}", "title": "" }, { "docid": "893b50ba341d69d35a4697d422f959db", "score": "0.467493", "text": "func RemoteTimeout(s *scen.Scenario, stdout, stderr io.Writer, addr string, timeout time.Duration) (float64, error) {\n\tclient, err := cloudlus.Dial(addr)\n\tif err != nil {\n\t\treturn math.Inf(1), err\n\t}\n\tdefer client.Close()\n\n\texecfn := func(scn *scen.Scenario) (float64, error) {\n\t\tj, err := BuildRemoteJob(scn, objfile)\n\t\tif err != nil {\n\t\t\treturn math.Inf(1), fmt.Errorf(\"failed to build remote job: %v\", err)\n\t\t}\n\t\tj.Timeout = timeout\n\n\t\t// closing this channel might cause a send on a closed channel if the\n\t\t// timeout in the select below fires before the goroutine completes.\n\t\tdone := make(chan bool, 1)\n\t\tgo func() {\n\t\t\tj, err = client.Run(j)\n\t\t\tdone <- true\n\t\t}()\n\n\t\tselect {\n\t\tcase <-done:\n\t\t\tif err != nil {\n\t\t\t\treturn math.Inf(1), fmt.Errorf(\"job execution failed: %v\", err)\n\t\t\t}\n\t\tcase <-time.After(j.Timeout + 1*time.Hour):\n\t\t\treturn math.Inf(1), fmt.Errorf(\"job rpc timeout limit reached\")\n\t\t}\n\n\t\tif err := writeLogs(j, stdout, stderr); err != nil {\n\t\t\treturn math.Inf(1), fmt.Errorf(\"job logging failed: %v\", err)\n\t\t}\n\n\t\tdata, err := client.RetrieveOutfileData(j, objfile)\n\t\tif err != nil {\n\t\t\treturn math.Inf(1), fmt.Errorf(\"couldn't find objective result file: %v\", err)\n\t\t}\n\n\t\tval, err := strconv.ParseFloat(strings.TrimSpace(string(data)), 64)\n\t\tif err != nil {\n\t\t\treturn math.Inf(1), fmt.Errorf(\"invalid objective string '%s': %v\", data, err)\n\t\t}\n\t\treturn val, nil\n\t}\n\n\treturn s.CalcTotalObjective(execfn)\n}", "title": "" }, { "docid": "de3816e539b80b5ea6936787b208386e", "score": "0.46128798", "text": "func TestPledgeMaxConcurrentGet(t *testing.T) {\n\trequire.NoError(t, os.Setenv(\"GET_2K_MAX_CONCURRENT\", \"1\"))\n\tt.Cleanup(func() {\n\t\trequire.NoError(t, os.Unsetenv(\"GET_2K_MAX_CONCURRENT\"))\n\t})\n\n\tkit.QuietMiningLogs()\n\n\tblockTime := 50 * time.Millisecond\n\n\tctx, cancel := context.WithCancel(context.Background())\n\tdefer cancel()\n\n\t_, miner, ens := kit.EnsembleMinimal(t, kit.NoStorage()) // no mock proofs\n\tens.InterconnectAll().BeginMiningMustPost(blockTime)\n\n\t// separate sealed and storage paths so that finalize move needs to happen\n\tminer.AddStorage(ctx, t, func(meta *storiface.LocalStorageMeta) {\n\t\tmeta.CanSeal = true\n\t})\n\tminer.AddStorage(ctx, t, func(meta *storiface.LocalStorageMeta) {\n\t\tmeta.CanStore = true\n\t})\n\n\t// NOTE: This test only repros the issue when Fetch tasks take ~10s, there's\n\t// no great way to do that in a non-horribly-hacky way\n\n\t/* The horribly hacky way:\n\n\tdiff --git a/storage/sealer/sched_worker.go b/storage/sealer/sched_worker.go\n\tindex 35acd755d..76faec859 100644\n\t--- a/storage/sealer/sched_worker.go\n\t+++ b/storage/sealer/sched_worker.go\n\t@@ -513,6 +513,10 @@ func (sw *schedWorker) startProcessingTask(req *WorkerRequest) error {\n\t 
tw.start()\n\t err = <-werr\n\n\t+ if req.TaskType == sealtasks.TTFetch {\n\t+ time.Sleep(10 * time.Second)\n\t+ }\n\t+\n\t select {\n\t case req.ret <- workerResponse{err: err}:\n\t case <-req.Ctx.Done():\n\n\t*/\n\n\tminer.PledgeSectors(ctx, 3, 0, nil)\n}", "title": "" }, { "docid": "eabc2a541f68b88fabe1c6933d61ef2a", "score": "0.4605297", "text": "func (o *GetRedbeamsFlowLogsProgressByResourceCrnParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "65b1f55e1fc72f1c1ad9e04f38a07a4b", "score": "0.4583391", "text": "func (dataChannel *DataChannel) CalculateRetransmissionTimeout(log log.T, streamingMessage StreamingMessage) {\n\tnewRoundTripTime := float64(GetRoundTripTime(streamingMessage))\n\n\tdataChannel.RoundTripTimeVariation = ((1 - config.RTTVConstant) * dataChannel.RoundTripTimeVariation) +\n\t\t(config.RTTVConstant * math.Abs(dataChannel.RoundTripTime-newRoundTripTime))\n\n\tdataChannel.RoundTripTime = ((1 - config.RTTConstant) * dataChannel.RoundTripTime) +\n\t\t(config.RTTConstant * newRoundTripTime)\n\n\tdataChannel.RetransmissionTimeout = time.Duration(dataChannel.RoundTripTime +\n\t\tmath.Max(float64(config.ClockGranularity), float64(4*dataChannel.RoundTripTimeVariation)))\n\n\t// Ensure RetransmissionTimeout do not exceed maximum timeout defined\n\tif dataChannel.RetransmissionTimeout > config.MaxTransmissionTimeout {\n\t\tdataChannel.RetransmissionTimeout = config.MaxTransmissionTimeout\n\t}\n}", "title": "" }, { "docid": "3b445ec447a504cf574d69844a56db84", "score": "0.4541431", "text": "func (cp *statusConnectionPool) scheduleResurrect(c *Connection) {\n\tfactor := math.Min(float64(c.Failures-1), float64(defaultResurrectTimeoutFactorCutoff))\n\ttimeout := time.Duration(defaultResurrectTimeoutInitial.Seconds() * math.Exp2(factor) * float64(time.Second))\n\tif debugLogger != nil {\n\t\tdebugLogger.Logf(\"Resurrect %s (failures=%d, factor=%1.1f, timeout=%s) in %s\\n\", c.URL, c.Failures, factor, timeout, c.DeadSince.Add(timeout).Sub(time.Now().UTC()).Truncate(time.Second))\n\t}\n\n\ttime.AfterFunc(timeout, func() {\n\t\tcp.Lock()\n\t\tdefer cp.Unlock()\n\n\t\tc.Lock()\n\t\tdefer c.Unlock()\n\n\t\tif !c.IsDead {\n\t\t\tif debugLogger != nil {\n\t\t\t\tdebugLogger.Logf(\"Already resurrected %s\\n\", c.URL)\n\t\t\t}\n\t\t\treturn\n\t\t}\n\n\t\tcp.resurrect(c, true)\n\t})\n}", "title": "" }, { "docid": "df74108c1783979012bb5d1c11de96cc", "score": "0.45283017", "text": "func WithTimeout(duration time.Duration) ReconcilerOption {\n\treturn func(r *Reconciler) {\n\t\tr.timeout = duration\n\t}\n}", "title": "" }, { "docid": "57bbabccb2ddd5dede9936fcebb4dade", "score": "0.45114926", "text": "func (o *GetRepository15Params) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "f7e1cd63071f680a9162b8656d9d9e8b", "score": "0.44866917", "text": "func SetConnectTimeout(timeout int) {\n\tsshOptions[\"ConnectTimeout\"] = fmt.Sprintf(\"%d\", timeout)\n}", "title": "" }, { "docid": "9d76f18390b637c06b8a0d2b6d312412", "score": "0.44799358", "text": "func (r *ReconcileTargetPool) Reconcile(request reconcile.Request) (reconcile.Result, error) {\n\tvar finalizer = utils.Finalizer\n\tlog.Printf(\"Reconciling TargetPool %s/%s\\n\", request.Namespace, request.Name)\n\t// Fetch the Address r.k8sObject\n\terr := r.client.Get(context.TODO(), request.NamespacedName, r.k8sObject)\n\tif err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\tlog.Printf(\"Request object not found, could have been deleted after reconcile 
request.\")\n\t\t\t// Owned objects are automatically garbage collected. For additional cleanup logic use finalizers.\n\t\t\t// Return and don't requeue\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t\tlog.Printf(\"Error reading the object - requeue the request %s.\", err.Error())\n\t\treturn r.reconcileResult, err\n\t}\n\tvar kind = r.k8sObject.TypeMeta.Kind\n\n\t// Define a new instance object\n\tr.spec = r.k8sObject.Spec\n\n\t// fetch annotations\n\tr.annotations = r.k8sObject.GetAnnotations()\n\n\t// update requeue duration based on annotation\n\tduration, err := time.ParseDuration(utils.GetAnnotation(r.annotations, utils.ReconcilePeriodAnnotation))\n\tif err == nil {\n\t\tr.reconcileResult.RequeueAfter = duration\n\t}\n\n\t// log into GCE using project\n\tif utils.GetAnnotation(r.annotations, utils.ProjectIDAnnotation) != \"\" {\n\t\tr.gce, err = gce.New(utils.ProjectIDAnnotation)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}\n\n\t// check if the resource is set to be deleted\n\t// stolen from https://github.com/operator-framework/operator-sdk/blob/fc9b6b1277b644d152534b22614351aa3d1405ba/pkg/ansible/controller/reconcile.go\n\tdeleted := r.k8sObject.GetDeletionTimestamp() != nil\n\tpendingFinalizers := r.k8sObject.GetFinalizers()\n\tfinalizerExists := len(pendingFinalizers) > 0\n\tif !finalizerExists && !deleted && !utils.Contains(pendingFinalizers, finalizer) {\n\t\tlog.Printf(\"Adding finalizer %s to resource\", finalizer)\n\t\tfinalizers := append(pendingFinalizers, finalizer)\n\t\tr.k8sObject.SetFinalizers(finalizers)\n\t\terr := r.client.Update(context.TODO(), r.k8sObject)\n\t\tif err != nil {\n\t\t\treturn r.reconcileResult, err\n\t\t}\n\t}\n\n\t// fetch the corresponding object from GCE\n\tgceObject, err := r.read()\n\tif err != nil {\n\t\treturn r.reconcileResult, err\n\t}\n\t// if it doesn't existin in gcp and is set to be deleted,\n\t// then we can strip out the finalizer to let k8s actually delete it.\n\tif gceObject == nil && deleted && finalizerExists {\n\t\tlog.Printf(\"reconcile: remove finalizer %s from %s/%s\", finalizer, r.k8sObject.Namespace, r.k8sObject.Name)\n\t\tfinalizers := []string{}\n\t\tfor _, pendingFinalizer := range pendingFinalizers {\n\t\t\tif pendingFinalizer != finalizer {\n\t\t\t\tfinalizers = append(finalizers, pendingFinalizer)\n\t\t\t}\n\t\t}\n\t\tr.k8sObject.SetFinalizers(finalizers)\n\t\terr := r.client.Update(context.TODO(), r.k8sObject)\n\t\tif err != nil {\n\t\t\treturn r.reconcileResult, err\n\t\t}\n\t\t//todo fix this to stop requeuing\n\t\tlog.Printf(\"reconcile: Successfully deleted %s/%s, do not requeue\", r.k8sObject.Namespace, r.k8sObject.Name)\n\t\treturn reconcile.Result{Requeue: false}, nil\n\t\t//r.reconcileResult.RequeueAfter, _ = time.ParseDuration(\"10m\")\n\t}\n\t// if not deleted and gceObject doesn't exist we can create one.\n\tif !deleted && gceObject == nil {\n\t\tlog.Printf(\"reconcile: creating %s instance %s\", kind, r.spec.Name)\n\t\terr := r.create()\n\t\treturn r.reconcileResult, err\n\t}\n\n\tif gceObject != nil {\n\t\t//spew.Dump(gceObject)\n\t\tif deleted && finalizerExists {\n\t\t\tlog.Printf(\"reconcile: time to delete %s\", r.spec.Name)\n\t\t\terr := r.destroy()\n\t\t\tif err != nil {\n\t\t\t\tr.reconcileResult.RequeueAfter, _ = time.ParseDuration(\"5s\")\n\t\t\t\treturn r.reconcileResult, err\n\t\t\t}\n\t\t\tr.reconcileResult.RequeueAfter, _ = time.ParseDuration(\"5s\")\n\t\t\treturn r.reconcileResult, err\n\t\t}\n\t\tlog.Printf(\"reconcile: resource %s already exists\", r.spec.Name)\n\t\tif 
r.k8sObject.Status.Status == \"READY\" {\n\t\t\tlog.Printf(\"reconcile: successfully created %s/%s, change requeue to 10mins so we don't stampede gcp.\", r.k8sObject.Namespace, r.k8sObject.Name)\n\t\t\tr.reconcileResult.RequeueAfter, _ = time.ParseDuration(\"10m\")\n\t\t\treturn r.reconcileResult, nil\n\t\t}\n\t\tif r.k8sObject.Status.Status == \"FAILED\" {\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t\t// update our k8s resource to include status from resource\n\t\tif gceObject.SelfLink != \"\" {\n\t\t\tr.k8sObject.Status.Status = \"READY\"\n\t\t}\n\t\tr.k8sObject.Status.SelfLink = gceObject.SelfLink\n\t\tr.k8sObject.Status.CreationTimestamp = gceObject.CreationTimestamp\n\t\tr.k8sObject.Status.Id = gceObject.Id\n\t\tr.k8sObject.Status.Region = gceObject.Region\n\t\tlog.Printf(\"reconcile: update k8s status for %s/%s\", r.k8sObject.Namespace, r.k8sObject.Name)\n\t\terr = r.client.Update(context.TODO(), r.k8sObject)\n\t\tif err != nil {\n\t\t\treturn r.reconcileResult, err\n\t\t}\n\t\treturn r.reconcileResult, nil\n\n\t}\n\treturn reconcile.Result{}, nil\n}", "title": "" }, { "docid": "c145d72890981d375b4de902ea6c5d76", "score": "0.44620788", "text": "func (r *CronJobReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error)", "title": "" }, { "docid": "91e2a2879fd32039ae11accc49273705", "score": "0.4424605", "text": "func (r *ReconcileReleaseManager) Reconcile(request reconcile.Request) (reconcile.Result, error) {\n\tstart := time.Now()\n\ttraceID := uuid.New().String()\n\treqLog := clog.WithValues(\"Request.Namespace\", request.Namespace, \"Request.Name\", request.Name, \"Trace\", traceID)\n\tdefer func() {\n\t\treqLog.Info(\"Finished releasemanager reconcile\", \"Elapsed\", time.Since(start))\n\t}()\n\treqLog.Info(\"Reconciling ReleaseManager\")\n\tctx := context.TODO()\n\n\t// Fetch the ReleaseManager instance\n\trm := &picchuv1alpha1.ReleaseManager{}\n\tif err := r.client.Get(ctx, request.NamespacedName, rm); err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\t// Request object not found, could have been deleted after reconcile request.\n\t\t\t// Owned objects are automatically garbage collected. 
For additional cleanup logic use finalizers.\n\t\t\t// Return and don't requeue\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t\t// Error reading the object - requeue the request.\n\t\treturn r.requeue(reqLog, err)\n\t}\n\tr.scheme.Default(rm)\n\n\trmLog := reqLog.WithValues(\"App\", rm.Spec.App, \"Fleet\", rm.Spec.Fleet, \"Target\", rm.Spec.Target)\n\trmLog.Info(\"Reconciling Existing ReleaseManager\")\n\n\tif lastUpdated := rm.Status.LastUpdated; lastUpdated != nil {\n\t\treconcileInterval.With(prometheus.Labels{\n\t\t\t\"app\": rm.Spec.App,\n\t\t\t\"target\": rm.Spec.Target,\n\t\t}).Observe(start.Sub(lastUpdated.Time).Seconds())\n\t}\n\n\tclusters, err := r.getClustersByFleet(ctx, rm.Namespace, rm.Spec.Fleet)\n\tif err != nil {\n\t\treturn r.requeue(rmLog, fmt.Errorf(\"Failed to get clusters for fleet %s: %w\", rm.Spec.Fleet, err))\n\t}\n\tclusterInfo := ClusterInfoList{}\n\tfor _, cluster := range clusters {\n\t\tvar scalingFactor = 0.0\n\t\tif cluster.Spec.ScalingFactor != nil {\n\t\t\tscalingFactor = *cluster.Spec.ScalingFactor\n\t\t}\n\t\tclusterInfo = append(clusterInfo, ClusterInfo{\n\t\t\tName: cluster.Name,\n\t\t\tLive: !cluster.Spec.HotStandby,\n\t\t\tScalingFactor: scalingFactor,\n\t\t})\n\t}\n\tif clusterInfo.ClusterCount(true) == 0 {\n\t\treturn r.requeue(rmLog, err)\n\t}\n\tplanApplier, err := r.newPlanApplier(ctx, rmLog, clusters)\n\tif err != nil {\n\t\treturn r.requeue(rmLog, err)\n\t}\n\n\tdeliveryClusters, err := r.getClustersByFleet(ctx, rm.Namespace, r.config.ServiceLevelsFleet)\n\tif err != nil {\n\t\treturn r.requeue(rmLog, fmt.Errorf(\"Failed to get delivery clusters for fleet %s: %w\", r.config.ServiceLevelsFleet, err))\n\t}\n\tdeliveryClusterInfo := ClusterInfoList{}\n\tfor _, cluster := range deliveryClusters {\n\t\tvar scalingFactor = 0.0\n\t\tif cluster.Spec.ScalingFactor != nil {\n\t\t\tscalingFactor = *cluster.Spec.ScalingFactor\n\t\t}\n\t\tdeliveryClusterInfo = append(deliveryClusterInfo, ClusterInfo{\n\t\t\tName: cluster.Name,\n\t\t\tLive: !cluster.Spec.HotStandby,\n\t\t\tScalingFactor: scalingFactor,\n\t\t})\n\t}\n\n\tdeliveryApplier, err := r.newPlanApplier(ctx, rmLog, deliveryClusters)\n\tif err != nil {\n\t\treturn r.requeue(rmLog, err)\n\t}\n\n\tobserver, err := r.newObserver(ctx, rmLog, clusters)\n\tif err != nil {\n\t\treturn r.requeue(rmLog, err)\n\t}\n\n\trevisions, err := r.getRevisions(ctx, rmLog, request.Namespace, rm.Spec.Fleet, rm.Spec.App, rm.Spec.Target)\n\tif err != nil {\n\t\treturn r.requeue(rmLog, err)\n\t}\n\n\tfaults, err := r.getFaults(ctx, rmLog, request.Namespace, rm.Spec.App, rm.Spec.Target)\n\tif err != nil {\n\t\treturn r.requeue(rmLog, err)\n\t}\n\n\tobservation, err := observer.Observe(ctx, rm.TargetNamespace())\n\tif err != nil {\n\t\treturn r.requeue(rmLog, err)\n\t}\n\n\tic := &IncarnationController{\n\t\tdeliveryClient: r.client,\n\t\tdeliveryApplier: deliveryApplier,\n\t\tplanApplier: planApplier,\n\t\tlog: rmLog,\n\t\treleaseManager: rm,\n\t\tclusterInfo: clusterInfo,\n\t}\n\n\tsyncer := ResourceSyncer{\n\t\tdeliveryClient: r.client,\n\t\tdeliveryApplier: deliveryApplier,\n\t\tplanApplier: planApplier,\n\t\tobserver: observer,\n\t\tinstance: rm,\n\t\tincarnations: newIncarnationCollection(ic, revisions, observation, r.config),\n\t\treconciler: r,\n\t\tlog: rmLog,\n\t\tpicchuConfig: r.config,\n\t\tfaults: faults,\n\t}\n\n\tif !rm.IsDeleted() {\n\t\trmLog.Info(\"Sync'ing releasemanager\")\n\t\trs, err := syncer.sync(ctx)\n\t\tif err != nil {\n\t\t\treturn r.requeue(rmLog, err)\n\t\t}\n\t\trm.Status.Revisions = 
rs\n\t\ttimeNow := metav1.NewTime(time.Now())\n\t\trm.Status.LastUpdated = &timeNow\n\t\tif err := utils.UpdateStatus(ctx, r.client, rm); err != nil {\n\t\t\treturn r.requeue(rmLog, err)\n\t\t}\n\t\trmLog.Info(\"Updated releasemanager status\", \"Content\", rm.Status, \"Type\", \"ReleaseManager.Status\")\n\t\treturn r.requeue(rmLog, nil)\n\t} else if !rm.IsFinalized() {\n\t\trmLog.Info(\"Deleting ServiceLevels\")\n\t\tif err := syncer.delServiceLevels(ctx); err != nil {\n\t\t\treturn r.requeue(rmLog, err)\n\t\t}\n\n\t\trmLog.Info(\"Deleting releasemanager\")\n\t\tif err := syncer.del(ctx); err != nil {\n\t\t\treturn r.requeue(rmLog, err)\n\t\t}\n\n\t\trm.Finalize()\n\t\terr := r.client.Update(ctx, rm)\n\t\treturn r.requeue(rmLog, err)\n\t}\n\n\trmLog.Info(\"ReleaseManager is deleted and finalized\")\n\treturn reconcile.Result{}, nil\n}", "title": "" }, { "docid": "7479bfd0d50f821e6638101025a70f98", "score": "0.44193137", "text": "func (r *CSIPowerMaxRevProxyReconciler) Reconcile(ctx context.Context, request ctrl.Request) (ctrl.Result, error) {\n\treqLogger := log.WithValues(\"Request.Namespace\", request.Namespace, \"Request.Name\", request.Name)\n\treqLogger.Info(\"Reconciling CSIPowerMaxRevProxy\")\n\tretryInterval := constants.DefaultRetryInterval\n\treqLogger.Info(\"################Starting Reconcile##############\")\n\t// Fetch the CSIPowerMaxRevProxy instance\n\tinstance := &storagev1.CSIPowerMaxRevProxy{}\n\terr := r.Client.Get(context.TODO(), request.NamespacedName, instance)\n\tif err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\t// Request object not found, could have been deleted after reconcile request.\n\t\t\t// Owned objects are automatically garbage collected. For additional cleanup logic use finalizers.\n\t\t\t// Return and don't requeue\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t\t// Error reading the object - requeue the request.\n\t\treturn reconcile.Result{}, err\n\t}\n\tstatus := instance.Status\n\t// newStatus is the status object which is modified and finally used to update the Status\n\t// in case the instance or the status is updated\n\tnewStatus := status.DeepCopy()\n\t// oldStatus is the previous status of the CR instance\n\t// This is used to compare if there is a need to update the status\n\toldStatus := status.DeepCopy()\n\toldState := oldStatus.State\n\treqLogger.Info(fmt.Sprintf(\"Proxy was previously in (%s) state\", oldState))\n\t// Check if the proxy spec has changed\n\texpectedHash, actualHash, changed := utils.ProxyChanged(instance)\n\tif changed {\n\t\tmessage := fmt.Sprintf(\"Proxy spec has changed (%d vs %d)\", actualHash, expectedHash)\n\t\tnewStatus.ProxyHash = expectedHash\n\t\treqLogger.Info(message)\n\t} else {\n\t\treqLogger.Info(\"No changes detected in the proxy spec\")\n\t}\n\tcheckStateOnly := false\n\tswitch oldState {\n\tcase constants.Running:\n\t\tfallthrough\n\tcase constants.Succeeded:\n\t\tif changed {\n\t\t\t// If the proxy hash has changed, we need to update the proxy again\n\t\t\tnewStatus.State = constants.Updating\n\t\t\treqLogger.Info(\"Changed state to Updating as proxy spec changed\")\n\t\t} else {\n\t\t\t// Just check the state of the proxy and update status accordingly\n\t\t\treqLogger.Info(\"Recalculating proxy state(only) as there is no change in proxy spec\")\n\t\t\tcheckStateOnly = true\n\t\t}\n\tcase constants.InvalidConfig:\n\t\tfallthrough\n\tcase constants.Failed:\n\t\tif changed {\n\t\t\t// Do a reconcile as we detected a change\n\t\t\tnewStatus.State = constants.Updating\n\t\t} else 
{\n\t\t\treqLogger.Info(fmt.Sprintf(\"CR is in (%s) state. Reconcile request won't be requeued\",\n\t\t\t\tnewStatus.State))\n\t\t\treturn logBannerAndReturn(reconcile.Result{}, nil, reqLogger)\n\t\t}\n\tcase constants.NoState:\n\t\tnewStatus.State = constants.Updating\n\tcase constants.Updating:\n\t\treqLogger.Info(\"Proxy already in Updating state\")\n\t}\n\t// Check if proxy is in running state (only if the status was previously set to Succeeded or Running)\n\tif checkStateOnly {\n\t\treturn handleSuccess(context.TODO(), instance, r.Client, reqLogger, newStatus, oldStatus)\n\t}\n\tif changed {\n\t\t// Also update the status as we calculate the hash every time\n\t\tnewStatus.LastUpdate = setLastStatusUpdate(oldStatus, storagev1.Updating, \"\")\n\t\tupdateStatusError := updateStatus(context.TODO(), instance, r.Client, reqLogger, newStatus, oldStatus)\n\t\tif updateStatusError != nil {\n\t\t\tnewStatus.LastUpdate.ErrorMessage = updateStatusError.Error()\n\t\t\treqLogger.Info(fmt.Sprintf(\"\\n################End Reconcile %s %s##############\\n\",\n\t\t\t\t\"CSIPowerMaxReverseProxy\", request))\n\t\t\treturn logBannerAndReturn(reconcile.Result{Requeue: true, RequeueAfter: retryInterval}, updateStatusError, reqLogger)\n\t\t}\n\t}\n\t// Always validate the spec\n\terr = ValidateProxySpec(context.TODO(), r.Client, instance)\n\tif err != nil {\n\t\treturn handleValidationError(context.TODO(), instance, r.Client, reqLogger, err)\n\t}\n\t// Set the proxy status to updating\n\tnewStatus.State = constants.Updating\n\tsyncErr := SyncProxy(instance, r.Client, reqLogger)\n\tif syncErr == nil {\n\t\t// Mark the proxy state as succeeded\n\t\tnewStatus.State = constants.Succeeded\n\t\terrorMsg := \"\"\n\t\trunning, err := utils.CalculateProxyState(context.TODO(), ReverseProxyName, instance.Namespace, r.Client, newStatus)\n\t\tif err != nil {\n\t\t\terrorMsg = err.Error()\n\t\t}\n\t\tif running {\n\t\t\tnewStatus.State = constants.Running\n\t\t}\n\t\tnewStatus.LastUpdate = setLastStatusUpdate(oldStatus,\n\t\t\tutils.GetOperatorConditionTypeFromState(newStatus.State), errorMsg)\n\t\tupdateStatusError := updateStatus(context.TODO(), instance, r.Client, reqLogger, newStatus, oldStatus)\n\t\tif updateStatusError != nil {\n\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: retryInterval}, updateStatusError\n\t\t}\n\t\tif newStatus.State != constants.Running {\n\t\t\treturn logBannerAndReturn(reconcile.Result{Requeue: true, RequeueAfter: retryInterval}, nil, reqLogger)\n\t\t}\n\t\treturn logBannerAndReturn(reconcile.Result{}, nil, reqLogger)\n\t}\n\t// Failed to sync proxy deployment\n\t// Look at the last condition\n\t_, _ = utils.CalculateProxyState(context.TODO(), ReverseProxyName, instance.Namespace, r.Client, newStatus)\n\tnewStatus.LastUpdate = setLastStatusUpdate(oldStatus, storagev1.Error, syncErr.Error())\n\t// Check the last condition\n\tif oldStatus.LastUpdate.Condition == storagev1.Error {\n\t\treqLogger.Info(\" Proxy previously encountered an error\")\n\t\ttimeSinceLastConditionChange := metav1.Now().Sub(oldStatus.LastUpdate.Time.Time).Round(time.Second)\n\t\treqLogger.Info(fmt.Sprintf(\"Time since last condition change :%v\", timeSinceLastConditionChange))\n\t\tif timeSinceLastConditionChange >= constants.MaxRetryDuration {\n\t\t\t// Mark the proxy as failed and update the condition\n\t\t\tnewStatus.State = constants.Failed\n\t\t\tnewStatus.LastUpdate = setLastStatusUpdate(oldStatus,\n\t\t\t\tutils.GetOperatorConditionTypeFromState(newStatus.State), syncErr.Error())\n\t\t\t// This will 
trigger a reconcile again\n\t\t\t_ = updateStatus(context.TODO(), instance, r.Client, reqLogger, newStatus, oldStatus)\n\t\t\treturn logBannerAndReturn(reconcile.Result{Requeue: false}, nil, reqLogger)\n\t\t}\n\t\tretryInterval = time.Duration(math.Min(float64(timeSinceLastConditionChange.Nanoseconds()*2),\n\t\t\tfloat64(constants.MaxRetryInterval.Nanoseconds())))\n\t} else {\n\t\t_ = updateStatus(context.TODO(), instance, r.Client, reqLogger, newStatus, oldStatus)\n\t}\n\treqLogger.Info(fmt.Sprintf(\"Retry Interval: %v\", retryInterval))\n\n\t// Don't return an error here. Controller runtime will immediately requeue the request\n\t// Also the requeueAfter setting only is effective after an amount of time\n\treturn logBannerAndReturn(reconcile.Result{Requeue: true, RequeueAfter: retryInterval}, nil, reqLogger)\n}", "title": "" }, { "docid": "589da2b773897bf694234669df1358a0", "score": "0.4414657", "text": "func (o *GetDistroXOperationProgressByResourceCrnParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "fb65ce6371d4b2d3edce2251cc382fd0", "score": "0.44052774", "text": "func (c *Lock) Configure(config *config.ConfigParams) {\n\tc.retryTimeout = config.GetAsLongWithDefault(\"options.retry_timeout\", c.retryTimeout)\n}", "title": "" }, { "docid": "5d7b0f55e0e1ecd3f8e39c946b688f88", "score": "0.439205", "text": "func (g *testGenerator) limitRetarget(oldDiff, newDiff int64) int64 {\n\tmaxRetarget := g.params.RetargetAdjustmentFactor\n\tswitch {\n\tcase newDiff == 0:\n\t\tfallthrough\n\tcase (oldDiff / newDiff) > (maxRetarget - 1):\n\t\treturn oldDiff / maxRetarget\n\tcase (newDiff / oldDiff) > (maxRetarget - 1):\n\t\treturn oldDiff * maxRetarget\n\t}\n\n\treturn newDiff\n}", "title": "" }, { "docid": "2d409955dffaa4323ce99740625e1595", "score": "0.43726587", "text": "func (c *CmdExecutor) GlusterCliExecTimeout() int {\n\ttimeout := 1 + (int(c.GlusterCliTimeout())+1)/60\n\n\tif timeout < 10 {\n\t\ttimeout = 10\n\t}\n\n\treturn timeout\n}", "title": "" }, { "docid": "d4aeb49615d676f99ae54937ee5df338", "score": "0.43698558", "text": "func computeMaximumTimeout(cfg *gatewayv1.Timeouts) time.Duration {\n\tif cfg == nil {\n\t\treturn timeouts.DefaultTimeout\n\t}\n\n\tret := cfg.Default.AsDuration()\n\tfor _, e := range cfg.Overrides {\n\t\toverride := e.Timeout.AsDuration()\n\t\tif ret == 0 || override == 0 {\n\t\t\treturn 0\n\t\t}\n\n\t\tif override > ret {\n\t\t\tret = override\n\t\t}\n\t}\n\n\treturn ret\n}", "title": "" }, { "docid": "a24342c12e1a9f5f5ddca6850426f007", "score": "0.43373254", "text": "func (o *GetRemotesupportConnectemcParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "922dd530c540c0e4bc5bf5e86b135fdd", "score": "0.4319362", "text": "func (db *DB) SetConnMaxLifetime(d time.Duration) {\n\tdb.master.SetConnMaxLifetime(d)\n\n\topts := goworkers.Options{Workers: numWorkers}\n\tgw := goworkers.New(opts)\n\tdefer gw.Stop(false)\n\n\tdb.RLock()\n\tdefer db.RUnlock()\n\tfor _, readReplica := range db.readReplicas {\n\t\treadReplica := readReplica\n\t\tgw.Submit(func() {\n\t\t\treadReplica.SetConnMaxLifetime(d)\n\t\t})\n\t}\n}", "title": "" }, { "docid": "f9710e64f4a90cfe151adcaa542387a9", "score": "0.42757088", "text": "func (r *MerakiSourceReconciler) Reconcile(req ctrl.Request) (ctrl.Result, error) {\n\tctx := context.Background()\n\tlog := r.Log.WithValues(\"merakisource\", req.NamespacedName)\n\n\t// get meraki source resource\n\tvar source dnsv1alpha1.MerakiSource\n\tif err := r.Get(ctx, 
req.NamespacedName, &source); err != nil {\n\t\tif apierrs.IsNotFound(err) {\n\t\t\t// 404, wait for next notification\n\t\t\tlog.V(1).Info(\"not found\")\n\t\t\treturn ctrl.Result{}, nil\n\t\t}\n\t\tlog.Error(err, \"unable to fetch MerakiSource\")\n\t\treturn ctrl.Result{}, err\n\t}\n\n\tvar dnsEndpoint endpoint.DNSEndpoint\n\t// dns endpoint will have the same name as the MerakiSource\n\tif err := r.Get(ctx, req.NamespacedName, &dnsEndpoint); err != nil {\n\t\tif apierrs.IsNotFound(err) {\n\t\t\tlog.V(1).Info(\"dns endpoint not found\")\n\t\t\t// create it\n\t\t\tdnsEndpoint = endpoint.DNSEndpoint{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: source.Name,\n\t\t\t\t\tNamespace: source.Namespace,\n\t\t\t\t},\n\t\t\t}\n\t\t} else {\n\t\t\tlog.Error(err, \"unable to get dns endpoint\", \"dns-endpoint\", req.NamespacedName)\n\t\t\treturn ctrl.Result{}, err\n\t\t}\n\t}\n\n\tif err := ctrl.SetControllerReference(&source, &dnsEndpoint, r.Scheme); err != nil {\n\t\treturn ctrl.Result{}, err\n\t}\n\n\t// update the spec from MerakiData\n\t// don't query meraki if we already did in the last 1 minute\n\tif source.Status.SyncedAt == nil || time.Since(source.Status.SyncedAt.Time) > r.APIThrottleInterval {\n\t\tendpoints, err := r.GetEndpoints(&source)\n\t\tif err != nil {\n\t\t\tlog.Error(err, \"failed to get endpoints\")\n\t\t\treturn ctrl.Result{}, err\n\t\t}\n\n\t\tdnsEndpoint.Spec.Endpoints = endpoints\n\n\t\tif r.isNew(dnsEndpoint) {\n\t\t\tif err := r.Create(ctx, &dnsEndpoint); err != nil {\n\t\t\t\tlog.Error(err, \"failed to create dns endpoint\", \"dns-endpoint\", dnsEndpoint)\n\t\t\t\treturn ctrl.Result{}, err\n\t\t\t}\n\t\t\tlog.V(1).Info(\"created dns endpoint\", \"dns-endpoint\", dnsEndpoint.GetName())\n\t\t} else {\n\t\t\tif err := r.Update(ctx, &dnsEndpoint); err != nil {\n\t\t\t\tlog.Error(err, \"failed to update dns endpoint\", \"dns-endpoint\")\n\t\t\t\treturn ctrl.Result{}, err\n\t\t\t}\n\t\t\tlog.V(1).Info(\"updated dns endpoint\", \"dns-endpoint\", dnsEndpoint.GetName())\n\t\t}\n\n\t\tts := metav1.Now()\n\t\tsource.Status.SyncedAt = &ts\n\t}\n\n\tref, err := ref.GetReference(r.Scheme, &dnsEndpoint)\n\tif err != nil {\n\t\tlog.Error(err, \"unable to make reference to dns endpoint\", \"dns-endpoint\", dnsEndpoint)\n\t\treturn ctrl.Result{}, err\n\t}\n\tsource.Status.Endpoint = *ref\n\tif err := r.Status().Update(ctx, &source); err != nil {\n\t\tif apierrs.IsConflict(err) {\n\t\t\tlog.V(1).Info(\"stale MerakiSource, requeue\")\n\t\t\treturn ctrl.Result{Requeue: true}, nil\n\t\t}\n\t\tlog.Error(err, \"unable to update MerakiSource status\")\n\t\treturn ctrl.Result{}, err\n\t}\n\n\treturn ctrl.Result{RequeueAfter: r.RequeueInterval}, nil\n}", "title": "" }, { "docid": "496f28c6642af4204498a1861ceec8d4", "score": "0.4265044", "text": "func getJitteredNetworkRetryTime() time.Duration {\n\treturn time.Duration(900+rand.Intn(100)) * time.Millisecond\n}", "title": "" }, { "docid": "9098ea3f999cfa4040b7ef08f7c17d4f", "score": "0.42641118", "text": "func (cd *ConnectionDetails) RetryLimit() int {\n\ti, err := strconv.Atoi(defaults.String(cd.Options[\"retry_limit\"], \"1000\"))\n\tif err != nil {\n\t\treturn 100\n\t}\n\treturn i\n}", "title": "" }, { "docid": "d27ebaaba6c26349e3be69c511ac1263", "score": "0.42618206", "text": "func (o *UpdateDmrClusterLinkParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "dd6245ab9381f9013e38c73bfa744986", "score": "0.42461565", "text": "func (o *GetRepositoriesParams) SetTimeout(timeout time.Duration) 
{\n\to.timeout = timeout\n}", "title": "" }, { "docid": "0953903b70ab09bfac24f90438fea609", "score": "0.42441642", "text": "func (o *OrgRemoveTeamRepositoryParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "bdd247f2a7c70b27295f47ab24c1de28", "score": "0.42422727", "text": "func configureRequestTimeout() {\n\trequestTimeout = defaultRequestTimeout\n\n\tif v := os.Getenv(registryClientTimeoutEnvName); v != \"\" {\n\t\ttimeout, err := strconv.Atoi(v)\n\t\tif err == nil && timeout > 0 {\n\t\t\trequestTimeout = time.Duration(timeout) * time.Second\n\t\t}\n\t}\n}", "title": "" }, { "docid": "cf27adec91638eb7ad46287cb18f2be9", "score": "0.42266425", "text": "func (r *ReconcileGitSource) Reconcile(request reconcile.Request) (reconcile.Result, error) {\n\treqLogger := log.WithValues(\"Request.Namespace\", request.Namespace, \"Request.Name\", request.Name)\n\treqLogger.Info(\"Reconciling GitSource\")\n\n\t// Fetch the GitSource instance\n\tgitSource := &v1alpha1.GitSource{}\n\terr := r.client.Get(context.TODO(), request.NamespacedName, gitSource)\n\tif err != nil {\n\t\treqLogger.Error(err, \"Error getting GitSource object\")\n\t\tif errors.IsNotFound(err) {\n\t\t\t// Request object not found, could have been deleted after reconcile request.\n\t\t\t// Owned objects are automatically garbage collected. For additional cleanup logic use finalizers.\n\t\t\t// Return and don't requeue\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t\t// Error reading the object - requeue the request.\n\t\treturn reconcile.Result{}, err\n\t}\n\tgitSourceLogger := gslog.LogWithGSValues(reqLogger, gitSource)\n\n\tisDirty := updateStatus(gitSourceLogger, r.client, request.Namespace, gitSource)\n\n\tif isDirty {\n\t\terr = r.client.Update(context.TODO(), gitSource)\n\t\tif err != nil {\n\t\t\tgitSourceLogger.Error(err, \"Error updating GitSource object\")\n\t\t\tif errors.IsNotFound(err) {\n\t\t\t\t// Request object not found, could have been deleted after reconcile request.\n\t\t\t\t// Owned objects are automatically garbage collected. For additional cleanup logic use finalizers.\n\t\t\t\t// Return and don't requeue\n\t\t\t\treturn reconcile.Result{}, nil\n\t\t\t}\n\t\t\t// Error updating the object - requeue the request.\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t}\n\treturn reconcile.Result{}, nil\n}", "title": "" }, { "docid": "43c2cbd1edbedad74ae565c2d8015d07", "score": "0.4215025", "text": "func getRecloneTime(gitDir string) (time.Time, error) {\n\t// We store the time we recloned the repository. If the value is missing,\n\t// we store the current time. 
This decouples this timestamp from the\n\t// different ways a clone can appear in gitserver.\n\tupdate := func() (time.Time, error) {\n\t\tnow := time.Now()\n\t\tcmd := exec.Command(\"git\", \"config\", \"--add\", \"sourcegraph.recloneTimestamp\", strconv.FormatInt(time.Now().Unix(), 10))\n\t\tcmd.Dir = gitDir\n\t\tif _, err := cmd.Output(); err != nil {\n\t\t\treturn now, errors.Wrap(wrapCmdError(cmd, err), \"failed to update recloneTimestamp\")\n\t\t}\n\t\treturn now, nil\n\t}\n\n\tcmd := exec.Command(\"git\", \"config\", \"--get\", \"sourcegraph.recloneTimestamp\")\n\tcmd.Dir = gitDir\n\tout, err := cmd.Output()\n\tif err != nil {\n\t\t// Exit code 1 means the key is not set.\n\t\tif ee, ok := err.(*exec.ExitError); ok && ee.Sys().(syscall.WaitStatus).ExitStatus() == 1 {\n\t\t\treturn update()\n\t\t}\n\t\treturn time.Unix(0, 0), errors.Wrap(wrapCmdError(cmd, err), \"failed to determine clone timestamp\")\n\t}\n\n\tsec, err := strconv.ParseInt(strings.TrimSpace(string(out)), 10, 0)\n\tif err != nil {\n\t\t// If the value is bad update it to the current time\n\t\tnow, err2 := update()\n\t\tif err2 != nil {\n\t\t\terr = err2\n\t\t}\n\t\treturn now, err\n\t}\n\n\treturn time.Unix(sec, 0), nil\n}", "title": "" }, { "docid": "ab67694cde0a48e333b29b020096e3aa", "score": "0.4209493", "text": "func (o ProxyDefaultTargetGroupConnectionPoolConfigOutput) ConnectionBorrowTimeout() pulumi.IntPtrOutput {\n\treturn o.ApplyT(func(v ProxyDefaultTargetGroupConnectionPoolConfig) *int { return v.ConnectionBorrowTimeout }).(pulumi.IntPtrOutput)\n}", "title": "" }, { "docid": "f89652fa3ef7350a54e0431b52914bdf", "score": "0.42069712", "text": "func (c *FromCommand) ConnectionTimeout() time.Duration {\n\tstr := ExpandEnv(c.cmd.args[\"timeout\"])\n\tto, err := time.ParseDuration(str)\n\tif err != nil {\n\t\tto = time.Second * 120\n\t}\n\treturn to\n}", "title": "" }, { "docid": "b2d422f4a8c74f9431c1968712c141da", "score": "0.4196789", "text": "func electionTimeout() int64 {\n\treturn int64(rand.Intn(MAXELECTIMEOUT- MINELECTIMEOUT) + MINELECTIMEOUT)\n}", "title": "" }, { "docid": "381ce5852733b64b7f0a464715cda1ca", "score": "0.41925275", "text": "func ConnMaxLifetime(cml time.Duration) Option {\n\treturn func(o *options) {\n\t\to.connMaxLifetime = cml\n\t}\n}", "title": "" }, { "docid": "b8d77b1c49ca78e0ffee8e116d70f78f", "score": "0.41852015", "text": "func (o *AddRepositoryParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "8db046d2f4398f90caf675d0948e5abe", "score": "0.41831496", "text": "func (r *ReconcileChe) Reconcile(request reconcile.Request) (reconcile.Result, error) {\n\t// Fetch the CheCluster instance\n\ttests := r.tests\n\tinstance, err := r.GetCR(request)\n\tif err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\t// Request object not found, could have been deleted after reconcile request.\n\t\t\t// Owned objects are automatically garbage collected. 
For additional cleanup logic use finalizers.\n\t\t\t// Return and don't requeue\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t\t// Error reading the object - requeue the request.\n\t\treturn reconcile.Result{}, err\n\t}\n\n\tisOpenShift, isOpenShift4, err := util.DetectOpenShift()\n\tif err != nil {\n\t\tlogrus.Errorf(\"An error occurred when detecting current infra: %s\", err)\n\t}\n\tif isOpenShift {\n\t\t// delete oAuthClient before CR is deleted\n\t\tdoInstallOpenShiftoAuthProvider := instance.Spec.Auth.OpenShiftOauth\n\t\tif doInstallOpenShiftoAuthProvider {\n\t\t\tif err := r.ReconcileFinalizer(instance); err != nil {\n\t\t\t\treturn reconcile.Result{}, err\n\t\t\t}\n\t\t}\n\t}\n\tif isOpenShift {\n\t\t// create a secret with router tls cert when on OpenShift infra and router is configured with a self signed certificate\n\t\tif instance.Spec.Server.SelfSignedCert ||\n\t\t\t// To use Openshift v4 OAuth, the OAuth endpoints are served from a namespace\n\t\t\t// and NOT from the Openshift API Master URL (as in v3)\n\t\t\t// So we also need the self-signed certificate to access them (same as the Che server)\n\t\t\t(isOpenShift4 && instance.Spec.Auth.OpenShiftOauth && ! instance.Spec.Server.TlsSupport) {\n\t\t\tif err := r.CreateTLSSecret(instance, \"\", \"self-signed-certificate\"); err != nil {\n\t\t\t\treturn reconcile.Result{}, err\n\t\t\t}\n\t\t}\n\t\tif instance.Spec.Auth.OpenShiftOauth {\n\t\t\t// create a secret with OpenShift API crt to be added to keystore that RH SSO will consume\n\t\t\tbaseURL, err := util.GetClusterPublicHostname(isOpenShift4)\n\t\t\tif err != nil {\n\t\t\t\tlogrus.Errorf(\"Failed to get OpenShift cluster public hostname. A secret with API crt will not be created and consumed by RH-SSO/Keycloak\")\n\t\t\t} else {\n\t\t\t\tif err := r.CreateTLSSecret(instance, baseURL, \"openshift-api-crt\"); err != nil {\n\t\t\t\t\treturn reconcile.Result{}, err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif !tests {\n\t\tdeployment := &appsv1.Deployment{}\n\t\tname := \"che\"\n\t\tcheFlavor := instance.Spec.Server.CheFlavor\n\t\tif cheFlavor == \"codeready\" {\n\t\t\tname = cheFlavor\n\t\t}\n\t\terr = r.client.Get(context.TODO(), types.NamespacedName{Name: name, Namespace: instance.Namespace}, deployment)\n\t\tif err != nil && instance.Status.CheClusterRunning != UnavailableStatus {\n\t\t\tif err := r.SetCheUnavailableStatus(instance, request); err != nil {\n\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t\t}\n\t\t}\n\t}\n\t// create service accounts:\n\t// che is the one which token is used to create workspace objects\n\t// che-workspace is SA used by plugins like exec and terminal with limited privileges\n\tcheServiceAccount := deploy.NewServiceAccount(instance, \"che\")\n\tif err := r.CreateServiceAccount(instance, cheServiceAccount); err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\tworkspaceServiceAccount := deploy.NewServiceAccount(instance, \"che-workspace\")\n\tif err := r.CreateServiceAccount(instance, workspaceServiceAccount); err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\t// create exec and view roles for CheCluster server and workspaces\n\texecRole := deploy.NewRole(instance, \"exec\", []string{\"pods/exec\"}, []string{\"*\"})\n\tif err := r.CreateNewRole(instance, execRole); err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\tviewRole := deploy.NewRole(instance, \"view\", []string{\"pods\"}, []string{\"list\"})\n\tif err := r.CreateNewRole(instance, viewRole); err != nil {\n\t\treturn reconcile.Result{}, 
err\n\t}\n\t// create RoleBindings for created (and existing ClusterRole) roles and service accounts\n\tcheRoleBinding := deploy.NewRoleBinding(instance, \"che\", cheServiceAccount.Name, \"edit\", \"ClusterRole\")\n\tif err := r.CreateNewRoleBinding(instance, cheRoleBinding); err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\texecRoleBinding := deploy.NewRoleBinding(instance, \"che-workspace-exec\", workspaceServiceAccount.Name, execRole.Name, \"Role\")\n\tif err = r.CreateNewRoleBinding(instance, execRoleBinding); err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\tviewRoleBinding := deploy.NewRoleBinding(instance, \"che-workspace-view\", workspaceServiceAccount.Name, viewRole.Name, \"Role\")\n\tif err := r.CreateNewRoleBinding(instance, viewRoleBinding); err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\n\t// If the user specified an additional cluster role to use for the Che workspace, create a role binding for it\n\t// Use a role binding instead of a cluster role binding to keep the additional access scoped to the workspace's namespace\n\tworkspaceClusterRole := instance.Spec.Server.CheWorkspaceClusterRole\n\tif workspaceClusterRole != \"\" {\n\t\tcustomRoleBinding := deploy.NewRoleBinding(instance, \"che-workspace-custom\", workspaceServiceAccount.Name, workspaceClusterRole, \"ClusterRole\")\n\t\tif err = r.CreateNewRoleBinding(instance, customRoleBinding); err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t}\n\n\tif err := r.GenerateAndSaveFields(instance, request); err != nil {\n\t\tinstance, _ = r.GetCR(request)\n\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t}\n\tchePostgresPassword := instance.Spec.Database.ChePostgresPassword\n\tkeycloakPostgresPassword := instance.Spec.Auth.KeycloakPostgresPassword\n\tkeycloakAdminPassword := instance.Spec.Auth.KeycloakAdminPassword\n\n\t// Create Postgres resources and provisioning unless an external DB is used\n\texternalDB := instance.Spec.Database.ExternalDB\n\tif !externalDB {\n\t\t// Create a new postgres service\n\t\tpostgresLabels := deploy.GetLabels(instance, \"postgres\")\n\t\tpostgresService := deploy.NewService(instance, \"postgres\", []string{\"postgres\"}, []int32{5432}, postgresLabels)\n\t\tif err := r.CreateService(instance, postgresService); err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t\t// Create a new Postgres PVC object\n\t\tpvc := deploy.NewPvc(instance, \"postgres-data\", \"1Gi\", postgresLabels)\n\t\tif err := r.CreatePVC(instance, pvc); err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t\tif !tests {\n\t\t\terr = r.client.Get(context.TODO(), types.NamespacedName{Name: pvc.Name, Namespace: instance.Namespace}, pvc)\n\t\t\tif pvc.Status.Phase != \"Bound\" {\n\t\t\t\tk8sclient.GetPostgresStatus(pvc, instance.Namespace)\n\t\t\t}\n\t\t}\n\t\t// Create a new Postgres deployment\n\t\tpostgresDeployment := deploy.NewPostgresDeployment(instance, chePostgresPassword, isOpenShift)\n\t\tif err := r.CreateNewDeployment(instance, postgresDeployment); err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t\ttime.Sleep(time.Duration(1) * time.Second)\n\t\tpgDeployment, err := r.GetEffectiveDeployment(instance, postgresDeployment.Name)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"Failed to get %s deployment: %s\", postgresDeployment.Name, err)\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t\tif !tests {\n\t\t\tif pgDeployment.Status.AvailableReplicas != 1 {\n\t\t\t\tscaled := k8sclient.GetDeploymentStatus(\"postgres\", 
instance.Namespace)\n\t\t\t\tif !scaled {\n\t\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 5}, err\n\t\t\t\t}\n\t\t\t}\n\t\t\tpgCommand := deploy.GetPostgresProvisionCommand(instance)\n\t\t\tdbStatus := instance.Status.DbProvisoned\n\t\t\t// provision Db and users for Che and Keycloak servers\n\t\t\tif !dbStatus {\n\t\t\t\tpodToExec, err := k8sclient.GetDeploymentPod(pgDeployment.Name, instance.Namespace)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn reconcile.Result{}, err\n\t\t\t\t}\n\t\t\t\tprovisioned := ExecIntoPod(podToExec, pgCommand, \"create Keycloak DB, user, privileges\", instance.Namespace)\n\t\t\t\tif provisioned {\n\t\t\t\t\tfor {\n\t\t\t\t\t\tinstance.Status.DbProvisoned = true\n\t\t\t\t\t\tif err := r.UpdateCheCRStatus(instance, \"status: provisioned with DB and user\", \"true\"); err != nil {\n\t\t\t\t\t\t\tinstance, _ = r.GetCR(request)\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 5}, err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tcheFlavor := util.GetValue(instance.Spec.Server.CheFlavor, deploy.DefaultCheFlavor)\n\tingressStrategy := util.GetValue(instance.Spec.K8SOnly.IngressStrategy, deploy.DefaultIngressStrategy)\n\tingressDomain := instance.Spec.K8SOnly.IngressDomain\n\ttlsSupport := instance.Spec.Server.TlsSupport\n\tprotocol := \"http\"\n\tif tlsSupport {\n\t\tprotocol = \"https\"\n\t}\n\t// create Che service and route\n\tcheLabels := deploy.GetLabels(instance, util.GetValue(instance.Spec.Server.CheFlavor, deploy.DefaultCheFlavor))\n\n\tcheService := deploy.NewService(instance, \"che-host\", []string{\"http\", \"metrics\"}, []int32{8080, 8087}, cheLabels)\n\tif err := r.CreateService(instance, cheService); err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\tif !isOpenShift {\n\t\tcheIngress := deploy.NewIngress(instance, cheFlavor, \"che-host\", 8080)\n\t\tif err := r.CreateNewIngress(instance, cheIngress); err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t\tcheHost := ingressDomain\n\t\tif ingressStrategy == \"multi-host\" {\n\t\t\tcheHost = cheFlavor + \"-\" + instance.Namespace + \".\" + ingressDomain\n\t\t}\n\t\tif len(instance.Spec.Server.CheHost) == 0 {\n\t\t\tinstance.Spec.Server.CheHost = cheHost\n\t\t\tif err := r.UpdateCheCRSpec(instance, \"CheHost URL\", cheHost); err != nil {\n\t\t\t\tinstance, _ = r.GetCR(request)\n\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t\t}\n\t\t}\n\t} else {\n\t\tcheRoute := deploy.NewRoute(instance, cheFlavor, \"che-host\", 8080)\n\t\tif tlsSupport {\n\t\t\tcheRoute = deploy.NewTlsRoute(instance, cheFlavor, \"che-host\", 8080)\n\t\t}\n\t\tif err := r.CreateNewRoute(instance, cheRoute); err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t\tif len(instance.Spec.Server.CheHost) == 0 {\n\t\t\tinstance.Spec.Server.CheHost = cheRoute.Spec.Host\n\t\t\tif len(cheRoute.Spec.Host) < 1 {\n\t\t\t\tcheRoute := r.GetEffectiveRoute(instance, cheRoute.Name)\n\t\t\t\tinstance.Spec.Server.CheHost = cheRoute.Spec.Host\n\t\t\t}\n\t\t\tif err := r.UpdateCheCRSpec(instance, \"CheHost URL\", instance.Spec.Server.CheHost); err != nil {\n\t\t\t\tinstance, _ = r.GetCR(request)\n\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t\t}\n\t\t}\n\t}\n\t// create and provision Keycloak related objects\n\tExternalKeycloak := instance.Spec.Auth.ExternalKeycloak\n\n\tif !ExternalKeycloak {\n\t\tkeycloakLabels := 
deploy.GetLabels(instance, \"keycloak\")\n\t\tkeycloakService := deploy.NewService(instance, \"keycloak\", []string{\"http\"}, []int32{8080}, keycloakLabels)\n\t\tif err := r.CreateService(instance, keycloakService); err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t\t// create Keycloak ingresses when on k8s\n\t\tif !isOpenShift {\n\t\t\tkeycloakIngress := deploy.NewIngress(instance, \"keycloak\", \"keycloak\", 8080)\n\t\t\tif err := r.CreateNewIngress(instance, keycloakIngress); err != nil {\n\t\t\t\treturn reconcile.Result{}, err\n\t\t\t}\n\t\t\tkeycloakURL := protocol + \"://\" + ingressDomain\n\t\t\tif ingressStrategy == \"multi-host\" {\n\t\t\t\tkeycloakURL = protocol + \"://keycloak-\" + instance.Namespace + \".\" + ingressDomain\n\t\t\t}\n\t\t\tif len(instance.Spec.Auth.KeycloakURL) == 0 {\n\t\t\t\tinstance.Spec.Auth.KeycloakURL = keycloakURL\n\t\t\t\tif err := r.UpdateCheCRSpec(instance, \"Keycloak URL\", instance.Spec.Auth.KeycloakURL); err != nil {\n\t\t\t\t\tinstance, _ = r.GetCR(request)\n\t\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\t// create Keycloak route\n\t\t\tkeycloakRoute := deploy.NewRoute(instance, \"keycloak\", \"keycloak\", 8080)\n\t\t\tif tlsSupport {\n\t\t\t\tkeycloakRoute = deploy.NewTlsRoute(instance, \"keycloak\", \"keycloak\", 8080)\n\t\t\t}\n\t\t\tif err = r.CreateNewRoute(instance, keycloakRoute); err != nil {\n\t\t\t\treturn reconcile.Result{}, err\n\t\t\t}\n\t\t\tkeycloakURL := keycloakRoute.Spec.Host\n\t\t\tif len(instance.Spec.Auth.KeycloakURL) == 0 {\n\t\t\t\tinstance.Spec.Auth.KeycloakURL = protocol + \"://\" + keycloakURL\n\t\t\t\tif len(keycloakURL) < 1 {\n\t\t\t\t\tkeycloakURL := r.GetEffectiveRoute(instance, keycloakRoute.Name).Spec.Host\n\t\t\t\t\tinstance.Spec.Auth.KeycloakURL = protocol + \"://\" + keycloakURL\n\t\t\t\t}\n\t\t\t\tif err := r.UpdateCheCRSpec(instance, \"Keycloak URL\", instance.Spec.Auth.KeycloakURL); err != nil {\n\t\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t\t\t}\n\t\t\t\tinstance.Status.KeycloakURL = protocol + \"://\" + keycloakURL\n\t\t\t\tif err := r.UpdateCheCRStatus(instance, \"status: Keycloak URL\", instance.Spec.Auth.KeycloakURL); err != nil {\n\t\t\t\t\tinstance, _ = r.GetCR(request)\n\t\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tkeycloakDeployment := deploy.NewKeycloakDeployment(instance, keycloakPostgresPassword, keycloakAdminPassword, cheFlavor,\n\t\t\tr.GetEffectiveSecretResourceVersion(instance, \"self-signed-certificate\"),\n\t\t\tr.GetEffectiveSecretResourceVersion(instance, \"openshift-api-crt\"))\n\t\tif err := r.CreateNewDeployment(instance, keycloakDeployment); err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t\ttime.Sleep(time.Duration(1) * time.Second)\n\t\tdeployment, err := r.GetEffectiveDeployment(instance, keycloakDeployment.Name)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"Failed to get %s deployment: %s\", keycloakDeployment.Name, err)\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t\tif !tests {\n\t\t\tif deployment.Status.AvailableReplicas != 1 {\n\t\t\t\tscaled := k8sclient.GetDeploymentStatus(keycloakDeployment.Name, instance.Namespace)\n\t\t\t\tif !scaled {\n\t\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 5}, err\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tcheCertSecretVersion := r.GetEffectiveSecretResourceVersion(instance, 
\"self-signed-certificate\")\n\t\t\topenshiftApiCertSecretVersion := r.GetEffectiveSecretResourceVersion(instance, \"openshift-api-crt\")\n\t\t\tif deployment.Spec.Template.Spec.Containers[0].Image != instance.Spec.Auth.KeycloakImage ||\n\t\t\tcheCertSecretVersion != deployment.Annotations[\"che.self-signed-certificate.version\"] ||\n\t\t\topenshiftApiCertSecretVersion != deployment.Annotations[\"che.openshift-api-crt.version\"] {\n\t\t\t\tkeycloakDeployment := deploy.NewKeycloakDeployment(instance, keycloakPostgresPassword, keycloakAdminPassword, cheFlavor, cheCertSecretVersion, openshiftApiCertSecretVersion)\n\t\t\t\tlogrus.Infof(\"Updating Keycloak deployment with an image %s\", instance.Spec.Auth.KeycloakImage)\n\t\t\t\tif err := controllerutil.SetControllerReference(instance, keycloakDeployment, r.scheme); err != nil {\n\t\t\t\t\tlogrus.Errorf(\"An error occurred: %s\", err)\n\t\t\t\t}\n\t\t\t\tif err := r.client.Update(context.TODO(), keycloakDeployment); err != nil {\n\t\t\t\t\tlogrus.Errorf(\"Failed to update Keycloak deployment: %s\", err)\n\t\t\t\t}\n\n\t\t\t}\n\t\t\tkeycloakRealmClientStatus := instance.Status.KeycloakProvisoned\n\t\t\tif !keycloakRealmClientStatus {\n\t\t\t\tif err := r.CreateKyecloakResources(instance, request, keycloakDeployment.Name); err != nil {\n\t\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 5}, err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif isOpenShift {\n\t\t\tdoInstallOpenShiftoAuthProvider := instance.Spec.Auth.OpenShiftOauth\n\t\t\tif doInstallOpenShiftoAuthProvider {\n\t\t\t\topenShiftIdentityProviderStatus := instance.Status.OpenShiftoAuthProvisioned\n\t\t\t\tif !openShiftIdentityProviderStatus {\n\t\t\t\t\tif err := r.CreateIdentityProviderItems(instance, request, cheFlavor, keycloakDeployment.Name, isOpenShift4); err != nil {\n\t\t\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 5}, err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t// create Che ConfigMap which is synced with CR and is not supposed to be manually edited\n\t// controller will reconcile this CM with CR spec\n\tcheHost := instance.Spec.Server.CheHost\n\tcheEnv := deploy.GetConfigMapData(instance)\n\tcheConfigMap := deploy.NewCheConfigMap(instance, cheEnv)\n\tif err := r.CreateNewConfigMap(instance, cheConfigMap); err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\n\t// create a custom ConfigMap that won't be synced with CR spec\n\t// to be able to override envs and not clutter CR spec with fields which are too numerous\n\tcustomCM := &corev1.ConfigMap{\n\t\tData: deploy.GetCustomConfigMapData(),\n\t\tTypeMeta: metav1.TypeMeta{\n\t\t\tKind: \"ConfigMap\"},\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: \"custom\",\n\t\t\tNamespace: instance.Namespace,\n\t\t\tLabels: cheLabels}}\n\tif err := r.CreateNewConfigMap(instance, customCM); err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\t// configMap resource version will be an env in Che deployment to easily update it when a ConfigMap changes\n\t// which will automatically trigger Che rolling update\n\tcmResourceVersion := cheConfigMap.ResourceVersion\n\t// create Che deployment\n\tcheImageRepo := util.GetValue(instance.Spec.Server.CheImage, deploy.DefaultCheServerImageRepo)\n\tcheImageTag := util.GetValue(instance.Spec.Server.CheImageTag, deploy.DefaultCheServerImageTag)\n\tif cheFlavor == \"codeready\" {\n\t\tcheImageRepo = util.GetValue(instance.Spec.Server.CheImage, deploy.DefaultCodeReadyServerImageRepo)\n\t\tcheImageTag = util.GetValue(instance.Spec.Server.CheImageTag, 
deploy.DefaultCodeReadyServerImageTag)\n\t}\n\tcheDeployment, err := deploy.NewCheDeployment(instance, cheImageRepo, cheImageTag, cmResourceVersion, isOpenShift)\n\tif err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\tif err = r.CreateNewDeployment(instance, cheDeployment); err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\t// sometimes Get cannot find deployment right away\n\ttime.Sleep(time.Duration(1) * time.Second)\n\tdeployment, err := r.GetEffectiveDeployment(instance, cheDeployment.Name)\n\tif err != nil {\n\t\tlogrus.Errorf(\"Failed to get %s deployment: %s\", cheDeployment.Name, err)\n\t\treturn reconcile.Result{}, err\n\t}\n\tif !tests {\n\t\tif deployment.Status.AvailableReplicas != 1 {\n\t\t\tinstance, _ := r.GetCR(request)\n\t\t\tif err := r.SetCheUnavailableStatus(instance, request); err != nil {\n\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t\t}\n\t\t\tscaled := k8sclient.GetDeploymentStatus(cheDeployment.Name, instance.Namespace)\n\t\t\tif !scaled {\n\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 5}, err\n\t\t\t}\n\t\t\terr = r.client.Get(context.TODO(), types.NamespacedName{Name: cheDeployment.Name, Namespace: instance.Namespace}, deployment)\n\t\t\tif deployment.Status.AvailableReplicas == 1 {\n\t\t\t\tif err := r.SetCheAvailableStatus(instance, request, protocol, cheHost); err != nil {\n\t\t\t\t\tinstance, _ = r.GetCR(request)\n\t\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t\t\t}\n\t\t\t\tif instance.Status.CheVersion != cheImageTag {\n\t\t\t\t\tinstance.Status.CheVersion = cheImageTag\n\t\t\t\t\tif err := r.UpdateCheCRStatus(instance, \"version\", cheImageTag); err != nil {\n\t\t\t\t\t\tinstance, _ = r.GetCR(request)\n\t\t\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif deployment.Status.Replicas > 1 {\n\t\t\tlogrus.Infof(\"Deployment %s is in the rolling update state\", cheDeployment.Name)\n\t\t\tif err := r.SetCheRollingUpdateStatus(instance, request); err != nil {\n\t\t\t\tinstance, _ = r.GetCR(request)\n\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t\t}\n\t\t\tk8sclient.GetDeploymentRollingUpdateStatus(cheDeployment.Name, instance.Namespace)\n\t\t\tdeployment, _ := r.GetEffectiveDeployment(instance, cheDeployment.Name)\n\t\t\tif deployment.Status.Replicas == 1 {\n\t\t\t\tif err := r.SetCheAvailableStatus(instance, request, protocol, cheHost); err != nil {\n\t\t\t\t\tinstance, _ = r.GetCR(request)\n\t\t\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tif deployment.Spec.Template.Spec.Containers[0].Image != cheDeployment.Spec.Template.Spec.Containers[0].Image {\n\t\tif err := controllerutil.SetControllerReference(instance, deployment, r.scheme); err != nil {\n\t\t\tlogrus.Errorf(\"An error occurred: %s\", err)\n\t\t}\n\t\tlogrus.Infof(\"Updating %s %s with image %s:%s\", cheDeployment.Name, cheDeployment.Kind, cheImageRepo, cheImageTag)\n\t\tinstance.Status.CheVersion = cheImageTag\n\t\tif err := r.UpdateCheCRStatus(instance, \"version\", cheImageTag); err != nil {\n\t\t\tinstance, _ = r.GetCR(request)\n\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t}\n\t\tif err := r.client.Update(context.TODO(), cheDeployment); err != nil {\n\t\t\tlogrus.Errorf(\"Failed to update %s %s: %s\", deployment.Kind, deployment.Name, 
err)\n\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\n\t\t}\n\t}\n\n\t// reconcile routes/ingresses before reconciling Che deployment\n\tactiveConfigMap := &corev1.ConfigMap{}\n\tif err := r.client.Get(context.TODO(), types.NamespacedName{Name: \"che\", Namespace: instance.Namespace}, activeConfigMap); err != nil {\n\t\tlogrus.Errorf(\"ConfigMap %s not found: %s\", activeConfigMap.Name, err)\n\t}\n\tif !tlsSupport && activeConfigMap.Data[\"CHE_INFRA_OPENSHIFT_TLS__ENABLED\"] == \"true\" {\n\t\troutesUpdated, err := r.ReconcileTLSObjects(instance, request, cheFlavor, tlsSupport, isOpenShift)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"An error occurred when updating routes %s\", err)\n\t\t}\n\t\tif routesUpdated {\n\t\t\tlogrus.Info(\"Routes have been updated with TLS config\")\n\t\t}\n\t}\n\tif tlsSupport && activeConfigMap.Data[\"CHE_INFRA_OPENSHIFT_TLS__ENABLED\"] == \"false\" {\n\t\troutesUpdated, err := r.ReconcileTLSObjects(instance, request, cheFlavor, tlsSupport, isOpenShift)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"An error occurred when updating routes %s\", err)\n\t\t}\n\t\tif routesUpdated {\n\t\t\tlogrus.Info(\"Routes have been updated with TLS config\")\n\t\t}\n\t}\n\t// Reconcile Che ConfigMap to align with CR spec\n\tcmUpdated, err := r.UpdateConfigMap(instance)\n\tif err != nil {\n\t\treturn reconcile.Result{}, err\n\t}\n\t// Delete OpenShift identity provider if OpenShift oAuth is false in spec\n\t// but OpenShiftoAuthProvisioned is true in CR status, e.g. when oAuth has been turned on and then turned off\n\tdeleted, err := r.ReconcileIdentityProvider(instance, isOpenShift4)\n\tif deleted {\n\t\tif err := r.DeleteFinalizer(instance); err != nil {\n\t\t\tinstance, _ = r.GetCR(request)\n\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t}\n\t\tinstance.Status.OpenShiftoAuthProvisioned = false\n\t\tif err := r.UpdateCheCRStatus(instance, \"provisioned with OpenShift oAuth\", \"false\"); err != nil {\n\t\t\tinstance, _ = r.GetCR(request)\n\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t}\n\t\tinstance.Spec.Auth.OauthSecret = \"\"\n\t\tif err := r.UpdateCheCRSpec(instance, \"delete oAuth secret name\", \"\"); err != nil {\n\t\t\tinstance, _ = r.GetCR(request)\n\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t}\n\t\tinstance.Spec.Auth.OauthClientName = \"\"\n\t\tif err := r.UpdateCheCRSpec(instance, \"delete oAuth client name\", \"\"); err != nil {\n\t\t\tinstance, _ = r.GetCR(request)\n\t\t\treturn reconcile.Result{Requeue: true, RequeueAfter: time.Second * 1}, err\n\t\t}\n\t}\n\n\tif cmUpdated {\n\t\t// sometimes an old cm resource version is returned ie get happens too fast - before server updates CM\n\t\ttime.Sleep(time.Duration(1) * time.Second)\n\t\tcm := r.GetEffectiveConfigMap(instance, cheConfigMap.Name)\n\t\tcmResourceVersion := cm.ResourceVersion\n\t\tcheDeployment, err := deploy.NewCheDeployment(instance, cheImageRepo, cheImageTag, cmResourceVersion, isOpenShift)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"An error occurred: %s\", err)\n\t\t}\n\t\tif err := controllerutil.SetControllerReference(instance, cheDeployment, r.scheme); err != nil {\n\t\t\tlogrus.Errorf(\"An error occurred: %s\", err)\n\t\t}\n\t\tif err := r.client.Update(context.TODO(), cheDeployment); err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t}\n\tdeployment, _ = r.GetEffectiveDeployment(instance, cheDeployment.Name)\n\tactualMemRequest := 
deployment.Spec.Template.Spec.Containers[0].Resources.Requests[corev1.ResourceMemory]\n\tactualMemLimit := deployment.Spec.Template.Spec.Containers[0].Resources.Limits[corev1.ResourceMemory]\n\tlimitStr := actualMemLimit.String()\n\trequestStr := actualMemRequest.String()\n\tdesiredRequest := util.GetValue(instance.Spec.Server.ServerMemoryRequest, deploy.DefaultServerMemoryRequest)\n\tdesiredLimit := util.GetValue(instance.Spec.Server.ServerMemoryLimit, deploy.DefaultServerMemoryLimit)\n\tif desiredRequest != requestStr || desiredLimit != limitStr {\n\t\tcheDeployment, err := deploy.NewCheDeployment(instance, cheImageRepo, cheImageTag, cmResourceVersion, isOpenShift)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"An error occurred: %s\", err)\n\t\t}\n\t\tif err := controllerutil.SetControllerReference(instance, cheDeployment, r.scheme); err != nil {\n\t\t\tlogrus.Errorf(\"An error occurred: %s\", err)\n\t\t}\n\t\tlogrus.Infof(\"Updating deployment %s with new memory settings. Request: %s, limit: %s\", cheDeployment.Name, desiredRequest, desiredLimit)\n\t\tif err := r.client.Update(context.TODO(), cheDeployment); err != nil {\n\t\t\tlogrus.Errorf(\"Failed to update deployment: %s\", err)\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t}\n\treturn reconcile.Result{}, nil\n}", "title": "" }, { "docid": "c3a30c001f07c0f8c10e90aee41d6690", "score": "0.41794088", "text": "func (o *RepoGetAllCommitsParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "9f03b4b74a005d680d80ff9df85e0716", "score": "0.41778937", "text": "func (db *DB) SetConnMaxLifetime(d time.Duration) {\n\tdb.master.SetConnMaxLifetime(d)\n\tfor _, r := range db.readreplicas {\n\t\tr.SetConnMaxLifetime(d)\n\t}\n}", "title": "" }, { "docid": "6d158e89d318a0f613c277f97efc363b", "score": "0.4169639", "text": "func (c *txnClientCtx) commitTimeout(waitmsync bool) time.Duration {\n\tif waitmsync {\n\t\treturn c.timeout.host + c.timeout.netw\n\t}\n\treturn c.timeout.netw\n}", "title": "" }, { "docid": "aed4b073023614d3dae49ff4aa879081", "score": "0.41565606", "text": "func (o *GetTerraformConfigurationSourcesUsingGET1Params) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "a2a61ed8c2d3dd48377501d3edfeaca4", "score": "0.41522127", "text": "func getElectionTimeout() time.Duration {\n\treturn time.Duration(rand.Intn(300) + 150)\n}", "title": "" }, { "docid": "d23aa33a7f83e3f3ae8fdbec99e1ac2d", "score": "0.41492677", "text": "func (r *FederatedCloneSetReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {\n\t_ = log.FromContext(ctx)\n\n\t// your logic here\n\n\treturn ctrl.Result{}, nil\n}", "title": "" }, { "docid": "d2fdb60caad7209b79f443b7734af487", "score": "0.41420206", "text": "func WithMaxConnTime(t time.Duration) PoolOption {\n\treturn func(p *Pool) {\n\t\tp.maxConnTime = t\n\t}\n}", "title": "" }, { "docid": "bf75b5275771c8f12f08ec6c65799c09", "score": "0.4136878", "text": "func recalcRecommit(minRecommit, prev time.Duration, target float64, inc bool) time.Duration {\n\tvar (\n\t\tprevF = float64(prev.Nanoseconds())\n\t\tnext float64\n\t)\n\tif inc {\n\t\tnext = prevF*(1-intervalAdjustRatio) + intervalAdjustRatio*(target+intervalAdjustBias)\n\t\tmax := float64(maxRecommitInterval.Nanoseconds())\n\t\tif next > max {\n\t\t\tnext = max\n\t\t}\n\t} else {\n\t\tnext = prevF*(1-intervalAdjustRatio) + intervalAdjustRatio*(target-intervalAdjustBias)\n\t\tmin := float64(minRecommit.Nanoseconds())\n\t\tif next < min {\n\t\t\tnext = 
min\n\t\t}\n\t}\n\treturn time.Duration(int64(next))\n}", "title": "" }, { "docid": "1745ba3e7db59a9773a451595f51cbc1", "score": "0.4135608", "text": "func Fetch(fileUrl string, destFile string) {\n\n\treferenceFileIndex, checksumLookup, fileSize, _ := fetchIndex(\"somewhere\")\n\n\tblockCount := fileSize / BLOCK_SIZE\n\tif fileSize%BLOCK_SIZE != 0 {\n\t\tblockCount++\n\t}\n\n\tfs := &gosync.BasicSummary{\n\t\tChecksumIndex: referenceFileIndex,\n\t\tChecksumLookup: checksumLookup,\n\t\tBlockCount: uint(blockCount),\n\t\tBlockSize: uint(BLOCK_SIZE),\n\t\tFileSize: fileSize,\n\t}\n\n\trsyncObject, err := gosync.MakeRSync(\n\t\tdestFile,\n\t\tfileUrl,\n\t\tdestFile,\n\t\tfs,\n\t)\n\n\terr = rsyncObject.Patch()\n\n\tif err != nil {\n\t\tfmt.Printf(\"Error: %v\\n\", err)\n\t\treturn\n\t}\n\n\terr = rsyncObject.Close()\n\n\tif err != nil {\n\t\tfmt.Printf(\"Error: %v\\n\", err)\n\t\treturn\n\t}\n\n}", "title": "" }, { "docid": "88c504277cdf22f84c2b690aa8b7dc75", "score": "0.4133009", "text": "func (o *CreateRepoNotificationParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "9a38290210170546730ab4b960286f1d", "score": "0.41325077", "text": "func (c *Connect) setRTimeout() {\n\tif c.readTimeout != 0 {\n\t\tc.nc.SetReadDeadline(time.Now().Add(c.readTimeout))\n\t}\n}", "title": "" }, { "docid": "5fe427242e2eee3227e375242b007008", "score": "0.41268688", "text": "func (r *ReconcileModelDeployment) reconcileDeploymentPullConnection(\n\tlog logr.Logger,\n\tmd *legionv1alpha1.ModelDeployment,\n) error {\n\tmdConnID := *md.Spec.ImagePullConnectionID\n\tif len(*md.Spec.ImagePullConnectionID) == 0 {\n\t\tlog.Info(\"Model deployment connection name is empty. Skip reconcile deployment secrets\")\n\n\t\treturn nil\n\t}\n\n\tlog = log.WithValues(legion.ConnectionIDLogPrefix, mdConnID)\n\n\tmdConn, err := r.connRepo.GetDecryptedConnection(\n\t\tmdConnID,\n\t\tviper.GetString(conn_conf.DecryptToken),\n\t)\n\tif err != nil {\n\t\tlog.Error(err, \"Cannot retrieve connection\")\n\n\t\treturn err\n\t}\n\n\tswitch mdConn.Spec.Type {\n\tcase connection.DockerType:\n\t\treturn r.reconcileDockerDeploymentSecret(log, md, mdConn, DockerSecret{\n\t\t\tEmail: \"\",\n\t\t\tUsername: mdConn.Spec.Username,\n\t\t\tPassword: mdConn.Spec.Password,\n\t\t})\n\tcase connection.EcrType:\n\t\t// Check whether the credentials needs to be updated.\n\t\tif md.Status.LastCredsUpdatedTime != nil &&\n\t\t\ttime.Until(md.Status.LastCredsUpdatedTime.Add(\n\t\t\t\tPeriodUpdatingDockerConnectionToken,\n\t\t\t)) < PeriodUpdatingDockerConnectionToken {\n\n\t\t\tlog.Info(\"Skip updating token\", \"last_updated_time\", md.Status.LastCredsUpdatedTime)\n\n\t\t\treturn nil\n\t\t}\n\n\t\tuser, password, err := aws.ExtractEcrCreds(mdConn.Spec)\n\t\tif err != nil {\n\t\t\tlog.Error(err, \"Can not create a token for ecr\")\n\n\t\t\treturn err\n\t\t}\n\n\t\tlog.Info(\"Deployment token was updated\")\n\n\t\terr = r.reconcileDockerDeploymentSecret(log, md, mdConn, DockerSecret{\n\t\t\tEmail: \"\",\n\t\t\tUsername: user,\n\t\t\tPassword: password,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tmd.Status.LastCredsUpdatedTime = &metav1.Time{Time: time.Now()}\n\n\t\treturn r.Update(context.TODO(), md)\n\tdefault:\n\t\t// impossible situation\n\t\treturn fmt.Errorf(\"unexpected connection type: %s\", mdConn.Spec.Type)\n\t}\n}", "title": "" }, { "docid": "0df0b168325ceb7eea4a6be4dfc0cb79", "score": "0.41094992", "text": "func (uc *UpstreamConfig) ConnectTimeout() time.Duration {\n\tif ms, ok := 
uc.Config[\"connect_timeout_ms\"].(int); ok {\n\t\treturn time.Duration(ms) * time.Millisecond\n\t}\n\treturn 10000 * time.Millisecond\n}", "title": "" }, { "docid": "1c6e9f75130d393b0756de02cbd47455", "score": "0.41091454", "text": "func (r *updateReconciler) Reconcile(ctx context.Context, in *v2.CatalogSourceConfig) (out *v2.CatalogSourceConfig, nextPhase *shared.Phase, err error) {\n\tout = in.DeepCopy()\n\n\t// The TargetNamespace of the CatalogSourceConfig object has changed\n\tif r.targetChanged {\n\t\t// Best case attempt at deleting the objects in the old TargetNamespace\n\t\t// If the csc is not cached we don't want to fail because there are\n\t\t// cases where we won't be able to find the objects.\n\t\tr.deleteObjects(in)\n\t}\n\n\t// Remove it from the cache so that it does not get picked up during\n\t// the \"Configuring\" phase\n\tr.cache.Evict(in)\n\n\t// Drop existing Status field so that reconciliation can start anew.\n\tout.Status = v2.CatalogSourceConfigStatus{}\n\tnextPhase = phase.GetNext(phase.Configuring)\n\n\tr.log.Info(\"Spec has changed, scheduling for configuring\")\n\n\treturn\n}", "title": "" }, { "docid": "b7944a89ff63339711fe87f9cc45abae", "score": "0.40861028", "text": "func (m *MockRPCConfig) RPCTimeout() (r time.Duration) {\n\tif m.RPCTimeoutFunc != nil {\n\t\treturn m.RPCTimeoutFunc()\n\t}\n\treturn\n}", "title": "" }, { "docid": "4fd41685f01937b7b3037874f4fdd297", "score": "0.40853003", "text": "func (r *ReconcileHiveConfig) Reconcile(request reconcile.Request) (reconcile.Result, error) {\n\n\thLog := log.WithField(\"controller\", \"hive\")\n\thLog.Info(\"Reconciling Hive components\")\n\n\t// Fetch the Hive instance\n\tinstance := &hivev1.HiveConfig{}\n\t// NOTE: ignoring the Namespace that seems to get set on request when syncing on namespaced objects,\n\t// when our HiveConfig is ClusterScoped.\n\terr := r.Get(context.TODO(), types.NamespacedName{Name: request.NamespacedName.Name}, instance)\n\tif err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\t// Object not found, return. Created objects are automatically garbage collected.\n\t\t\t// For additional cleanup logic use finalizers.\n\t\t\thLog.Debug(\"HiveConfig not found, deleted?\")\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t\t// Error reading the object - requeue the request.\n\t\thLog.WithError(err).Error(\"error reading HiveConfig\")\n\t\treturn reconcile.Result{}, err\n\t}\n\n\t// We only support one HiveConfig per cluster, and it must be called \"hive\". This prevents installing\n\t// Hive more than once in the cluster.\n\tif instance.Name != hiveConfigName {\n\t\thLog.WithField(\"hiveConfig\", instance.Name).Warn(\"invalid HiveConfig name, only one HiveConfig supported per cluster and must be named 'hive'\")\n\t\treturn reconcile.Result{}, nil\n\t}\n\n\trecorder := events.NewRecorder(r.kubeClient.CoreV1().Events(constants.HiveNamespace), \"hive-operator\", &corev1.ObjectReference{\n\t\tName: request.Name,\n\t\tNamespace: constants.HiveNamespace,\n\t})\n\n\tif r.syncAggregatorCA {\n\t\t// We use the configmap lister and not the regular client which only watches resources in the hive namespace\n\t\taggregatorCAConfigMap, err := r.managedConfigCMLister.ConfigMaps(managedConfigNamespace).Get(aggregatorCAConfigMapName)\n\t\t// If an error other than not found, retry. 
If not found, it means we don't need to do anything with\n\t\t// admission pods yet.\n\t\tcmLog := hLog.WithField(\"configmap\", fmt.Sprintf(\"%s/%s\", managedConfigNamespace, aggregatorCAConfigMapName))\n\t\tswitch {\n\t\tcase errors.IsNotFound(err):\n\t\t\tcmLog.Warningf(\"configmap was not found, will not sync aggregator CA with admission pods\")\n\t\tcase err != nil:\n\t\t\tcmLog.WithError(err).Errorf(\"cannot retrieve configmap\")\n\t\t\treturn reconcile.Result{}, err\n\t\tdefault:\n\t\t\tcaHash := computeHash(aggregatorCAConfigMap.Data)\n\t\t\tcmLog.WithField(\"hash\", caHash).Debugf(\"computed hash for configmap\")\n\t\t\tif instance.Status.AggregatorClientCAHash != caHash {\n\t\t\t\tcmLog.WithField(\"oldHash\", instance.Status.AggregatorClientCAHash).\n\t\t\t\t\tInfo(\"configmap has changed, admission pods will restart on the next sync\")\n\t\t\t\tinstance.Status.AggregatorClientCAHash = caHash\n\t\t\t\tcmLog.Debugf(\"updating status with new aggregator CA configmap hash\")\n\t\t\t\terr = r.Status().Update(context.TODO(), instance)\n\t\t\t\tif err != nil {\n\t\t\t\t\tcmLog.WithError(err).Error(\"cannot update hash in config status\")\n\t\t\t\t}\n\t\t\t\treturn reconcile.Result{}, err\n\t\t\t}\n\t\t\tcmLog.Debug(\"configmap unchanged, nothing to do\")\n\t\t}\n\t}\n\n\th := resource.NewHelperFromRESTConfig(r.restConfig, hLog)\n\n\tif err := deployManagedDomainsConfigMap(h, instance); err != nil {\n\t\thLog.WithError(err).Error(\"error deploying managed domains configmap\")\n\t\treturn reconcile.Result{}, err\n\t}\n\n\terr = r.deployHive(hLog, h, instance, recorder)\n\tif err != nil {\n\t\thLog.WithError(err).Error(\"error deploying Hive\")\n\t\treturn reconcile.Result{}, err\n\t}\n\n\terr = r.deployHiveAdmission(hLog, h, instance, recorder)\n\tif err != nil {\n\t\thLog.WithError(err).Error(\"error deploying HiveAdmission\")\n\t\treturn reconcile.Result{}, err\n\t}\n\n\tif err := r.teardownLegacyExternalDNS(hLog); err != nil {\n\t\thLog.WithError(err).Error(\"error tearing down legacy ExternalDNS\")\n\t\treturn reconcile.Result{}, err\n\t}\n\n\treturn reconcile.Result{}, nil\n}", "title": "" }, { "docid": "2622106f9acc64334878e6145bd10246", "score": "0.40712458", "text": "func Racer(url1, url2 string) (winner string, err error) {\n\treturn ConfigurableRacer(url1, url2, tenSecondTimeout)\n}", "title": "" }, { "docid": "747f7df7ca1f9dc371efdd3f920ef30b", "score": "0.4070639", "text": "func ConnectUseMaxWait() Option {\n\treturn func(qs *querySettings) {\n\t\t// see the definition of iSecurityFlags in\n\t\t// https://docs.microsoft.com/en-us/windows/win32/wmisdk/swbemlocator-connectserver\n\t\tqs.connectSecurityFlags = qs.connectSecurityFlags & 128\n\t}\n}", "title": "" }, { "docid": "6edca2ed8e03c37a6fdd5d158f6c8899", "score": "0.4069764", "text": "func NetResetLimit(mgr network.ResourceManager, repo repo.Repo, scope string) (rcmgr.BaseLimit, error) {\n\tvar result rcmgr.BaseLimit\n\n\tsetLimit := func(s network.ResourceScope, l rcmgr.Limit) error {\n\t\tlimiter, ok := s.(rcmgr.ResourceScopeLimiter)\n\t\tif !ok {\n\t\t\treturn ErrNoResourceMgr\n\t\t}\n\n\t\tlimiter.SetLimit(l)\n\t\treturn nil\n\t}\n\n\tcfg, err := repo.Config()\n\tif err != nil {\n\t\treturn result, fmt.Errorf(\"reading config to reset limit: %w\", err)\n\t}\n\n\tdefaults, err := createDefaultLimitConfig(cfg.Swarm)\n\tif err != nil {\n\t\treturn result, fmt.Errorf(\"creating default limit config: %w\", err)\n\t}\n\n\tif cfg.Swarm.ResourceMgr.Limits == nil {\n\t\tcfg.Swarm.ResourceMgr.Limits = 
&rcmgr.LimitConfig{}\n\t}\n\tconfigLimits := cfg.Swarm.ResourceMgr.Limits\n\n\tvar setConfigFunc func() rcmgr.BaseLimit\n\tswitch {\n\tcase scope == config.ResourceMgrSystemScope:\n\t\terr = mgr.ViewSystem(func(s network.ResourceScope) error { return setLimit(s, &defaults.System) })\n\t\tsetConfigFunc = func() rcmgr.BaseLimit {\n\t\t\tconfigLimits.System = defaults.System\n\t\t\treturn defaults.System\n\t\t}\n\tcase scope == config.ResourceMgrTransientScope:\n\t\terr = mgr.ViewTransient(func(s network.ResourceScope) error { return setLimit(s, &defaults.Transient) })\n\t\tsetConfigFunc = func() rcmgr.BaseLimit {\n\t\t\tconfigLimits.Transient = defaults.Transient\n\t\t\treturn defaults.Transient\n\t\t}\n\tcase strings.HasPrefix(scope, config.ResourceMgrServiceScopePrefix):\n\t\tsvc := strings.TrimPrefix(scope, config.ResourceMgrServiceScopePrefix)\n\n\t\terr = mgr.ViewService(svc, func(s network.ServiceScope) error { return setLimit(s, &defaults.ServiceDefault) })\n\t\tsetConfigFunc = func() rcmgr.BaseLimit {\n\t\t\tif configLimits.Service == nil {\n\t\t\t\tconfigLimits.Service = map[string]rcmgr.BaseLimit{}\n\t\t\t}\n\t\t\tconfigLimits.Service[svc] = defaults.ServiceDefault\n\t\t\treturn defaults.ServiceDefault\n\t\t}\n\tcase strings.HasPrefix(scope, config.ResourceMgrProtocolScopePrefix):\n\t\tproto := strings.TrimPrefix(scope, config.ResourceMgrProtocolScopePrefix)\n\n\t\terr = mgr.ViewProtocol(protocol.ID(proto), func(s network.ProtocolScope) error { return setLimit(s, &defaults.ProtocolDefault) })\n\t\tsetConfigFunc = func() rcmgr.BaseLimit {\n\t\t\tif configLimits.Protocol == nil {\n\t\t\t\tconfigLimits.Protocol = map[protocol.ID]rcmgr.BaseLimit{}\n\t\t\t}\n\t\t\tconfigLimits.Protocol[protocol.ID(proto)] = defaults.ProtocolDefault\n\n\t\t\treturn defaults.ProtocolDefault\n\t\t}\n\tcase strings.HasPrefix(scope, config.ResourceMgrPeerScopePrefix):\n\t\tp := strings.TrimPrefix(scope, config.ResourceMgrPeerScopePrefix)\n\n\t\tvar pid peer.ID\n\t\tpid, err = peer.Decode(p)\n\t\tif err != nil {\n\t\t\treturn result, fmt.Errorf(\"invalid peer ID: %q: %w\", p, err)\n\t\t}\n\n\t\terr = mgr.ViewPeer(pid, func(s network.PeerScope) error { return setLimit(s, &defaults.PeerDefault) })\n\t\tsetConfigFunc = func() rcmgr.BaseLimit {\n\t\t\tif configLimits.Peer == nil {\n\t\t\t\tconfigLimits.Peer = map[peer.ID]rcmgr.BaseLimit{}\n\t\t\t}\n\t\t\tconfigLimits.Peer[pid] = defaults.PeerDefault\n\n\t\t\treturn defaults.PeerDefault\n\t\t}\n\tdefault:\n\t\treturn result, fmt.Errorf(\"invalid scope %q\", scope)\n\t}\n\n\tif err != nil {\n\t\treturn result, fmt.Errorf(\"resetting new limits on resource manager: %w\", err)\n\t}\n\n\tresult = setConfigFunc()\n\n\tif err := repo.SetConfig(cfg); err != nil {\n\t\treturn result, fmt.Errorf(\"writing new limits to repo config: %w\", err)\n\t}\n\n\treturn result, nil\n}", "title": "" }, { "docid": "d9a6f0de6c33648ae5f0156259259853", "score": "0.4068765", "text": "func (o *ImagePruneParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "d6866be971d746fce0e45492ad292dae", "score": "0.40657791", "text": "func (lc *ActivityDumpLoadController) reduceDumpTimeout(new *ActivityDump) error {\n\tnewTimeout := new.LoadConfig.Timeout * 3 / 4 // reduce by 25%\n\tif minTimeout := lc.adm.config.RuntimeSecurity.ActivityDumpLoadControlMinDumpTimeout; newTimeout < minTimeout {\n\t\tnewTimeout = minTimeout\n\t}\n\tnew.SetTimeout(newTimeout)\n\n\t// send metric\n\treturn 
lc.sendLoadControllerTriggeredMetric([]string{\"reduction:dump_timeout\"})\n}", "title": "" }, { "docid": "0e3511e4e3b252c7130bc629684cf0b3", "score": "0.40592512", "text": "func (r *TerminalReconciler) Reconcile(req ctrl.Request) (ctrl.Result, error) {\n\tif err := r.increaseCounterForNamespace(req.Namespace); err != nil {\n\t\tr.Log.Info(\"maximum parallel reconciles reached for namespace - requeuing the req\", \"namespace\", req.Namespace, \"name\", req.Name)\n\n\t\treturn ctrl.Result{\n\t\t\tRequeueAfter: wait.Jitter(time.Duration(int64(100*time.Millisecond)), 50), // requeue after 100ms - 5s\n\t\t}, nil\n\t}\n\n\tres, err := r.handleRequest(req)\n\n\tr.decreaseCounterForNamespace(req.Namespace)\n\n\treturn res, err\n}", "title": "" }, { "docid": "883e53364cd98120274e7b85953d92f1", "score": "0.40555525", "text": "func (c *FromCommand) ConnectionRetries() int {\n\tstr := ExpandEnv(c.cmd.args[\"retries\"])\n\tval, err := strconv.Atoi(str)\n\tif err != nil {\n\t\tval = 30\n\t}\n\treturn val\n}", "title": "" }, { "docid": "ca908968cd00dd07c2ebcb6525a9e100", "score": "0.40554032", "text": "func (as *AppStatusHandler) reconcileChefVersion() {\n\t// do it now and then again every 15 mins.s\n\tas.updateChefVersion()\n\tticker := time.NewTicker(time.Minute * 15)\n\tfor {\n\t\tselect {\n\t\tcase <-ticker.C:\n\t\t\tas.updateChefVersion()\n\t\t}\n\t}\n}", "title": "" }, { "docid": "8990c3ae13e3a4580b361a2d0530b902", "score": "0.4054375", "text": "func (r *ReconcileComplianceScan) reconcileReplicatedTailoringConfigMap(scan *compv1alpha1.ComplianceScan, origName, origNs, privName, privNs, scanName string, logger logr.Logger) error {\n\tlogger.Info(\"Reconciling Tailoring ConfigMap\", \"ConfigMap.Name\", origName, \"ConfigMap.Namespace\", origNs)\n\n\torigCM := &corev1.ConfigMap{}\n\torigKey := types.NamespacedName{Name: origName, Namespace: origNs}\n\terr := r.client.Get(context.TODO(), origKey, origCM)\n\t// Tailoring ConfigMap not found\n\tif err != nil && errors.IsNotFound(err) {\n\t\t// We previously had dealt with this issue, just requeue\n\t\tif strings.HasPrefix(scan.Status.ErrorMessage, tailoringNotFoundPrefix) {\n\t\t\treturn common.NewRetriableCtrlErrorWithCustomHandler(func() (reconcile.Result, error) {\n\t\t\t\t// A ConfigMap not being found might be a temporary issue\n\t\t\t\tif r.recorder != nil {\n\t\t\t\t\tr.recorder.Eventf(\n\t\t\t\t\t\tscan, corev1.EventTypeWarning, \"TailoringError\",\n\t\t\t\t\t\t\"Tailoring ConfigMap '%s' not found\", origKey,\n\t\t\t\t\t)\n\t\t\t\t}\n\n\t\t\t\treturn reconcile.Result{RequeueAfter: requeueAfterDefault, Requeue: true}, nil\n\t\t\t}, \"Tailoring ConfigMap not found\")\n\t\t}\n\t\t// A ConfigMap not being found might be a temporary issue (update and let the reconcile loop requeue)\n\t\treturn common.NewRetriableCtrlErrorWithCustomHandler(func() (reconcile.Result, error) {\n\t\t\tif r.recorder != nil {\n\t\t\t\tr.recorder.Eventf(\n\t\t\t\t\tscan, corev1.EventTypeWarning, \"TailoringError\",\n\t\t\t\t\t\"Tailoring ConfigMap '%s' not found\", origKey,\n\t\t\t\t)\n\t\t\t}\n\n\t\t\tlog.Info(\"Updating scan status due to missing Tailoring ConfigMap\")\n\t\t\tscanCopy := scan.DeepCopy()\n\t\t\tscanCopy.Status.ErrorMessage = tailoringNotFoundPrefix + err.Error()\n\t\t\tscanCopy.Status.Result = compv1alpha1.ResultError\n\t\t\tif updateerr := r.client.Status().Update(context.TODO(), scanCopy); updateerr != nil {\n\t\t\t\tlog.Error(updateerr, \"Failed to update a scan\")\n\t\t\t\treturn reconcile.Result{}, updateerr\n\t\t\t}\n\t\t\treturn 
reconcile.Result{RequeueAfter: requeueAfterDefault, Requeue: true}, nil\n\t\t}, \"Tailoring ConfigMap not found\")\n\t} else if err != nil {\n\t\tlog.Error(err, \"Failed to get spec tailoring ConfigMap\", \"ConfigMap.Name\", origName, \"ConfigMap.Namespace\", origNs)\n\t\treturn err\n\t} else if scan.Status.Result == compv1alpha1.ResultError {\n\t\t// We had an error caused by a previously not found configmap. Let's remove it\n\t\tif strings.HasPrefix(scan.Status.ErrorMessage, tailoringNotFoundPrefix) {\n\t\t\treturn common.NewRetriableCtrlErrorWithCustomHandler(func() (reconcile.Result, error) {\n\t\t\t\tlog.Info(\"Updating scan status since Tailoring ConfigMap was now found\")\n\t\t\t\tscanCopy := scan.DeepCopy()\n\t\t\t\tscanCopy.Status.ErrorMessage = \"\"\n\t\t\t\tscanCopy.Status.Result = compv1alpha1.ResultNotAvailable\n\t\t\t\tif updateerr := r.client.Status().Update(context.TODO(), scanCopy); updateerr != nil {\n\t\t\t\t\tlog.Error(updateerr, \"Failed to update a scan\")\n\t\t\t\t\treturn reconcile.Result{}, updateerr\n\t\t\t\t}\n\t\t\t\treturn reconcile.Result{RequeueAfter: requeueAfterDefault, Requeue: true}, nil\n\t\t\t}, \"Tailoring ConfigMap previously not found, was now found\")\n\t\t}\n\t}\n\n\torigData, ok := origCM.Data[\"tailoring.xml\"]\n\tif !ok {\n\t\treturn common.NewNonRetriableCtrlError(\"Tailoring ConfigMap missing `tailoring.xml` key\")\n\t}\n\tif origData == \"\" {\n\t\treturn common.NewNonRetriableCtrlError(\"Tailoring ConfigMap's key `tailoring.xml` is empty\")\n\t}\n\n\tprivCM := &corev1.ConfigMap{}\n\tprivKey := types.NamespacedName{Name: privName, Namespace: privNs}\n\terr = r.client.Get(context.TODO(), privKey, privCM)\n\tif err != nil && errors.IsNotFound(err) {\n\t\tnewCM := &corev1.ConfigMap{}\n\t\tnewCM.SetName(privName)\n\t\tnewCM.SetNamespace(privNs)\n\t\tif newCM.Labels == nil {\n\t\t\tnewCM.Labels = make(map[string]string)\n\t\t}\n\t\tnewCM.Labels[compv1alpha1.ComplianceScanLabel] = scanName\n\t\tnewCM.Labels[compv1alpha1.ScriptLabel] = \"\"\n\t\tif newCM.Data == nil {\n\t\t\tnewCM.Data = make(map[string]string)\n\t\t}\n\t\tnewCM.Data[\"tailoring.xml\"] = origData\n\t\tlogger.Info(\"Creating private Tailoring ConfigMap\", \"ConfigMap.Name\", privName, \"ConfigMap.Namespace\", privNs)\n\t\terr = r.client.Create(context.TODO(), newCM)\n\t\t// Ignore error if CM already exists\n\t\tif err != nil && !errors.IsAlreadyExists(err) {\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t} else if err != nil {\n\t\tlog.Error(err, \"Failed to get private tailoring ConfigMap\", \"ConfigMap.Name\", privName, \"ConfigMap.Namespace\", privNs)\n\t\treturn err\n\t}\n\tprivData, _ := privCM.Data[\"tailoring.xml\"]\n\n\t// privCM needs update\n\tif privData != origData {\n\t\tupdatedCM := privCM.DeepCopy()\n\t\tif updatedCM.Data == nil {\n\t\t\tupdatedCM.Data = make(map[string]string)\n\t\t}\n\t\tif updatedCM.Labels == nil {\n\t\t\tupdatedCM.Labels = make(map[string]string)\n\t\t}\n\t\tupdatedCM.Labels[compv1alpha1.ComplianceScanLabel] = scanName\n\t\tupdatedCM.Labels[compv1alpha1.ScriptLabel] = \"\"\n\t\tupdatedCM.Data[\"tailoring.xml\"] = origData\n\t\tlogger.Info(\"Updating private Tailoring ConfigMap\", \"ConfigMap.Name\", privName, \"ConfigMap.Namespace\", privNs)\n\t\treturn r.client.Update(context.TODO(), updatedCM)\n\t}\n\tlogger.Info(\"Private Tailoring ConfigMap is up-to-date\", \"ConfigMap.Name\", privName, \"ConfigMap.Namespace\", privNs)\n\treturn nil\n}", "title": "" }, { "docid": "deeebda7c30fb4f555e6376e749a8fa7", "score": "0.4051281", "text": "func (fs 
*flowControlConnStats) configure(mtu, shared uint64) {\n\tfs.mtu = mtu\n\tfs.shared, fs.maxShared = shared, shared\n\tbytesPerCounter := binaryEncodeUintSize(fs.bytesBufferedPerFlow)\n\tfs.releaseMessageLimit = int(mtu) / (bytesPerFlowID + bytesPerCounter)\n}", "title": "" }, { "docid": "91a65cbde3f520e890ce0386830ed63e", "score": "0.40413094", "text": "func refineRetentionConfiguration(ctx context.Context, store *basestore.Store, repositoryID int, maxAgeForNonStaleBranches, maxAgeForNonStaleTags time.Duration) (_, _ time.Duration, err error) {\n\trows, err := store.Query(ctx, sqlf.Sprintf(retentionConfigurationQuery, repositoryID))\n\tif err != nil {\n\t\treturn 0, 0, err\n\t}\n\tdefer func() { err = basestore.CloseRows(rows, err) }()\n\n\tfor rows.Next() {\n\t\tvar v1, v2 int\n\t\tif err := rows.Scan(&v1, &v2); err != nil {\n\t\t\treturn 0, 0, err\n\t\t}\n\n\t\tmaxAgeForNonStaleBranches = time.Second * time.Duration(v1)\n\t\tmaxAgeForNonStaleTags = time.Second * time.Duration(v2)\n\t}\n\n\treturn maxAgeForNonStaleBranches, maxAgeForNonStaleTags, nil\n}", "title": "" }, { "docid": "0d1b8a9eca224cc31ea6490448752247", "score": "0.40388384", "text": "func (o *SyncCmOnDatalakeClusterParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "cfe88da9915527de2ab3f59043109d6e", "score": "0.4036651", "text": "func (r *ClusterReconciler) Reconcile(req ctrl.Request) (ctrl.Result, error) {\n\n\tdefer func() {\n\t\tif err := recover(); err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t}()\n\n\tctx := context.WithValue(context.Background(), requestId, uuid.New())\n\tlog := log.Logger(ctx, \"controllers\", \"cluster_controller\", \"Reconcile\")\n\tlog.WithValues(\"cluster\", req.NamespacedName)\n\tlog.Info(\"Start of the request\")\n\t//Get the resource\n\tvar cluster managerv1alpha1.Cluster\n\tif err := r.Get(ctx, req.NamespacedName, &cluster); err != nil {\n\t\treturn ctrl.Result{}, ignoreNotFound(err)\n\t}\n\n\t// Retrieve k8s secret\n\t// Get the \"best\" Bearer token\n\t// Get the ManagedCluster k8s client\n\n\tstate := managerv1alpha1.Warning\n\n\tif cluster.Status.RetryCount > 3 {\n\t\tstate = managerv1alpha1.Error\n\t}\n\n\tsecret, err := r.K8sClient.GetK8sSecret(ctx, cluster.Spec.Config.BearerTokenSecret, cluster.ObjectMeta.Namespace)\n\tif err != nil {\n\t\tlog.Error(err, \"unable to retrieve the bearer token for the given cluster\")\n\t\tdesc := fmt.Sprintf(\"unable to retrieve the bearer token for the given cluster due to error %s\", err.Error())\n\t\tr.Recorder.Event(&cluster, v1.EventTypeWarning, string(state), desc)\n\t\treturn r.UpdateStatus(ctx, &cluster, managerv1alpha1.ClusterStatus{RetryCount: cluster.Status.RetryCount + 1, ErrorDescription: desc}, state, errRequeueTime)\n\t}\n\tcfg, err := utils.PrepareK8sRestConfigFromClusterCR(ctx, &cluster, secret)\n\tif err != nil {\n\t\tlog.Error(err, \"unable to prepare the rest config for the target cluster\", \"cluster\", cluster.Spec.Name)\n\t\tdesc := fmt.Sprintf(\"unable to prepare the rest config for the target cluster due to error %s\", err.Error())\n\t\tr.Recorder.Event(&cluster, v1.EventTypeWarning, string(state), desc)\n\t\treturn r.UpdateStatus(ctx, &cluster, managerv1alpha1.ClusterStatus{RetryCount: cluster.Status.RetryCount + 1, ErrorDescription: desc}, state, errRequeueTime)\n\t}\n\n\t// Isit being deleted?\n\tif cluster.ObjectMeta.DeletionTimestamp.IsZero() {\n\t\t//Good. 
This is not Delete use case\n\t\t//Lets check if this is very first time use case\n\t\tif !utils.ContainsString(cluster.ObjectMeta.Finalizers, finalizerName) {\n\t\t\tlog.Info(\"New cluster resource. Adding the finalizer\", \"finalizer\", finalizerName)\n\t\t\tcluster.ObjectMeta.Finalizers = append(cluster.ObjectMeta.Finalizers, finalizerName)\n\t\t\tr.UpdateMeta(ctx, &cluster)\n\t\t}\n\t\treturn r.HandleReconcile(ctx, req, &cluster, cfg)\n\n\t} else {\n\t\t//oh oh.. This is delete use case\n\t\t//Lets make sure to clean up the iam role\n\t\tif cluster.Status.RetryCount != 0 {\n\t\t\tcluster.Status.RetryCount = cluster.Status.RetryCount + 1\n\t\t}\n\t\tlog.Info(\"Cluster delete request\")\n\t\tif err := removeRBACInManagedCluster(ctx, cfg); err != nil {\n\t\t\tlog.Error(err, \"Unable to delete the cluster\")\n\t\t\tr.UpdateStatus(ctx, &cluster, managerv1alpha1.ClusterStatus{RetryCount: cluster.Status.RetryCount + 1, ErrorDescription: err.Error()}, managerv1alpha1.Error)\n\t\t\tr.Recorder.Event(&cluster, v1.EventTypeWarning, string(managerv1alpha1.Error), \"unable to delete the cluster due to \"+err.Error())\n\t\t\treturn ctrl.Result{RequeueAfter: 30 * time.Second}, nil\n\t\t}\n\n\t\t// Ok. Lets delete the finalizer so controller can delete the custom object\n\t\tlog.Info(\"Removing finalizer from Cluster\")\n\t\tcluster.ObjectMeta.Finalizers = utils.RemoveString(cluster.ObjectMeta.Finalizers, finalizerName)\n\t\tr.UpdateMeta(ctx, &cluster)\n\t\tlog.Info(\"Successfully deleted cluster\")\n\t\tr.Recorder.Event(&cluster, v1.EventTypeNormal, \"Deleted\", \"Successfully deleted cluster\")\n\t}\n\treturn ctrl.Result{}, nil\n}", "title": "" }, { "docid": "c6493d627bc4eb9295140bedc13b4ee0", "score": "0.4035794", "text": "func (j *DSRocketchat) CalculateTimeToReset(ctx *Ctx, rateLimit, rateLimitReset int) (seconds int) {\n\tseconds = (int(int64(rateLimitReset)-(time.Now().UnixNano()/int64(1000000))) / 1000) + 1\n\tif seconds < 0 {\n\t\tseconds = 0\n\t}\n\tif ctx.Debug > 1 {\n\t\tPrintf(\"CalculateTimeToReset(%d,%d) -> %d\\n\", rateLimit, rateLimitReset, seconds)\n\t}\n\treturn\n}", "title": "" }, { "docid": "0fba9935a76f182a0a8138d281b6ecef", "score": "0.40280378", "text": "func (o *GetConfigsClusterParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "bf7e5ccf1c718a2fd1bc95fe1efa04d2", "score": "0.40268227", "text": "func (o *NetworkPruneParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "f766fab3000dbbd3cb63f63e57d7e11f", "score": "0.4023174", "text": "func (o *CustomerGatewayUpdateOwnershipParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "1bda647e50278c03f01460969bdaf8dd", "score": "0.40230048", "text": "func (_m *SQLDatabase) SetConnMaxLifetime(d time.Duration) {\n\t_m.Called(d)\n}", "title": "" }, { "docid": "7b4a0a0f4c044003f988ebee79bc4358", "score": "0.40178496", "text": "func (r *MilvusClusterReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {\n\tif !config.IsDebug() {\n\t\tdefer func() {\n\t\t\tif err := recover(); err != nil {\n\t\t\t\tr.logger.Error(err.(error), \"reconcile panic\")\n\t\t\t}\n\t\t}()\n\t}\n\n\tmilvuscluster := &milvusiov1alpha1.MilvusCluster{}\n\tif err := r.Get(ctx, req.NamespacedName, milvuscluster); err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\t// The resource may have be deleted after reconcile request coming in\n\t\t\t// Reconcile is done\n\t\t\treturn ctrl.Result{}, 
nil\n\t\t}\n\n\t\treturn ctrl.Result{}, fmt.Errorf(\"error get milvus cluster: %w\", err)\n\t}\n\n\t// Finalize\n\tif milvuscluster.ObjectMeta.DeletionTimestamp.IsZero() {\n\t\tif !controllerutil.ContainsFinalizer(milvuscluster, MCFinalizerName) {\n\t\t\tcontrollerutil.AddFinalizer(milvuscluster, MCFinalizerName)\n\t\t\tif err := r.Update(ctx, milvuscluster); err != nil {\n\t\t\t\treturn ctrl.Result{}, err\n\t\t\t}\n\t\t}\n\n\t} else {\n\t\tif controllerutil.ContainsFinalizer(milvuscluster, MCFinalizerName) {\n\t\t\tif err := r.Finalize(ctx, *milvuscluster); err != nil {\n\t\t\t\treturn ctrl.Result{}, err\n\t\t\t}\n\t\t\tcontrollerutil.RemoveFinalizer(milvuscluster, MCFinalizerName)\n\t\t\tif err := r.Update(ctx, milvuscluster); err != nil {\n\t\t\t\treturn ctrl.Result{}, err\n\t\t\t}\n\t\t}\n\t\t// Stop reconciliation as the item is being deleted\n\t\treturn ctrl.Result{}, nil\n\t}\n\n\t// Start reconcile\n\tr.logger.Info(\"start reconcile\")\n\told := milvuscluster.DeepCopy()\n\n\tif err := r.SetDefault(ctx, milvuscluster); err != nil {\n\t\treturn ctrl.Result{}, err\n\t}\n\n\tif !IsEqual(old.Spec, milvuscluster.Spec) {\n\t\treturn ctrl.Result{}, r.Update(ctx, milvuscluster)\n\t}\n\n\tif err := r.ReconcileAll(ctx, *milvuscluster); err != nil {\n\t\treturn ctrl.Result{}, err\n\t}\n\n\tif err := r.UpdateStatus(ctx, milvuscluster); err != nil {\n\t\treturn ctrl.Result{}, err\n\t}\n\n\tif milvuscluster.Status.Status == v1alpha1.StatusUnHealthy {\n\t\treturn ctrl.Result{RequeueAfter: 30 * time.Second}, nil\n\t}\n\n\tif config.IsDebug() {\n\t\tdiff, err := client.MergeFrom(old).Data(milvuscluster)\n\t\tif err != nil {\n\t\t\tr.logger.Info(\"Update diff\", \"diff\", string(diff))\n\t\t}\n\t}\n\n\treturn ctrl.Result{}, nil\n}", "title": "" }, { "docid": "9fc3845ae267a30df5288a3ae01c45b2", "score": "0.40167415", "text": "func (o *ListRepositoryImagesParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "02e1199f934004d8e34ebfa99526c874", "score": "0.40046716", "text": "func setRandomizedElectionTimeout(r *raft, v int) {\n\tr.randomizedElectionTimeout = v\n}", "title": "" }, { "docid": "0661e81ebc96fcac55fcff4ef93a9010", "score": "0.4003269", "text": "func Remote(s *scen.Scenario, stdout, stderr io.Writer, addr string) (float64, error) {\n\treturn RemoteTimeout(s, stdout, stderr, addr, DefaultTimeout)\n}", "title": "" }, { "docid": "39a3d57d16c8a2bbfbd2cc0dd14e81a8", "score": "0.40012679", "text": "func GetReconcilitationConfig() *ReconciliationConfig {\n\t// default reconciliation loop time is 2 minutes\n\ttimeString := \"120\"\n\ttimeInt, _ := strconv.Atoi(timeString)\n\treconciliationTime := time.Second * time.Duration(timeInt)\n\treturn &ReconciliationConfig{\n\t\tTime: reconciliationTime,\n\t}\n}", "title": "" }, { "docid": "a220ad040a257147fa6e00d3c1633580", "score": "0.40004355", "text": "func (r *MultiClusterObservabilityReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {\n\treqLogger := log.WithValues(\"Request.Namespace\", req.Namespace, \"Request.Name\", req.Name)\n\treqLogger.Info(\"Reconciling MultiClusterObservability\")\n\n\t// Fetch the MultiClusterObservability instance\n\tinstance := &mcov1beta1.MultiClusterObservability{}\n\terr := r.Client.Get(context.TODO(), types.NamespacedName{\n\t\tName: config.GetMonitoringCRName(),\n\t}, instance)\n\tif err != nil {\n\t\tif apierrors.IsNotFound(err) {\n\t\t\t// Request object not found, could have been deleted after reconcile request.\n\t\t\t// Owned objects are 
automatically garbage collected. For additional cleanup logic use finalizers.\n\t\t\t// Return and don't requeue\n\t\t\treturn ctrl.Result{}, nil\n\t\t}\n\t\t// Error reading the object - requeue the request.\n\t\treturn ctrl.Result{}, err\n\t}\n\n\t// Init finalizers\n\tisTerminating, err := r.initFinalization(instance)\n\tif err != nil {\n\t\treturn ctrl.Result{}, err\n\t} else if isTerminating {\n\t\treqLogger.Info(\"MCO instance is in Terminating status, skip the reconcile\")\n\t\treturn ctrl.Result{}, err\n\t}\n\t//read image manifest configmap to be used to replace the image for each component.\n\tif _, err = config.ReadImageManifestConfigMap(r.Client); err != nil {\n\t\treturn ctrl.Result{}, err\n\t}\n\n\t// Do not reconcile objects if this instance of mch is labeled \"paused\"\n\tif config.IsPaused(instance.GetAnnotations()) {\n\t\treqLogger.Info(\"MCO reconciliation is paused. Nothing more to do.\")\n\t\treturn ctrl.Result{}, nil\n\t}\n\n\tstorageClassSelected, err := getStorageClass(instance, r.Client)\n\tif err != nil {\n\t\treturn ctrl.Result{}, err\n\t}\n\n\t//instance.Namespace = config.GetDefaultNamespace()\n\tinstance.Spec.StorageConfig.StatefulSetStorageClass = storageClassSelected\n\t//Render the templates with a specified CR\n\trenderer := rendering.NewRenderer(instance)\n\ttoDeploy, err := renderer.Render(r.Client)\n\tif err != nil {\n\t\treqLogger.Error(err, \"Failed to render multiClusterMonitoring templates\")\n\t\treturn ctrl.Result{}, err\n\t}\n\tdeployer := deploying.NewDeployer(r.Client)\n\t//Deploy the resources\n\tfor _, res := range toDeploy {\n\t\tif res.GetNamespace() == config.GetDefaultNamespace() {\n\t\t\tif err := controllerutil.SetControllerReference(instance, res, r.Scheme); err != nil {\n\t\t\t\treqLogger.Error(err, \"Failed to set controller reference\")\n\t\t\t}\n\t\t}\n\t\tif err := deployer.Deploy(res); err != nil {\n\t\t\treqLogger.Error(err, fmt.Sprintf(\"Failed to deploy %s %s/%s\",\n\t\t\t\tres.GetKind(), config.GetDefaultNamespace(), res.GetName()))\n\t\t\treturn ctrl.Result{}, err\n\t\t}\n\t}\n\n\t// expose observatorium api gateway\n\tresult, err := GenerateAPIGatewayRoute(r.Client, r.Scheme, instance)\n\tif result != nil {\n\t\treturn *result, err\n\t}\n\n\t// create the certificates\n\terr = createObservabilityCertificate(r.Client, r.Scheme, instance)\n\tif err != nil {\n\t\treturn ctrl.Result{}, err\n\t}\n\n\t// create an Observatorium CR\n\tresult, err = GenerateObservatoriumCR(r.Client, r.Scheme, instance)\n\tif result != nil {\n\t\treturn *result, err\n\t}\n\n\t// generate grafana datasource to point to observatorium api gateway\n\tresult, err = GenerateGrafanaDataSource(r.Client, r.Scheme, instance)\n\tif result != nil {\n\t\treturn *result, err\n\t}\n\n\tenableManagedCluster, found := os.LookupEnv(\"ENABLE_MANAGED_CLUSTER\")\n\tif !found || enableManagedCluster != \"false\" {\n\t\t// create the placementrule\n\t\terr = createPlacementRule(r.Client, r.Scheme, instance)\n\t\tif err != nil {\n\t\t\treturn ctrl.Result{}, err\n\t\t}\n\t}\n\n\tresult, err = r.UpdateStatus(instance)\n\tif result != nil {\n\t\treturn *result, err\n\t}\n\n\treturn ctrl.Result{}, nil\n}", "title": "" }, { "docid": "75d6f6ccd30b778347415cf9202e121d", "score": "0.3999379", "text": "func (s *PerforceDepotSyncer) Fetch(ctx context.Context, remoteURL *vcs.URL, dir GitDir) error {\n\tusername, password, host, _, err := decomposePerforceRemoteURL(remoteURL)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"decompose\")\n\t}\n\n\terr = p4pingWithTrust(ctx, host, 
username, password)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"ping with trust\")\n\t}\n\n\t// Example: git p4 sync --max-changes 1000\n\targs := []string{\"p4\", \"sync\"}\n\tif s.MaxChanges > 0 {\n\t\targs = append(args, \"--max-changes\", strconv.Itoa(s.MaxChanges))\n\t}\n\n\tcmd := exec.CommandContext(ctx, \"git\", args...)\n\tcmd.Env = append(os.Environ(),\n\t\t\"P4PORT=\"+host,\n\t\t\"P4USER=\"+username,\n\t\t\"P4PASSWD=\"+password,\n\t)\n\tdir.Set(cmd)\n\tif output, err := runWith(ctx, cmd, false, nil); err != nil {\n\t\treturn errors.Wrapf(err, \"failed to update with output %q\", newURLRedactor(remoteURL).redact(string(output)))\n\t}\n\n\t// Force update \"master\" to \"refs/remotes/p4/master\" where changes are synced into\n\tcmd = exec.CommandContext(ctx, \"git\", \"branch\", \"-f\", \"master\", \"refs/remotes/p4/master\")\n\tcmd.Env = append(os.Environ(),\n\t\t\"P4PORT=\"+host,\n\t\t\"P4USER=\"+username,\n\t\t\"P4PASSWD=\"+password,\n\t)\n\tdir.Set(cmd)\n\tif output, err := runWith(ctx, cmd, false, nil); err != nil {\n\t\treturn errors.Wrapf(err, \"failed to force update branch with output %q\", string(output))\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "9512a5ba4fd1ee25e96b1bc44822216f", "score": "0.39955246", "text": "func TestMaxTransfer(t *testing.T) {\n\tctx := context.Background()\n\tctx, ci := fs.AddConfig(ctx)\n\tci.MaxTransfer = 3 * 1024\n\tci.Transfers = 1\n\tci.Checkers = 1\n\tci.CutoffMode = fs.CutoffModeHard\n\n\ttest := func(t *testing.T, cutoff fs.CutoffMode) {\n\t\tr := fstest.NewRun(t)\n\t\tci.CutoffMode = cutoff\n\n\t\tif r.Fremote.Name() != \"local\" {\n\t\t\tt.Skip(\"This test only runs on local\")\n\t\t}\n\n\t\t// Create file on source\n\t\tfile1 := r.WriteFile(\"file1\", string(make([]byte, 5*1024)), t1)\n\t\tfile2 := r.WriteFile(\"file2\", string(make([]byte, 2*1024)), t1)\n\t\tfile3 := r.WriteFile(\"file3\", string(make([]byte, 3*1024)), t1)\n\t\tr.CheckLocalItems(t, file1, file2, file3)\n\t\tr.CheckRemoteItems(t)\n\n\t\taccounting.GlobalStats().ResetCounters()\n\n\t\terr := Sync(ctx, r.Fremote, r.Flocal, false)\n\t\texpectedErr := fserrors.FsError(accounting.ErrorMaxTransferLimitReachedFatal)\n\t\tif cutoff != fs.CutoffModeHard {\n\t\t\texpectedErr = accounting.ErrorMaxTransferLimitReachedGraceful\n\t\t}\n\t\tfserrors.Count(expectedErr)\n\t\tassert.Equal(t, expectedErr, err)\n\t}\n\n\tt.Run(\"Hard\", func(t *testing.T) { test(t, fs.CutoffModeHard) })\n\tt.Run(\"Soft\", func(t *testing.T) { test(t, fs.CutoffModeSoft) })\n\tt.Run(\"Cautious\", func(t *testing.T) { test(t, fs.CutoffModeCautious) })\n}", "title": "" }, { "docid": "90e691a534dc324997b49b4fa69b3c63", "score": "0.39948687", "text": "func (o *CopyRecipeToMyRecipesWithChangesParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "594b610e7864f35842ed5e60252df80a", "score": "0.39895433", "text": "func (o *UpdateSyncRuleParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "87670653eaf09f4bcb420af0a4ae2b4d", "score": "0.3989361", "text": "func RedisOptMaxIdle(maxIdle int) RedisOpt {\n\treturn func(p *redis.Pool) {\n\t\tp.MaxIdle = maxIdle\n\t}\n}", "title": "" }, { "docid": "537c5d0580c99d401458ad35ac6816a9", "score": "0.398188", "text": "func (o *ListRepoLogsParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "b6b7525ce37b95a8937765146a49c0ad", "score": "0.39804026", "text": "func (o *PostLolGameflowV1ReconnectParams) SetTimeout(timeout 
time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "c554098e460f0fcc0ce1c8d08ea4ca2b", "score": "0.39731026", "text": "func (drj *DelRepoJob) MaxFails() uint {\n\treturn 3\n}", "title": "" }, { "docid": "597cfa2c6d948a7252566262466d0b18", "score": "0.3971562", "text": "func ConfigurableRacer(a, b string, timeout time.Duration) (winner string, error error) {\n\tselect {\n\tcase <-ping(a):\n\t\treturn a, nil\n\tcase <-ping(b):\n\t\treturn b, nil\n\tcase <-time.After(timeout):\n\t\treturn \"\", fmt.Errorf(\"timed out waiting for %s and %s\", a, b)\n\t}\n}", "title": "" }, { "docid": "0c9b3d58118133af1367cf1dc5daa79e", "score": "0.39558658", "text": "func (o ProxyDefaultTargetGroupConnectionPoolConfigPtrOutput) ConnectionBorrowTimeout() pulumi.IntPtrOutput {\n\treturn o.ApplyT(func(v *ProxyDefaultTargetGroupConnectionPoolConfig) *int {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.ConnectionBorrowTimeout\n\t}).(pulumi.IntPtrOutput)\n}", "title": "" }, { "docid": "a44d7b5dc03f72d8ff491c83ad5370c1", "score": "0.3952748", "text": "func (c GraphQLCost) RetryAfterSeconds() float64 {\n\tvar diff float64\n\n\tif c.ActualQueryCost != nil {\n\t\tdiff = c.ThrottleStatus.CurrentlyAvailable - float64(*c.ActualQueryCost)\n\t} else {\n\t\tdiff = c.ThrottleStatus.CurrentlyAvailable - float64(c.RequestedQueryCost)\n\t}\n\n\tif diff < 0 {\n\t\treturn -diff / c.ThrottleStatus.RestoreRate\n\t}\n\n\treturn 0\n}", "title": "" }, { "docid": "248d22d5d64870506fec0bea6a459e88", "score": "0.39504033", "text": "func UpdateIdleConnectionsTimeout(client *http.Client) error {\n\thttpTransport, ok := client.Transport.(*http.Transport)\n\tif ok {\n\t\thttpTransport.IdleConnTimeout = 10 * time.Second\n\t\treturn nil\n\t}\n\n\toauth2Transport, ok := client.Transport.(*oauth2.Transport)\n\tif ok && oauth2Transport.Base == nil {\n\t\toauth2Transport.Base = DefaultTransport\n\n\t\treturn nil\n\t}\n\n\treturn ErrCannotUpdateIdleConnTimeout\n}", "title": "" }, { "docid": "bf1494b5304df2715f28a06d2d055cc1", "score": "0.39499488", "text": "func UseMaxConnections(max int) {\n\tsemaphoreMaxConnections = make(chan struct{}, max)\n}", "title": "" }, { "docid": "8dc505dd6c642460e20e5320b35cb6e7", "score": "0.3948382", "text": "func CfgMaxLifeTime(lifeTime int64) ManagerConfigOpt {\n\treturn func(config *ManagerConfig) {\n\t\tconfig.Maxlifetime = lifeTime\n\t}\n}", "title": "" }, { "docid": "7811c500cc9668f71160b7fb7bdae002", "score": "0.39404213", "text": "func (r *MachineTester) Reconcile(req ctrl.Request) (ctrl.Result, error) {\n\treturn ctrl.Result{}, nil\n}", "title": "" }, { "docid": "1f5d540c2a6cd47d4703ea58ad7d8fb3", "score": "0.39391172", "text": "func (p *WorkPool) SetTimeout(timeout time.Duration) { // 设置超时时间\n\tp.timeout = timeout\n}", "title": "" }, { "docid": "fc7f8f60e512d8bd43876d65cfa0b7ec", "score": "0.39377895", "text": "func (r *TransferReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {\n\ttransfer := accountv1.Transfer{}\n\tif err := r.Get(ctx, req.NamespacedName, &transfer); err != nil {\n\t\treturn ctrl.Result{}, client.IgnoreNotFound(err)\n\t}\n\ttransfer.Spec.From = getUsername(transfer.Namespace)\n\tif time.Since(transfer.CreationTimestamp.Time) > time.Minute*3 {\n\t\treturn ctrl.Result{}, r.Delete(ctx, &transfer)\n\t}\n\tpipeLine := []func(ctx context.Context, transfer *accountv1.Transfer) error{\n\t\tr.check,\n\t\tr.TransferOutSaver,\n\t\tr.TransferInSaver,\n\t}\n\tfor _, f := range pipeLine {\n\t\tif err := f(ctx, &transfer); err != nil 
{\n\t\t\ttransfer.Status.Reason = err.Error()\n\t\t\ttransfer.Status.Progress = accountv1.TransferStateFailed\n\t\t\tbreak\n\t\t}\n\t}\n\tif transfer.Status.Progress != accountv1.TransferStateFailed {\n\t\ttransfer.Status.Progress = accountv1.TransferStateCompleted\n\t}\n\tif err := r.Status().Update(ctx, &transfer); err != nil {\n\t\treturn ctrl.Result{}, fmt.Errorf(\"update transfer status failed: %w\", err)\n\t}\n\treturn ctrl.Result{RequeueAfter: 3 * time.Minute}, nil\n}", "title": "" } ]
4209a998cf410900a5028f831fe4e78d
Convert_v1alpha1_FilesystemServingConfig_To_webhook_FilesystemServingConfig is an autogenerated conversion function.
[ { "docid": "0c6e4a2e6d65cbc5ef8c7dd6c381f9c8", "score": "0.8487815", "text": "func Convert_v1alpha1_FilesystemServingConfig_To_webhook_FilesystemServingConfig(in *v1alpha1.FilesystemServingConfig, out *webhook.FilesystemServingConfig, s conversion.Scope) error {\n\treturn autoConvert_v1alpha1_FilesystemServingConfig_To_webhook_FilesystemServingConfig(in, out, s)\n}", "title": "" } ]
[ { "docid": "164d79e1a662034179589d8a028281ae", "score": "0.6143104", "text": "func Convert_webhook_FilesystemServingConfig_To_v1alpha1_FilesystemServingConfig(in *webhook.FilesystemServingConfig, out *v1alpha1.FilesystemServingConfig, s conversion.Scope) error {\n\treturn autoConvert_webhook_FilesystemServingConfig_To_v1alpha1_FilesystemServingConfig(in, out, s)\n}", "title": "" }, { "docid": "6fccab5d3cfd06d01fcf5be37af22018", "score": "0.50939256", "text": "func Convert_v1alpha1_DynamicServingConfig_To_webhook_DynamicServingConfig(in *v1alpha1.DynamicServingConfig, out *webhook.DynamicServingConfig, s conversion.Scope) error {\n\treturn autoConvert_v1alpha1_DynamicServingConfig_To_webhook_DynamicServingConfig(in, out, s)\n}", "title": "" }, { "docid": "be0b366fda34a7f8615a0c94999a4cb2", "score": "0.5049684", "text": "func withFilesystemConfigService(c *router.Context, next router.Handler) {\n\tiface, err := filesystem.New(\"../devcfg\")\n\tif err != nil {\n\t\tc.Writer.WriteHeader(http.StatusInternalServerError)\n\t\tlogging.WithError(err).Errorf(c.Context, \"Failed to load local config files.\")\n\t\treturn\n\t}\n\tc.Context = config.WithConfigService(c.Context, iface)\n\tnext(c)\n}", "title": "" }, { "docid": "07188bde261b9b347e617be87ebfe295", "score": "0.44723174", "text": "func (cs *ConfigSchema) ToConfig() (*Config, error) {\n\terrs := multierror.NewMultiError(\"config\")\n\n\tmessageBusServers := []cfConfig.MessageBusServer{}\n\tfor _, messageBusServer := range cs.NatsServers {\n\t\tserver, err := messageBusServer.ToConfig()\n\t\tif err != nil {\n\t\t\terrs.Add(err)\n\t\t} else {\n\t\t\tmessageBusServers = append(messageBusServers, server)\n\t\t}\n\t}\n\n\tif len(cs.NatsServers) == 0 {\n\t\terrs.Add(missingOptionError(\"nats_servers\", \"at least 1 nats server is required\"))\n\t}\n\n\tif len(cs.RoutingApiUrl) == 0 {\n\t\terrs.Add(missingOptionError(\"routing_api_url\", \"can not be blank\"))\n\t}\n\n\tif len(cs.CloudFoundryAppDomainName) == 0 {\n\t\terrs.Add(missingOptionError(\"app_domain_name\", \"can not be blank\"))\n\t}\n\n\tif len(cs.UaaApiUrl) == 0 {\n\t\terrs.Add(missingOptionError(\"uaa_api_url\", \"can not be blank\"))\n\t}\n\n\tif len(cs.RoutingApiUsername) == 0 {\n\t\terrs.Add(missingOptionError(\"routing_api_username\", \"can not be blank\"))\n\t}\n\n\tif len(cs.RoutingApiClientSecret) == 0 {\n\t\terrs.Add(missingOptionError(\"routing_api_client_secret\", \"can not be blank\"))\n\t}\n\n\tif len(cs.KubeConfigPath) == 0 {\n\t\terrs.Add(missingOptionError(\"kube_config_path\", \"can not be blank\"))\n\t}\n\n\tcfg := &Config{\n\t\tNatsServers: messageBusServers,\n\t\tRoutingApiUrl: cs.RoutingApiUrl,\n\t\tCloudFoundryAppDomainName: cs.CloudFoundryAppDomainName,\n\t\tUaaApiUrl: cs.UaaApiUrl,\n\t\tRoutingApiUsername: cs.RoutingApiUsername,\n\t\tRoutingApiClientSecret: cs.RoutingApiClientSecret,\n\t\tSkipTlsVerification: cs.SkipTlsVerification,\n\t\tKubeConfigPath: cs.KubeConfigPath,\n\t}\n\n\tif errs.Length() > 0 {\n\t\treturn nil, errs\n\t}\n\n\treturn cfg, nil\n}", "title": "" }, { "docid": "4f7d349b90102228e2cc76594a20b456", "score": "0.43808717", "text": "func (c *Controller) ServeConfig(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\") // Set the header to tell the requester what format the daya is in\n\n\tvars := mux.Vars(r) // Get the parameteres in the request. 
In this case, the service name only\n\tservice, ok := vars[\"service\"]\n\tif !ok {\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\tfmt.Fprintf(w, \"error\")\n\t}\n\n\tconfig, err := c.Config.GetConfig(service)\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tfmt.Fprintf(w, fmt.Sprintf(\"%v\", err))\n\t}\n\n\trsp, err := json.Marshal(&config)\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tfmt.Fprintf(w, fmt.Sprintf(\"%v\", err))\n\t}\n\n\tw.WriteHeader(http.StatusOK)\n\tfmt.Fprintf(w, string(rsp))\n}", "title": "" }, { "docid": "19641a7c7fe23a4738567e6b52239425", "score": "0.43121415", "text": "func (p *Provider) Serve() {\n\tif err := p.EnsureConfigFile(); err != nil {\n\t\tp.broadcastError(\"Error loading config file\", err)\n\t\treturn\n\t}\n\twatcher, err := fsnotify.NewWatcher()\n\tif err != nil {\n\t\tp.broadcastError(\"Unable to setup FS watcher\", err)\n\t\treturn\n\t}\n\tdefer watcher.Close()\n\n\terr = watcher.Add(p.configFile)\n\tif err != nil {\n\t\tp.broadcastError(\"Unable to setup FS watcher\", err)\n\t\treturn\n\t}\n\tdefer close(p.stopDone)\n\n\tp.logger.Info(\"Running\")\n\tclose(p.ready)\n\n\tfor {\n\t\tselect {\n\t\tcase event, ok := <-watcher.Events:\n\t\t\tif !ok {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif event.Op&fsnotify.Write == fsnotify.Write {\n\t\t\t\tp.eventBatcher.Notify()\n\t\t\t}\n\t\tcase err, ok := <-watcher.Errors:\n\t\t\tif !ok {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tp.broadcastError(\"FS watcher error\", err)\n\t\tcase <-p.eventBatcher.BatchedEvents():\n\t\t\t// Ignore write event if it was an internal write\n\t\t\tswapped := atomic.CompareAndSwapInt32(&p.internalWriteEvent, 1, 0)\n\t\t\tif swapped {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tp.logger.Info(\"Detected config file change\")\n\t\t\terr = p.readConfig()\n\t\t\tif err != nil {\n\t\t\t\tp.broadcastError(\"Unable to read config file\", err)\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tp.logger.Info(\"Successfully applied config change\")\n\t\tcase <-p.stop:\n\t\t\tp.logger.Info(\"Stopping...\")\n\t\t\treturn\n\t\t}\n\t}\n}", "title": "" }, { "docid": "91c8db91e2ffbda0d0635d4f49f2e203", "score": "0.4303874", "text": "func (f *ConfigFlags) WithWrapConfigFn(wrapConfigFn func(*rest.Config) *rest.Config) *ConfigFlags {\n\tf.WrapConfigFn = wrapConfigFn\n\treturn f\n}", "title": "" }, { "docid": "27a598ea4b4e8b3eb82398bc8eed68c3", "score": "0.42814842", "text": "func (a *Agent) SerfConfig() *serf.Config {\n\treturn a.conf\n}", "title": "" }, { "docid": "27a598ea4b4e8b3eb82398bc8eed68c3", "score": "0.42814842", "text": "func (a *Agent) SerfConfig() *serf.Config {\n\treturn a.conf\n}", "title": "" }, { "docid": "f4bd10ca820a7e08738fb41cec2b4372", "score": "0.42556906", "text": "func (f *Conf) Serve() error {\n\treturn nil\n}", "title": "" }, { "docid": "a9ee1c81741917f92cda59a3409c31e4", "score": "0.42017385", "text": "func LookupServingConfig(ctx *pulumi.Context, args *LookupServingConfigArgs, opts ...pulumi.InvokeOption) (*LookupServingConfigResult, error) {\n\topts = internal.PkgInvokeDefaultOpts(opts)\n\tvar rv LookupServingConfigResult\n\terr := ctx.Invoke(\"google-native:retail/v2alpha:getServingConfig\", args, &rv, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &rv, nil\n}", "title": "" }, { "docid": "1d453748ad381c3d47c7dd8c38283549", "score": "0.4029727", "text": "func GetServingConfig() cserving.ServingV1alpha1Interface {\n\tconfig, err := rest.InClusterConfig()\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to get in cluster config: %s\", err)\n\t}\n\tclient, err 
:= serving.NewForConfig(config)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to setup serving client: %s\", err)\n\t}\n\treturn client\n}", "title": "" }, { "docid": "5623c06331366e89aef35f7d36ed2aeb", "score": "0.40119854", "text": "func (b *Builder) WithFilesystem(fs fs.FileSystem) *Builder {\n\tb.fs = fs\n\treturn b\n}", "title": "" }, { "docid": "e9e2c7ab10e10a12e784f1f4a8740f25", "score": "0.40063772", "text": "func APIServerServingCerts() *APIServerServingCertsApplyConfiguration {\n\treturn &APIServerServingCertsApplyConfiguration{}\n}", "title": "" }, { "docid": "d62a34351073175c611036df42603ac5", "score": "0.40045673", "text": "func (c *IPFSConfig) WriteConfig(path string) error {\n\n\t// Convert config to map\n\tvar mapped map[string]interface{}\n\terr := mapstructure.Decode(c, &mapped)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Remove PrivKey\n\tif _, ok := mapped[\"Identity\"]; ok {\n\t\tidentity := mapped[\"Identity\"].(map[string]interface{})\n\t\tif key, ok := identity[\"PrivKey\"]; ok && key == \"\" {\n\t\t\tdelete(identity, \"PrivKey\")\n\t\t}\n\t}\n\n\t// Marshal JSON indented\n\tbuf, err := json.MarshalIndent(mapped, \"\", \" \")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Write config file\n\terr = ioutil.WriteFile(path, buf, 0600)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error writing config file: %s\", err.Error())\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "a3fa66ecbdde50a7cefdcbfab944cc6a", "score": "0.39480627", "text": "func (f FilesystemConfiguration) Validate() error {\n\tif f.WriteBufferSize != nil && *f.WriteBufferSize < 1 {\n\t\treturn fmt.Errorf(\n\t\t\t\"fs writeBufferSize is set to: %d, but must be at least 1\",\n\t\t\t*f.WriteBufferSize)\n\t}\n\n\tif f.DataReadBufferSize != nil && *f.DataReadBufferSize < 1 {\n\t\treturn fmt.Errorf(\n\t\t\t\"fs dataReadBufferSize is set to: %d, but must be at least 1\",\n\t\t\t*f.DataReadBufferSize)\n\t}\n\n\tif f.InfoReadBufferSize != nil && *f.InfoReadBufferSize < 1 {\n\t\treturn fmt.Errorf(\n\t\t\t\"fs infoReadBufferSize is set to: %d, but must be at least 1\",\n\t\t\t*f.InfoReadBufferSize)\n\t}\n\n\tif f.SeekReadBufferSize != nil && *f.SeekReadBufferSize < 1 {\n\t\treturn fmt.Errorf(\n\t\t\t\"fs seekReadBufferSize is set to: %d, but must be at least 1\",\n\t\t\t*f.SeekReadBufferSize)\n\t}\n\n\tif f.ThroughputLimitMbps != nil && *f.ThroughputLimitMbps < 1 {\n\t\treturn fmt.Errorf(\n\t\t\t\"fs throughputLimitMbps is set to: %f, but must be at least 1\",\n\t\t\t*f.ThroughputLimitMbps)\n\t}\n\n\tif f.ThroughputCheckEvery != nil && *f.ThroughputCheckEvery < 1 {\n\t\treturn fmt.Errorf(\n\t\t\t\"fs throughputCheckEvery is set to: %d, but must be at least 1\",\n\t\t\t*f.ThroughputCheckEvery)\n\t}\n\tif f.BloomFilterFalsePositivePercent != nil &&\n\t\t(*f.BloomFilterFalsePositivePercent < 0 || *f.BloomFilterFalsePositivePercent > 1) {\n\t\treturn fmt.Errorf(\n\t\t\t\"fs bloomFilterFalsePositivePercent is set to: %f, but must be between 0.0 and 1.0\",\n\t\t\t*f.BloomFilterFalsePositivePercent)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "c09b133bca2d8eae7ea66fd9c4f62ed9", "score": "0.39476973", "text": "func (i ServerConfigService) SaveServerConfigToLocalJsonFile() error {\n\treturn utils.IO().StructToJsonFile(i.getConfigFilePath(), i.ConfigObject)\n}", "title": "" }, { "docid": "f1013c587da255a6b34d94ae6939c6aa", "score": "0.39460424", "text": "func WriteConfig(config *Config, filepath string) (err error) {\n\tf, err := os.Create(filepath)\n\tif err != nil {\n\t\treturn\n\t}\n\tdefer 
f.Close()\n\n\tencoder := json.NewEncoder(f)\n\terr = encoder.Encode(config)\n\tif err != nil {\n\t\treturn\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "0f9528315d90e632cda855e11bb00c4d", "score": "0.3934328", "text": "func SerfDefaultConfig() *serf.Config {\n\tbase := serf.DefaultConfig()\n\n\t// This effectively disables the annoying queue depth warnings.\n\tbase.QueueDepthWarning = 1000000\n\n\t// This enables dynamic sizing of the message queue depth based on the\n\t// cluster size.\n\tbase.MinQueueDepth = 4096\n\n\t// This gives leaves some time to propagate through the cluster before\n\t// we shut down. The value was chosen to be reasonably short, but to\n\t// allow a leave to get to over 99.99% of the cluster with 100k nodes\n\t// (using https://www.serf.io/docs/internals/simulator.html).\n\tbase.LeavePropagateDelay = 3 * time.Second\n\n\treturn base\n}", "title": "" }, { "docid": "65bc6c7db209b8a0a397343812901459", "score": "0.39303714", "text": "func NewFSFetcherConfig(configDir string) FetcherConfig {\n\tlogger.Info(nil, \"Initializing fetcher config: %v\", configDir)\n\treturn &fsFetcherConfig{configDir}\n}", "title": "" }, { "docid": "b9e14ebecff4875130ac6df6c35e4d2d", "score": "0.39254242", "text": "func (f *ConfigFlags) ToRESTConfig() (*rest.Config, error) {\n\tc, err := f.ToRawKubeConfigLoader().ClientConfig()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif f.WrapConfigFn != nil {\n\t\treturn f.WrapConfigFn(c), nil\n\t}\n\treturn c, nil\n}", "title": "" }, { "docid": "99981803d75b2c7669624baa64e59b8e", "score": "0.3895378", "text": "func (o AppImageConfigKernelGatewayImageConfigOutput) FileSystemConfig() AppImageConfigFileSystemConfigPtrOutput {\n\treturn o.ApplyT(func(v AppImageConfigKernelGatewayImageConfig) *AppImageConfigFileSystemConfig {\n\t\treturn v.FileSystemConfig\n\t}).(AppImageConfigFileSystemConfigPtrOutput)\n}", "title": "" }, { "docid": "d82600659b56ce3150e6ee679adf34c8", "score": "0.38910508", "text": "func (this *Server) writeConfig(config *Config) error {\n\tf, err := os.Create(CONFIG)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\terr = json.NewEncoder(f).Encode(config)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "85d31765072f80cec5f4b504cdb9729b", "score": "0.38704708", "text": "func (c *safeConfig) SetFs(fs afero.Fs) {\n\tc.Lock()\n\tdefer c.Unlock()\n\tc.Viper.SetFs(fs)\n}", "title": "" }, { "docid": "7299ba28d6c178c327eb2515a718dc1f", "score": "0.386593", "text": "func (in *SpotFleetRequestConfig) DeepCopy() *SpotFleetRequestConfig {\n\tif in == nil { return nil }\n\tout := new(SpotFleetRequestConfig)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "title": "" }, { "docid": "481527f12b79387af3400c014258cbd7", "score": "0.38623607", "text": "func writeConfig() error {\n\ttoken := viper.Get(\"token\").(*oauth2.Token)\n\tc := config{Token: token}\n\n\ty, err := yaml.Marshal(c)\n\tif err != nil {\n\t\terrors.Wrap(err, \"failed to marshal yaml\")\n\t}\n\n\tf, err := os.Create(viper.ConfigFileUsed())\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to create config file\")\n\t}\n\tdefer f.Close()\n\n\tf.Chmod(0600)\n\t_, err = f.Write(y)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to write config file\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8660b57204d98d3defc7c359479f2d25", "score": "0.38623497", "text": "func (o *AddMealPlanTemplate200ResponseItemsInnerValue) SetServings(v float32) {\n\to.Servings = &v\n}", "title": "" }, { "docid": 
"2130e6d6782d4ba67d2f15d27b8a1a80", "score": "0.3861646", "text": "func ToServer(val interface{}) (*ServerConfig, error) {\n\tvar device ServerConfig\n\tinrec, err := json.Marshal(val)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terr = json.Unmarshal(inrec, &device)\n\treturn &device, err\n}", "title": "" }, { "docid": "4e2d9f89426ca71a60651849fec12cbe", "score": "0.38432318", "text": "func (o *TiltServerOptions) Config() (*apiserver.Config, error) {\n\tserverConfig := genericapiserver.NewRecommendedConfig(apiserver.Codecs)\n\tserverConfig = ApplyRecommendedConfigFns(serverConfig)\n\n\textraConfig := apiserver.ExtraConfig{}\n\terr := o.ServingOptions.ApplyTo(&extraConfig.DeprecatedInsecureServingInfo)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tserving := extraConfig.DeprecatedInsecureServingInfo\n\tserverConfig.ExternalAddress = serving.Listener.Addr().String()\n\n\tloopbackConfig, err := serving.NewLoopbackClientConfig()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tserverConfig.LoopbackClientConfig = loopbackConfig\n\tserverConfig.RESTOptionsGetter = o\n\n\tconfig := &apiserver.Config{\n\t\tGenericConfig: serverConfig,\n\t\tExtraConfig: extraConfig,\n\t}\n\n\treturn config, nil\n}", "title": "" }, { "docid": "83bf53e2476ef7d1a163d30d9041137b", "score": "0.38183624", "text": "func (fs *Filesystem) Save() error {\n\tfsJson := ConvertFilesystemToJSON(*fs)\n\t//fmt.Println(string(fsJson))\n\tfmt.Println(\"FS Saved\")\n\n\terr := ioutil.WriteFile(fs.fsJsonPath, fsJson, 0644)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "593a837d081429935413cfc98ea8e5be", "score": "0.38148695", "text": "func (b *kubeAuthBackend) pathConfigWrite() framework.OperationFunc {\n\treturn func(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) {\n\t\thost := data.Get(\"kubernetes_host\").(string)\n\t\tif host == \"\" {\n\t\t\treturn logical.ErrorResponse(\"no host provided\"), nil\n\t\t}\n\n\t\tpemList := data.Get(\"pem_keys\").([]string)\n\t\tcaCert := data.Get(\"kubernetes_ca_cert\").(string)\n\t\tif len(pemList) == 0 && len(caCert) == 0 {\n\t\t\treturn logical.ErrorResponse(\"one of pem_keys or kubernetes_ca_cert must be set\"), nil\n\t\t}\n\n\t\ttokenReviewer := data.Get(\"token_reviewer_jwt\").(string)\n\t\tif len(tokenReviewer) > 0 {\n\t\t\t// Validate it's a JWT\n\t\t\t_, err := jws.ParseJWT([]byte(tokenReviewer))\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\n\t\tconfig := &kubeConfig{\n\t\t\tPublicKeys: make([]interface{}, len(pemList)),\n\t\t\tPEMKeys: pemList,\n\t\t\tHost: host,\n\t\t\tCACert: caCert,\n\t\t\tTokenReviewerJWT: tokenReviewer,\n\t\t}\n\n\t\tvar err error\n\t\tfor i, pem := range pemList {\n\t\t\tconfig.PublicKeys[i], err = parsePublicKeyPEM([]byte(pem))\n\t\t\tif err != nil {\n\t\t\t\treturn logical.ErrorResponse(err.Error()), nil\n\t\t\t}\n\t\t}\n\n\t\tentry, err := logical.StorageEntryJSON(configPath, config)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif err := req.Storage.Put(ctx, entry); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, nil\n\t}\n}", "title": "" }, { "docid": "05d90653e531522f780affafc2b9d7ff", "score": "0.3813552", "text": "func SaveConfig(conf *types.NetConf, args *skel.CmdArgs, data *OvsSavedData) error {\n\n\t// Current implementation is to write data to a file with the name:\n\t// /var/run/ovs/cni/data/local-<ContainerId:12>-<IfName>.json\n\n\tfileName := fmt.Sprintf(\"local-%s-%s.json\", args.ContainerID[:12], 
args.IfName)\n\tif dataBytes, err := json.Marshal(data); err == nil {\n\t\tlocalDir := annotations.DefaultLocalCNIDir\n\n\t\tif _, err := os.Stat(localDir); err != nil {\n\t\t\tif os.IsNotExist(err) {\n\t\t\t\tif err := os.MkdirAll(localDir, 0700); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\tpath := filepath.Join(localDir, fileName)\n\n\t\treturn os.WriteFile(path, dataBytes, 0644)\n\t} else {\n\t\treturn fmt.Errorf(\"ERROR: serializing delegate OVS saved data: %v\", err)\n\t}\n}", "title": "" }, { "docid": "c8b4b653ee26d18d61171f061ad45e2c", "score": "0.37886235", "text": "func (fs *Filesystem) VirtualFilesystem() *VirtualFilesystem {\n\treturn fs.vfs\n}", "title": "" }, { "docid": "9cdd169c1005c2bb959922bee258da17", "score": "0.37798068", "text": "func NewLoader(ctx context.Context, folder string) (fnloader.Loader, error) {\n\tl := new(loader)\n\tif folder == \"\" {\n\t\tfolder = DefaultPath\n\t}\n\tl.file = filepath.Join(folder, ConfigFile)\n\tif l.loadConfig() {\n\t\tl.c = closedSigChan\n\t\treturn l, nil\n\t}\n\n\t// file does not ready, start a watcher\n\tl.c = make(chan struct{})\n\twatcher, err := fsnotify.NewWatcher()\n\tif err != nil {\n\t\tklog.Errorf(\"(fsloader) failed to create watcher, %v\", err)\n\t\treturn nil, err\n\t}\n\terr = watcher.Add(folder)\n\tif err != nil {\n\t\tklog.Errorf(\"(fsloader) failed to watch, %v\", err)\n\t\twatcher.Close()\n\t\treturn nil, err\n\t}\n\t// start watcher\n\tgo func() {\n\t\tdefer close(l.c)\n\t\tdefer watcher.Close()\n\t\twanted := fsnotify.Write | fsnotify.Create\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn\n\t\t\tcase event, ok := <-watcher.Events:\n\t\t\t\tif !ok {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\t// verbose for debug\n\t\t\t\tif klog.V(4) {\n\t\t\t\t\tklog.V(4).Infof(\"(fsloader) recived event: %q\", func() string {\n\t\t\t\t\t\tif event.Op&fsnotify.Create == fsnotify.Create {\n\t\t\t\t\t\t\treturn \"Create: \" + event.Name\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn \"Write: \" + event.Name\n\t\t\t\t\t}())\n\t\t\t\t}\n\t\t\t\tif event.Op&wanted != 0x0 && strings.HasSuffix(event.Name, ConfigFile) && l.loadConfig() {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase err, ok := <-watcher.Errors:\n\t\t\t\tif !ok {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tklog.Warningf(\"(fsloader) failed to watching config, %v\", err)\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn l, nil\n}", "title": "" }, { "docid": "abc12288d0eb04d21da55f9e35e7e1bb", "score": "0.37758297", "text": "func Serve(c *fuse.Conn, fs FS) error {\n\tserver := New(c, nil)\n\treturn server.Serve(fs)\n}", "title": "" }, { "docid": "79ea630116a8d9271ce6c5272ac92144", "score": "0.3770003", "text": "func (o AppImageConfigKernelGatewayImageConfigPtrOutput) FileSystemConfig() AppImageConfigFileSystemConfigPtrOutput {\n\treturn o.ApplyT(func(v *AppImageConfigKernelGatewayImageConfig) *AppImageConfigFileSystemConfig {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.FileSystemConfig\n\t}).(AppImageConfigFileSystemConfigPtrOutput)\n}", "title": "" }, { "docid": "f8b929e76cd22e901a0a8ec2a22f677a", "score": "0.37462112", "text": "func RegisterConversions(s *runtime.Scheme) error {\n\tif err := s.AddGeneratedConversionFunc((*v1alpha1.DynamicServingConfig)(nil), (*webhook.DynamicServingConfig)(nil), func(a, b interface{}, scope conversion.Scope) error {\n\t\treturn Convert_v1alpha1_DynamicServingConfig_To_webhook_DynamicServingConfig(a.(*v1alpha1.DynamicServingConfig), b.(*webhook.DynamicServingConfig), scope)\n\t}); err != 
nil {\n\t\treturn err\n\t}\n\tif err := s.AddGeneratedConversionFunc((*webhook.DynamicServingConfig)(nil), (*v1alpha1.DynamicServingConfig)(nil), func(a, b interface{}, scope conversion.Scope) error {\n\t\treturn Convert_webhook_DynamicServingConfig_To_v1alpha1_DynamicServingConfig(a.(*webhook.DynamicServingConfig), b.(*v1alpha1.DynamicServingConfig), scope)\n\t}); err != nil {\n\t\treturn err\n\t}\n\tif err := s.AddGeneratedConversionFunc((*v1alpha1.FilesystemServingConfig)(nil), (*webhook.FilesystemServingConfig)(nil), func(a, b interface{}, scope conversion.Scope) error {\n\t\treturn Convert_v1alpha1_FilesystemServingConfig_To_webhook_FilesystemServingConfig(a.(*v1alpha1.FilesystemServingConfig), b.(*webhook.FilesystemServingConfig), scope)\n\t}); err != nil {\n\t\treturn err\n\t}\n\tif err := s.AddGeneratedConversionFunc((*webhook.FilesystemServingConfig)(nil), (*v1alpha1.FilesystemServingConfig)(nil), func(a, b interface{}, scope conversion.Scope) error {\n\t\treturn Convert_webhook_FilesystemServingConfig_To_v1alpha1_FilesystemServingConfig(a.(*webhook.FilesystemServingConfig), b.(*v1alpha1.FilesystemServingConfig), scope)\n\t}); err != nil {\n\t\treturn err\n\t}\n\tif err := s.AddGeneratedConversionFunc((*v1alpha1.TLSConfig)(nil), (*webhook.TLSConfig)(nil), func(a, b interface{}, scope conversion.Scope) error {\n\t\treturn Convert_v1alpha1_TLSConfig_To_webhook_TLSConfig(a.(*v1alpha1.TLSConfig), b.(*webhook.TLSConfig), scope)\n\t}); err != nil {\n\t\treturn err\n\t}\n\tif err := s.AddGeneratedConversionFunc((*webhook.TLSConfig)(nil), (*v1alpha1.TLSConfig)(nil), func(a, b interface{}, scope conversion.Scope) error {\n\t\treturn Convert_webhook_TLSConfig_To_v1alpha1_TLSConfig(a.(*webhook.TLSConfig), b.(*v1alpha1.TLSConfig), scope)\n\t}); err != nil {\n\t\treturn err\n\t}\n\tif err := s.AddGeneratedConversionFunc((*v1alpha1.WebhookConfiguration)(nil), (*webhook.WebhookConfiguration)(nil), func(a, b interface{}, scope conversion.Scope) error {\n\t\treturn Convert_v1alpha1_WebhookConfiguration_To_webhook_WebhookConfiguration(a.(*v1alpha1.WebhookConfiguration), b.(*webhook.WebhookConfiguration), scope)\n\t}); err != nil {\n\t\treturn err\n\t}\n\tif err := s.AddGeneratedConversionFunc((*webhook.WebhookConfiguration)(nil), (*v1alpha1.WebhookConfiguration)(nil), func(a, b interface{}, scope conversion.Scope) error {\n\t\treturn Convert_webhook_WebhookConfiguration_To_v1alpha1_WebhookConfiguration(a.(*webhook.WebhookConfiguration), b.(*v1alpha1.WebhookConfiguration), scope)\n\t}); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "f19cb6642c00cf0a524749313f778dab", "score": "0.37408763", "text": "func ProtoToGkehubAlphaFeatureSpecFleetobservabilityLoggingConfig(p *alphapb.GkehubAlphaFeatureSpecFleetobservabilityLoggingConfig) *alpha.FeatureSpecFleetobservabilityLoggingConfig {\n\tif p == nil {\n\t\treturn nil\n\t}\n\tobj := &alpha.FeatureSpecFleetobservabilityLoggingConfig{\n\t\tDefaultConfig: ProtoToGkehubAlphaFeatureSpecFleetobservabilityLoggingConfigDefaultConfig(p.GetDefaultConfig()),\n\t\tFleetScopeLogsConfig: ProtoToGkehubAlphaFeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfig(p.GetFleetScopeLogsConfig()),\n\t}\n\treturn obj\n}", "title": "" }, { "docid": "89a774ee2950e84f9bd56a65bbe4ab57", "score": "0.37378246", "text": "func GkehubAlphaFeatureSpecFleetobservabilityLoggingConfigToProto(o *alpha.FeatureSpecFleetobservabilityLoggingConfig) *alphapb.GkehubAlphaFeatureSpecFleetobservabilityLoggingConfig {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := 
&alphapb.GkehubAlphaFeatureSpecFleetobservabilityLoggingConfig{}\n\tp.SetDefaultConfig(GkehubAlphaFeatureSpecFleetobservabilityLoggingConfigDefaultConfigToProto(o.DefaultConfig))\n\tp.SetFleetScopeLogsConfig(GkehubAlphaFeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfigToProto(o.FleetScopeLogsConfig))\n\treturn p\n}", "title": "" }, { "docid": "e90aac3e2a2af90ec24f07bd7d402a8e", "score": "0.3732112", "text": "func WriteConfig(fs afero.Fs, config *Config, path string) error {\n\tconfMap, err := toMap(config)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn checkAndWrite(fs, confMap, path)\n}", "title": "" }, { "docid": "74ed6f18645baf30594129e4a20033b8", "score": "0.37315813", "text": "func (o *ConfigurationServiceAPI) Serve(builder middleware.Builder) http.Handler {\n\to.Init()\n\n\tif o.Middleware != nil {\n\t\treturn o.Middleware(builder)\n\t}\n\treturn o.context.APIHandler(builder)\n}", "title": "" }, { "docid": "ee9d9dea7d73fcf4ae2ab969f40262dd", "score": "0.37312263", "text": "func (setting *FileSetting) ToStruct(jsonData string) error {\n\tjsonMap := FileSetting{}\n\terr := json.Unmarshal([]byte(jsonData), &jsonMap)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*setting = jsonMap\n\treturn nil\n}", "title": "" }, { "docid": "34525418d5e0fea71d7a602b0a9a4027", "score": "0.37301743", "text": "func (config *Config) SaveToFile() error {\n\tconfig.SecretKey = base64.StdEncoding.EncodeToString([]byte(config.SecretKey))\n\tconfigBytes, err := json.Marshal(&config)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn saveFile(ConfigFilePath, configBytes)\n}", "title": "" }, { "docid": "0fafc46512c663018674b714148ca382", "score": "0.37167093", "text": "func (w *Workflow) SaveConfig() error {\n\tconfigFilePath := w.buildConfigFilePath()\n\tdata, err := json.Marshal(&w.Config)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = os.MkdirAll(filepath.Dir(configFilePath), 0700)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn ioutil.WriteFile(configFilePath, data, 0666)\n}", "title": "" }, { "docid": "a44166293efa7fd48473c73831c26c08", "score": "0.37141857", "text": "func (m *Windows10EndpointProtectionConfiguration) SetFirewallBlockStatefulFTP(value *bool)() {\n err := m.GetBackingStore().Set(\"firewallBlockStatefulFTP\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "500b6a0b8e1886b864e796f4d6e7410d", "score": "0.36951238", "text": "func pipeConfig(opts *options) {\n\n\tconfigChan := make(chan interface{})\n\n\tgo func() {\n\t\tfor {\n\t\t\tcfg := <-configChan\n\t\t\topts.dispatch(cfg)\n\t\t}\n\t}()\n\tconfigPath, err := client.InConfigDir(opts.saveDir, opts.name)\n\tif err != nil {\n\t\tlog.Errorf(\"Could not get config path? 
%v\", err)\n\t}\n\n\tlog.Tracef(\"Obfuscating %v\", opts.obfuscate)\n\tconf := newConfig(configPath, opts.obfuscate, opts.yamlTemplater)\n\n\tif saved, proxyErr := conf.saved(); proxyErr != nil {\n\t\tlog.Debugf(\"Could not load stored config %v\", proxyErr)\n\t\tif embedded, errr := conf.embedded(opts.embeddedData, opts.name); errr != nil {\n\t\t\tlog.Errorf(\"Could not load embedded config %v\", errr)\n\t\t} else {\n\t\t\tlog.Debugf(\"Sending embedded config for %v\", name)\n\t\t\tconfigChan <- embedded\n\t\t}\n\t} else {\n\t\tlog.Debugf(\"Sending saved config for %v\", name)\n\t\tconfigChan <- saved\n\t}\n\n\t// Now continually poll for new configs and pipe them back to the dispatch\n\t// function.\n\tgo conf.poll(opts.userConfig, configChan, opts.urls, opts.sleep)\n}", "title": "" }, { "docid": "467cf0e5ebb94aaa39897c35aec66d98", "score": "0.3686015", "text": "func (h *Handler) WriteConfig(config Config) error {\n\tfile, err := json.MarshalIndent(config, \"\", \" \")\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = ioutil.WriteFile(\"config.json\", file, 0644)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "db7e62c1d74f42bfe778ef3552c1d99f", "score": "0.36666828", "text": "func (s *SimpleFSHandler) SimpleFSSetFolderSyncConfig(\n\tctx context.Context, arg keybase1.SimpleFSSetFolderSyncConfigArg) error {\n\tcli, err := s.client(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tctx, cancel := s.wrapContextWithTimeout(ctx)\n\tdefer cancel()\n\treturn cli.SimpleFSSetFolderSyncConfig(ctx, arg)\n}", "title": "" }, { "docid": "9852a9fefed5a53221dc077abf7f7407", "score": "0.36625108", "text": "func (conf CBFSConfig) MarshalJSON() ([]byte, error) {\n\treturn json.Marshal(conf.ToMap())\n}", "title": "" }, { "docid": "01d2afa176a888475cf32037f9810454", "score": "0.36529294", "text": "func (b *ServerStorageVersionApplyConfiguration) WithServedVersions(values ...string) *ServerStorageVersionApplyConfiguration {\n\tfor i := range values {\n\t\tb.ServedVersions = append(b.ServedVersions, values[i])\n\t}\n\treturn b\n}", "title": "" }, { "docid": "14d98989704782864d9a79fd3e8566df", "score": "0.36521322", "text": "func WriteConfig() error {\n\t// We recreate a new viper instance, as globalViper.WriteConfig()\n\t// writes both default values and set values back to disk while we only\n\t// want the latter to be written\n\tv := viper.New()\n\tv.SetConfigFile(constants.ConfigPath)\n\tv.SetConfigType(\"json\")\n\terr := syncViperState(v)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn v.WriteConfig()\n}", "title": "" }, { "docid": "5a0497bc40d014bb192dc9e6981e4e32", "score": "0.36491084", "text": "func (cfg *Config) WriteToDisk() error {\n\tenc, err := yaml.Marshal(&cfg)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = os.WriteFile(cfg.FileUsed, enc, 0600)\n\treturn err\n}", "title": "" }, { "docid": "880e9a21f58faad0d3261e996d546717", "score": "0.36431402", "text": "func (c Config) SaveConfig(path string) {\n\tdata, err := json.MarshalIndent(c, \"\", \" \")\n\tif err != nil {\n\t\tlogrus.Fatalf(\"couldn't marshal config, %s\", err)\n\t}\n\tif err := ioutil.WriteFile(path, data, 0644); err != nil {\n\t\tlogrus.Fatalf(\"cloudn't write config to %s, %s\", path, data)\n\t}\n}", "title": "" }, { "docid": "20b0a48337310721b1118734fb6cd831", "score": "0.36415258", "text": "func ProtoToGkehubAlphaFeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfig(p *alphapb.GkehubAlphaFeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfig) 
*alpha.FeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfig {\n\tif p == nil {\n\t\treturn nil\n\t}\n\tobj := &alpha.FeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfig{\n\t\tMode: ProtoToGkehubAlphaFeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfigModeEnum(p.GetMode()),\n\t}\n\treturn obj\n}", "title": "" }, { "docid": "e97830a2cb87a670fccaad14bb197126", "score": "0.36374837", "text": "func (this *HookFs) Serve() error {\n\tserver, err := newHookServer(this)\n\tif err != nil {\n\t\treturn err\n\t}\n\tserver.Serve()\n\treturn nil\n}", "title": "" }, { "docid": "6c28190e56762ad7a142768d32109ed9", "score": "0.3622498", "text": "func (c Configuration) MarshalJSON() ([]byte, error) {\n\treturn json.Marshal(map[string]interface{}{\n\t\t\"Address\": c.Address,\n\t\t\"static\": c.StaticPath,\n\t\t\"index\": c.IndexPath,\n\t\t\"Server\": map[string]interface{}{\n\t\t\t\"Addr\": c.Server.Addr,\n\t\t\t\"ReadHeaderTimeout\": c.Server.ReadHeaderTimeout,\n\t\t},\n\t\t\"cert\": c.Cert,\n\t\t\"close_time\": c.GracefulTimeout,\n\t\t\"basic_timeout\": c.BasicTimeout,\n\t\t\"file_timeout_rate\": c.FileTimeoutRate,\n\t\t\"max_file_timeout\": c.MaxFileTimeout,\n\t\t\"min_file_timeout\": c.MinFileTimeout,\n\t\t\"ActiveFileProcessing\": c.ActiveFileProcessing,\n\t\t\"db_type\": c.DatabaseType,\n\t\t\"db\": c.Database,\n\t\t\"db_clear\": c.DatabaseReset,\n\t\t\"tika\": c.Tika,\n\t\t\"gotenpath\": c.GotenPath,\n\t\t\"filelimit\": c.FileLimit,\n\t\t\"total_free_space\": c.FreeSpace,\n\t\t\"maxfilecount\": c.MaxFileCount,\n\t\t\"AdminKey\": c.AdminKey,\n\t\t\"GuestUser\": c.GuestUser,\n\t\t\"SetupTimeout\": c.SetupTimeout,\n\t\t\"UserTimeouts\": c.UserTimeouts,\n\t\t\"Email\": c.Email,\n\t\t\"error_email\": c.ErrorEmail,\n\t\t\"log_path\": c.LogPath,\n\t\t\"PrivateMode\": c.PrivateMode,\n\t})\n}", "title": "" }, { "docid": "9fee3e07310250eb3600e86e9c94f536", "score": "0.36096743", "text": "func marshalConfig() config {\n\n\t// Grab the settings.json file\n\tconfigString, err := ioutil.ReadFile(\"config.json\")\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t\tos.Exit(1)\n\t}\n\t// Marshal it into a setting object\n\tvar c config\n\terr2 := json.Unmarshal(configString, &c)\n\tif err2 != nil {\n\t\tfmt.Println(err2.Error())\n\t}\n\n\treturn c\n}", "title": "" }, { "docid": "50a1da866f15cf2453daec003f09bce4", "score": "0.3603731", "text": "func WriteJSONConfig(a *Accessor, f string) error {\n\tif content, err := json.MarshalIndent(a.JSONData, \"\", \" \"); err != nil {\n\t\treturn err\n\t} else if err = ioutil.WriteFile(f, content, 0644); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n\n}", "title": "" }, { "docid": "1a0db3bbfec00a48f8a53fabf4b35974", "score": "0.3596671", "text": "func (f *fileBackend) LoadConfig(o Options) error {\n\tif o.DefaultConfig == nil {\n\t\t//this should not be happen\n\t\tpanic(\"default config can not be nil\")\n\t}\n\tu, err := url.Parse(o.URL)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif f.path == \"\" {\n\t\tf.path = u.Host + u.Path\n\t}\n\tf.instance = o.DefaultConfig\n\tf.onLoaded = o.OnLoaded\n\n\tcfg := o.DefaultConfig\n\text := filepath.Ext(f.path)\n\tbytes, err := ioutil.ReadFile(f.path)\n\tif err != nil {\n\t\tif !os.IsNotExist(err) {\n\t\t\treturn err\n\t\t}\n\t\tfileDir := filepath.Dir(f.path)\n\t\tif _, pathStatErr := os.Stat(fileDir); pathStatErr != nil {\n\t\t\tif !os.IsNotExist(pathStatErr) {\n\t\t\t\treturn fmt.Errorf(\"try to open file error %s, try to stat dir %s, error %s\", err, fileDir, pathStatErr)\n\t\t\t}\n\t\t\tif 
mkdirError := os.MkdirAll(fileDir, os.FileMode(0700)); mkdirError != nil {\n\t\t\t\treturn fmt.Errorf(\"try to open file %s, try to mkdir %s, error %s\", err, f.path, mkdirError)\n\t\t\t}\n\t\t}\n\t\tif writeErr := ioutil.WriteFile(f.path, marshal(cfg, ext), os.FileMode(0700)); writeErr != nil {\n\t\t\treturn fmt.Errorf(\"try to open file %s, try to write default config config file rror %s\", err, writeErr)\n\t\t}\n\t\to.OnLoaded(cfg)\n\t\treturn nil\n\t}\n\terr = unmarshal(bytes, cfg, ext)\n\tif err != nil {\n\t\treturn err\n\t}\n\tprefixKeys := GetPrefixKeys(f.path, o.DefaultConfig)\n\tif o.Watch && !f.loaded {\n\t\tgo f.watch(context.Background(), f.path, prefixKeys)\n\t}\n\tf.loaded = true\n\to.OnLoaded(cfg)\n\treturn nil\n}", "title": "" }, { "docid": "3cbd46698459d60b936484b282217e54", "score": "0.35954148", "text": "func saveToConfigFile(config jsonConfigurable, filename string) error {\n\tdata, err := json.MarshalIndent(&config, \"\", \" \")\n\tif err != nil {\n\t\treturn fmt.Errorf(\"cannot marshal config data: %s\", err.Error())\n\t}\n\n\tf, err := ioutil.TempFile(filepath.Dir(filename), \"config\")\n\tif err != nil {\n\t\treturn fmt.Errorf(\"cannot create empty config file %s: %s\", filename, err.Error())\n\t}\n\tdefer os.Remove(f.Name())\n\tdefer f.Close()\n\n\t_, err = f.Write(data)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"default config file: %s could not be written: %s\", filename, err.Error())\n\t}\n\n\t_, err = f.WriteString(\"\\n\")\n\tif err != nil {\n\t\treturn fmt.Errorf(\"default config file: %s could not be written: %s\", filename, err.Error())\n\t}\n\n\tf.Close()\n\tos.Rename(f.Name(), filename)\n\n\treturn nil\n}", "title": "" }, { "docid": "3137af9471d5a8aaeed5eaaaf2222424", "score": "0.35939378", "text": "func (o *MapIngredientsToGroceryProductsRequest) SetServings(v float32) {\n\to.Servings = v\n}", "title": "" }, { "docid": "41d6e616fcfb3412aff5641e405e3c78", "score": "0.35935184", "text": "func Serve(l net.Listener, cfg *config.Config) (*Server, error) {\n\tif cfg.HomeDir == \"\" {\n\t\tcfg.HomeDir = config.DefaultHomeDir\n\t}\n\tif cfg.Gossip == nil {\n\t\treturn nil, errors.New(\"Gossip configuration cannot be nil\")\n\t}\n\tif cfg.Identity == nil {\n\t\treturn nil, errors.New(\"Identity configuration cannot be nil\")\n\t}\n\terr := validateGossipConfig(cfg.Gossip)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tsrv := &Server{cfg: cfg}\n\n\tsrv.chainFilePath = filepath.Join(srv.cfg.HomeDir, \"channels\")\n\tif s, err := os.Stat(srv.chainFilePath); err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\tif err := os.MkdirAll(srv.chainFilePath, 0755); err != nil {\n\t\t\t\treturn nil, errors.Errorf(\"Could not create chain file path: %s\", err)\n\t\t\t}\n\t\t} else {\n\t\t\treturn nil, errors.Errorf(\"Could not stat chain file path: %s\", err)\n\t\t}\n\t} else if !s.IsDir() {\n\t\treturn nil, errors.Errorf(\"RKSync chain file path exists but not a dir: %s\", srv.chainFilePath)\n\t}\n\n\tif srv.cfg.Server == nil {\n\t\tsrv.cfg.Server = &config.ServerConfig{\n\t\t\tSecOpts: &config.TLSConfig{},\n\t\t\tKaOpts: &config.KeepaliveConfig{},\n\t\t}\n\t}\n\n\tif cfg.Server.SecOpts.UseTLS {\n\t\tsrv.clientCreds, err = clientTransportCredentials(srv.cfg)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Errorf(\"Failed to set TLS client certificate (%s)\", err)\n\t\t}\n\t}\n\n\tsrv.selfIdentity, err = serializeIdentity(cfg.Identity, cfg.HomeDir)\n\tif err != nil {\n\t\treturn nil, errors.Errorf(\"Failed serializing self identity: %v\", err)\n\t}\n\n\tgrpcServer, err := 
server.NewGRPCServerFromListener(l, srv.cfg.Server)\n\tif err != nil {\n\t\tlogging.Errorf(\"Failed to create grpc server (%s)\", err)\n\t\treturn nil, err\n\t}\n\n\tsrv.gossip, err = gossip.NewGossipService(srv.cfg.Gossip, srv.cfg.Identity, grpcServer.Server(), srv.selfIdentity, func() []grpc.DialOption {\n\t\treturn srv.secureDialOpts(srv.cfg.Server)\n\t})\n\tif err != nil {\n\t\treturn nil, errors.Errorf(\"Failed creating RKSync service (%s)\", err)\n\t}\n\n\tgo func() {\n\t\tif err := grpcServer.Start(); err != nil {\n\t\t\tlogging.Errorf(\"grpc server exited with error: %s\", err)\n\t\t}\n\t}()\n\n\tgo srv.initializeChannel()\n\treturn srv, nil\n}", "title": "" }, { "docid": "51941478d31762a98fdc9399f482d36d", "score": "0.35873052", "text": "func WithConfigsFolder(f string) func(k *Kustomize) {\n\treturn func(k *Kustomize) {\n\t\tk.configsFolder = f\n\t}\n}", "title": "" }, { "docid": "94053ce91a560e02da45fab6d9d696ea", "score": "0.3584933", "text": "func (in *FilesystemVirtiofs) DeepCopy() *FilesystemVirtiofs {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(FilesystemVirtiofs)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "title": "" }, { "docid": "e3a7785baac8479b0c0eca7c80aaf9cc", "score": "0.35811836", "text": "func (s *Server) SetServingStatus(service string, status healthpb.HealthCheckResponse_ServingStatus) {\n\ts.mu.Lock()\n\ts.statusMap[service] = status\n\ts.mu.Unlock()\n}", "title": "" }, { "docid": "4b7ad4daf577458fe4f5ebcaedfd3c8e", "score": "0.35766444", "text": "func (client Client) ListFilesystemSender(req *http.Request) (*http.Response, error) {\n\tsd := autorest.GetSendDecorators(req.Context(), autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))\n\treturn autorest.SendWithSender(client, req, sd...)\n}", "title": "" }, { "docid": "f12da9e506d3a88842b9bc34f9790ffc", "score": "0.3573348", "text": "func writeToFile(config runtime.Object, configPath ...string) error {\n\tfPath := PathFromEnv()\n\tif configPath != nil {\n\t\tfPath = configPath[0]\n\t}\n\n\tif config == nil {\n\t\tklog.Errorf(\"could not write to '%s': config can't be nil\", fPath)\n\t}\n\n\t// encode config to YAML\n\tdata, err := runtime.Encode(latest.Codec, config)\n\tif err != nil {\n\t\treturn errors.Errorf(\"could not write to '%s': failed to encode config: %v\", fPath, err)\n\t}\n\n\t// create parent dir if doesn't exist\n\tdir := filepath.Dir(fPath)\n\tif _, err := os.Stat(dir); os.IsNotExist(err) {\n\t\tif err = os.MkdirAll(dir, 0755); err != nil {\n\t\t\treturn errors.Wrapf(err, \"Error creating directory: %s\", dir)\n\t\t}\n\t}\n\n\t// write with restricted permissions\n\tif err := lock.WriteFile(fPath, data, 0600); err != nil {\n\t\treturn errors.Wrapf(err, \"Error writing file %s\", fPath)\n\t}\n\n\tif err := pkgutil.MaybeChownDirRecursiveToMinikubeUser(dir); err != nil {\n\t\treturn errors.Wrapf(err, \"Error recursively changing ownership for dir: %s\", dir)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "e2d499e983fa3b07b2e7350edd908e94", "score": "0.35703102", "text": "func generateFileConfig(c *Config) {\n\tviper.SetConfigFile(\"./config.json\")\n\tviper.AutomaticEnv()\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tfmt.Printf(\"Error reading config file, %s\\n\", err)\n\t\tlog.Panicln(\"could not read the file\")\n\t}\n\t// Confirm which config file is used\n\t//fmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\n\terr := viper.Unmarshal(c)\n\tif err != nil {\n\t\tlog.Info(\"could not unmarshall %v:\", 
err)\n\n\t}\n}", "title": "" }, { "docid": "8147566da482883d0e6b3974e124c76d", "score": "0.35689667", "text": "func (o ServingConfigOutput) TrailingSlashBehavior() ServingConfigTrailingSlashBehaviorPtrOutput {\n\treturn o.ApplyT(func(v ServingConfig) *ServingConfigTrailingSlashBehavior { return v.TrailingSlashBehavior }).(ServingConfigTrailingSlashBehaviorPtrOutput)\n}", "title": "" }, { "docid": "73c96cced3c279086829859cf313abbf", "score": "0.35680097", "text": "func (o *AnalyzeRecipeRequest1) SetServings(v int32) {\n\to.Servings = &v\n}", "title": "" }, { "docid": "59ee07a6415919361a33844e56c06b06", "score": "0.35629833", "text": "func (s *federationConfigLister) FederationConfigs(namespace string) FederationConfigNamespaceLister {\n\treturn federationConfigNamespaceLister{indexer: s.indexer, namespace: namespace}\n}", "title": "" }, { "docid": "0791bec38e13ae052c98becfdedf41b7", "score": "0.3562068", "text": "func Convert_webhook_DynamicServingConfig_To_v1alpha1_DynamicServingConfig(in *webhook.DynamicServingConfig, out *v1alpha1.DynamicServingConfig, s conversion.Scope) error {\n\treturn autoConvert_webhook_DynamicServingConfig_To_v1alpha1_DynamicServingConfig(in, out, s)\n}", "title": "" }, { "docid": "4330b675ed5aad0bbfa25a1fc6cb3c07", "score": "0.35595542", "text": "func (this *Server) WithPersistentConfig(fn func(*Config) error) error {\n\tconfigLock.Lock()\n\tdefer configLock.Unlock()\n\n\tcfg, err := this.getConfig(false)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = fn(cfg)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = this.writeConfig(cfg)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "2b17a4115f0cf29e7aaccdc7f5979127", "score": "0.35592383", "text": "func WriteConfig(params *options.ParameterBag) (err error) {\n\tconfigLocation := getConfigLocation()\n\n\tconfigDir := filepath.Dir(configLocation)\n\tif _, e := os.Stat(configDir); os.IsNotExist(e) {\n\t\terr = os.MkdirAll(configDir, 0755)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tconfigToWrite := map[string]interface{}{\n\t\tServerURL: params.ReadString(ServerURL, \"\"),\n\t\tToken: params.ReadString(Token, \"\"),\n\t}\n\n\terr = DeleteConfig()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfileToWrite, err := os.OpenFile(configLocation, os.O_CREATE|os.O_RDWR, 0600)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tencoder := json.NewEncoder(fileToWrite)\n\terr = encoder.Encode(configToWrite)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlogrus.Infof(\"created config at %s\", configLocation)\n\n\treturn nil\n}", "title": "" }, { "docid": "c11fac825e4bdc37f51555ccb44c805d", "score": "0.35579184", "text": "func (this *Server) WithConfig(fn func(*Config) error) error {\n\tcfg, err := this.getConfig(true)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = fn(cfg)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "08548dc9f8bca2b75284fa9ba0f8583f", "score": "0.35556105", "text": "func (f *Folders) watchFSNotify() {\n\tfor {\n\t\tselect {\n\t\tcase err, ok := <-f.Watcher.Errors:\n\t\t\tif !ok {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tf.Printf(\"[ERROR] fsnotify: %v\", err)\n\t\tcase event, ok := <-f.Watcher.Events:\n\t\t\tif !ok {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tif strings.HasSuffix(event.Name, suffix) {\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\t// Send this event to processEvent().\n\t\t\tfor _, cnfg := range f.Config {\n\t\t\t\t// cnfg.Path: \"/Users/Documents/watched_folder\"\n\t\t\t\t// event.Name: 
\"/Users/Documents/watched_folder/new_folder/file.rar\"\n\t\t\t\t// eventData.name: \"new_folder\"\n\t\t\t\tif !strings.HasPrefix(event.Name, cnfg.Path) {\n\t\t\t\t\tcontinue // Not the configured folder for the event we just got.\n\t\t\t\t} else if p := filepath.Dir(event.Name); p == cnfg.Path {\n\t\t\t\t\tf.Events <- &eventData{name: filepath.Base(event.Name), cnfg: cnfg, file: event.Name}\n\t\t\t\t} else {\n\t\t\t\t\tf.Events <- &eventData{name: filepath.Base(p), cnfg: cnfg, file: event.Name}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "9348477d088e5432e1f09833235d3a4a", "score": "0.35543326", "text": "func ReadInConfigFS(configFile string, fs afero.Fs) (*Config, error) {\n\tc, err := readInConfig(configFile, fs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tc1, err := setupSecrets(c, fs)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"%w: %s\", err, c.SecretsFile)\n\t}\n\treturn c1, err\n}", "title": "" }, { "docid": "0896b81c83f14396c94e8bd3c36dfd43", "score": "0.35464826", "text": "func (settings *FileSettings) ToStruct(jsonData string) error {\n\tjsonMap := []FileSetting{}\n\terr := json.Unmarshal([]byte(jsonData), &jsonMap)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*settings = jsonMap\n\treturn nil\n}", "title": "" }, { "docid": "c0bffd0df353a8a50e4597ee08476a0e", "score": "0.35436323", "text": "func (teamsCfg *TeamsConfig) configHandler(w http.ResponseWriter, r *http.Request) {\n\tb, err := json.MarshalIndent(teamsCfg.Connectors, \"\", \" \")\n\tif err != nil {\n\t\tlog.Errorf(\"Failed unmarshalling Teams Connectors config: %v\", err)\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tfmt.Fprintf(w, string(b))\n}", "title": "" }, { "docid": "fc4c35ec9f9a15e05fcce4bd86f41ac2", "score": "0.35430515", "text": "func SetConfiguration(f string) (Configuration, error) {\n\tc := Configuration{}\n\tfilePath := \"\"\n\n\tfilePath, err := filepath.Abs(f)\n\tif err != nil {\n\t\treturn c, err\n\t}\n\tif c.File == \"\" {\n\t\tc.File = filePath\n\t}\n\tcontent, err := ioutil.ReadFile(c.File)\n\tif err != nil {\n\t\treturn c, err\n\t}\n\terr = yaml.Unmarshal(content, &c)\n\tif err != nil {\n\t\treturn c, err\n\t}\n\n\tif c.Payload.Timeout == 0 {\n\t\tc.Payload.Timeout = 5\n\t}\n\tif c.LogLevel == 0 {\n\t\tlog.SetLevel(4)\n\t\tc.LogLevel = log.GetLevel()\n\t} else {\n\t\tlog.SetLevel(c.LogLevel)\n\t}\n\tif c.Payload.Host == \"\" {\n\t\treturn c, errors.New(\"Host is empty. [Check Payload]\")\n\t}\n\tif c.Payload.Token == \"\" {\n\t\treturn c, errors.New(\"Splunk token is empty. [Check Payload]\")\n\t}\n\tif c.Payload.Port == 0 {\n\t\tc.Payload.Port = 8088\n\t}\n\tif c.Payload.Proto == \"\" {\n\t\tc.Payload.Proto = \"https\"\n\t}\n\tif c.Payload.Proto != \"http\" && c.Payload.Proto != \"https\" {\n\t\treturn c, errors.New(\"Splunk url protocal is unknown. [Check Payload]\")\n\t}\n\tif c.Payload.Endpoints.Health == \"\" {\n\t\treturn c, errors.New(\"Splunk Health API is empty. [Check Payload]\")\n\t}\n\tif c.Payload.Endpoints.Raw == \"\" {\n\t\treturn c, errors.New(\"Splunk API Raw is empty . 
[Check Payload]\")\n\t}\n\treturn c, nil\n}", "title": "" }, { "docid": "77f22bd78161ef5703a9bfaa59cdf850", "score": "0.35370487", "text": "func Serve(f PluginFactory) {\n\tlogger := log.New(&log.LoggerOptions{\n\t\tLevel: log.Trace,\n\t\tJSONFormat: true,\n\t})\n\n\tplugin := f(logger)\n\tserve(plugin, logger)\n}", "title": "" }, { "docid": "7c869b5126d904bf478c4171224c4ccb", "score": "0.35333115", "text": "func Serve(cfg *config.Config) {\n\n\tstore, err := store.New(cfg)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\n\tdefer store.Close()\n\n\tmailer, err := mailer.New(cfg)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\n\tcache := cache.New(cfg)\n\n\tfeedparser := feedparser.New()\n\n\tenv := &api.Env{\n\t\tStore: store,\n\t\tCache: cache,\n\t\tMailer: mailer,\n\t\tConfig: cfg,\n\t\tFeedparser: feedparser,\n\t}\n\n\tif err := api.Run(env); err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\n}", "title": "" }, { "docid": "de879b84deee6a6948bddc1d98656231", "score": "0.35324696", "text": "func (s HomeJSONConfigStore) Put(cfg Config) error {\n\tpath, err := localConfigPath()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif err := os.MkdirAll(path, 0o774); err != nil {\n\t\treturn err\n\t}\n\n\tcfgBytes, err := json.Marshal(cfg)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn atomicfile.WriteFile(filepath.Join(path, configFile), cfgBytes, 0o774)\n}", "title": "" }, { "docid": "562d34f6e25c5831d5158f114d7f1a91", "score": "0.3530695", "text": "func oldMarshalConfig(config *kotsv1beta1.Config) (string, error) {\n\ts := json.NewYAMLSerializer(json.DefaultMetaFactory, scheme.Scheme, scheme.Scheme)\n\n\tvar marshalled bytes.Buffer\n\tif err := s.Encode(config, &marshalled); err != nil {\n\t\treturn \"\", errors.Wrap(err, \"failed to marshal config\")\n\t}\n\treturn string(marshalled.Bytes()), nil\n}", "title": "" }, { "docid": "15f625da3da66311c6451c3168943006", "score": "0.3524821", "text": "func GenerateFilteringConfigMap(instance *sfv1alpha1.SplunkForwarder, namespacedName types.NamespacedName) *corev1.ConfigMap {\n\tvar data = map[string]string{}\n\tdata[\"local.meta\"] = `\n[]\naccess = read : [ * ], write : [ admin ]\nexport = system\n`\n\tdata[\"inputs.conf\"] = `\n[splunktcp]\nroute = has_key:_replicationBucketUUID:replicationQueue;has_key:_dstrx:typingQueue;has_key:_linebreaker:typingQueue;absent_key:_linebreaker:parsingQueue\n\n[splunktcp://:9997]\nconnection_host = dns\n`\n\n\tdata[\"limits.conf\"] = `\n[thruput]\nmaxKBps = 0\n`\n\n\tdata[\"props.conf\"] = fmt.Sprintf(`\n[_json]\nTRUNCATE = %d\n`, MaxEventSize)\n\n\tif len(instance.Spec.Filters) > 0 {\n\t\tdata[\"transforms.conf\"] = \"\"\n\n\t\tdata[\"props.conf\"] += \"TRANSFORMS-null =\"\n\n\t\tfor _, filter := range instance.Spec.Filters {\n\t\t\tdata[\"transforms.conf\"] += \"[filter_\" + filter.Name + \"]\\n\"\n\t\t\tdata[\"transforms.conf\"] += \"DEST_KEY = queue\\n\"\n\t\t\tdata[\"transforms.conf\"] += \"FORMAT = nullQueue\\n\"\n\t\t\tdata[\"transforms.conf\"] += \"REGEX = \" + filter.Filter + \"\\n\\n\"\n\t\t\tdata[\"props.conf\"] += \"filter_\" + filter.Name + \" \"\n\t\t}\n\t}\n\n\tret := &corev1.ConfigMap{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: instance.Name + \"-hfconfig\",\n\t\t\tNamespace: namespacedName.Namespace,\n\t\t\tLabels: map[string]string{\n\t\t\t\t\"app\": namespacedName.Name,\n\t\t\t},\n\t\t\tAnnotations: map[string]string{\n\t\t\t\t\"genVersion\": strconv.FormatInt(instance.Generation, 10),\n\t\t\t},\n\t\t},\n\t\tData: data,\n\t}\n\n\treturn ret\n}", "title": "" }, { "docid": 
"2981ec9b4072d1d18303e21e4737cae2", "score": "0.35233027", "text": "func WriteConfigToDisk(cfg *kubeadmapi.ClusterConfiguration, kubeletDir, patchesDir string, output io.Writer) error {\n\tkubeletCfg, ok := cfg.ComponentConfigs[componentconfigs.KubeletGroup]\n\tif !ok {\n\t\treturn errors.New(\"no kubelet component config found\")\n\t}\n\n\tif err := kubeletCfg.Mutate(); err != nil {\n\t\treturn err\n\t}\n\n\tkubeletBytes, err := kubeletCfg.Marshal()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Apply patches to the KubeletConfiguration\n\tif len(patchesDir) != 0 {\n\t\tkubeletBytes, err = applyKubeletConfigPatches(kubeletBytes, patchesDir, output)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"could not apply patches to the KubeletConfiguration\")\n\t\t}\n\t}\n\n\treturn writeConfigBytesToDisk(kubeletBytes, kubeletDir)\n}", "title": "" }, { "docid": "c14efd6fb167b88a7d401abba1fa9fa4", "score": "0.35228142", "text": "func ValidatingWebhook(mgr manager.Manager) (webhook.Webhook, error) {\n\tcommon.Log.Info(\"Setting up validating webhook for KnativeServing\")\n\treturn builder.NewWebhookBuilder().\n\t\tName(\"validating.knativeserving.openshift.io\").\n\t\tValidating().\n\t\tOperations(admissionregistrationv1beta1.Create, admissionregistrationv1beta1.Update).\n\t\tWithManager(mgr).\n\t\tForType(&servingv1alpha1.KnativeServing{}).\n\t\tHandlers(&KnativeServingValidator{}).\n\t\tBuild()\n}", "title": "" }, { "docid": "d244934419de8b042cd2c71e79cb40de", "score": "0.3519058", "text": "func WithAgentConfig(storageForDriver agent.StorageForDriver, options agent.Options) func(f dependentHandler) handlerutil.WithParams {\n\treturn func(f dependentHandler) handlerutil.WithParams {\n\t\treturn func(w http.ResponseWriter, req *http.Request, params handlerutil.Params) {\n\t\t\tnamespace := params[namespaceParam]\n\t\t\ttoken := auth.ExtractToken(req.Header.Get(authHeader))\n\t\t\trestConfig, err := NewInClusterConfig(token)\n\t\t\tif err != nil {\n\t\t\t\t// TODO log details rather than return potentially sensitive details in error.\n\t\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tkubeClient, err := kubernetes.NewForConfig(restConfig)\n\t\t\tif err != nil {\n\t\t\t\t// TODO log details rather than return potentially sensitive details in error.\n\t\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tappRepoClient, err := appRepo.NewForConfig(restConfig)\n\t\t\tif err != nil {\n\t\t\t\t// TODO log details rather than return potentially sensitive details in error.\n\t\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tactionConfig, err := agent.NewActionConfig(storageForDriver, restConfig, kubeClient, namespace)\n\t\t\tif err != nil {\n\t\t\t\t// TODO log details rather than return potentially sensitive details in error.\n\t\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tcfg := agent.Config{\n\t\t\t\tAgentOptions: options,\n\t\t\t\tActionConfig: actionConfig,\n\t\t\t\tChartClient: chartUtils.NewChartClient(kubeClient, appRepoClient, options.UserAgent),\n\t\t\t}\n\t\t\tf(cfg, w, req, params)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "44d4a9faf5fe273ab4609f148b761f33", "score": "0.35187325", "text": "func (configFile *ConfigFile) SaveToWriter(writer io.Writer) error {\n\tdata, err := json.MarshalIndent(configFile, \"\", \"\\t\")\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = writer.Write(data)\n\treturn 
err\n}", "title": "" }, { "docid": "73bd698d28fbaff85bec3edaa891360b", "score": "0.3516281", "text": "func GkehubAlphaFeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfigToProto(o *alpha.FeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfig) *alphapb.GkehubAlphaFeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfig {\n\tif o == nil {\n\t\treturn nil\n\t}\n\tp := &alphapb.GkehubAlphaFeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfig{}\n\tp.SetMode(GkehubAlphaFeatureSpecFleetobservabilityLoggingConfigFleetScopeLogsConfigModeEnumToProto(o.Mode))\n\treturn p\n}", "title": "" }, { "docid": "18188058dbe67e160fa7bc406e35ab69", "score": "0.3510122", "text": "func (a App) WriteConfig() error {\n\tif a.AppData == nil {\n\t\treturn nil\n\t}\n\tdata, err := a.AppData.ConfigData()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn WriteFile(filepath.Join(a.RootDir, a.Config.String()), 0666, bytes.NewReader(data))\n}", "title": "" }, { "docid": "b31223aa64877b6b2029dfadcd5472c5", "score": "0.3505839", "text": "func NewFs(ctx context.Context, name, root string, m configmap.Mapper) (fs.Fs, error) {\n\t// pacer is not used in NewFs()\n\t_mapper = m\n\n\t// Parse config into Options struct\n\topt := new(Options)\n\terr := configstruct.Set(m, opt)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif opt.Password != \"\" {\n\t\tvar err error\n\t\topt.Password, err = obscure.Reveal(opt.Password)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"couldn't decrypt password: %w\", err)\n\t\t}\n\t}\n\n\tci := fs.GetConfig(ctx)\n\n\troot = strings.Trim(root, \"/\")\n\n\tf := &Fs{\n\t\tname: name,\n\t\troot: root,\n\t\topt: *opt,\n\t\tci: ci,\n\t\tpacer: fs.NewPacer(ctx, pacer.NewDefault(pacer.MinSleep(minSleep), pacer.MaxSleep(maxSleep), pacer.DecayConstant(decayConstant))),\n\t}\n\n\tf.features = (&fs.Features{\n\t\tReadMimeType: true,\n\t\tCanHaveEmptyDirectories: true,\n\t\t/* can't have multiple threads downloading\n\t\tThe raw file is split into equally-sized (currently 4MB, but it might change in the future, say to 8MB, 16MB, etc.) 
blocks, except the last one which might be smaller than 4MB.\n\t\tEach block is encrypted separately, where the size and sha1 after the encryption is performed on the block is added to the metadata of the block, but the original block size and sha1 is not in the metadata.\n\t\tWe can make assumption and implement the chunker, but for now, we would rather be safe about it, and let the block being concurrently downloaded and decrypted in the background, to speed up the download operation!\n\t\t*/\n\t\tNoMultiThreading: true,\n\t}).Fill(ctx, f)\n\n\tprotonDrive, err := newProtonDrive(ctx, f, opt, m)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tf.protonDrive = protonDrive\n\n\troot = f.sanitizePath(root)\n\tf.dirCache = dircache.New(\n\t\troot, /* root folder path */\n\t\tprotonDrive.MainShare.LinkID, /* real root ID is the root folder, since we can't go past this folder */\n\t\tf,\n\t)\n\terr = f.dirCache.FindRoot(ctx, false)\n\tif err != nil {\n\t\t// if the root directory is not found, the initialization will still work\n\t\t// but if it's other kinds of error, then we raise it\n\t\tif err != fs.ErrorDirNotFound {\n\t\t\treturn nil, fmt.Errorf(\"couldn't initialize a new root remote: %w\", err)\n\t\t}\n\n\t\t// Assume it is a file (taken and modified from box.go)\n\t\tnewRoot, remote := dircache.SplitPath(root)\n\t\ttempF := *f\n\t\ttempF.dirCache = dircache.New(newRoot, protonDrive.MainShare.LinkID, &tempF)\n\t\ttempF.root = newRoot\n\t\t// Make new Fs which is the parent\n\t\terr = tempF.dirCache.FindRoot(ctx, false)\n\t\tif err != nil {\n\t\t\t// No root so return old f\n\t\t\treturn f, nil\n\t\t}\n\t\t_, err := tempF.newObjectWithLink(ctx, remote, nil)\n\t\tif err != nil {\n\t\t\tif err == fs.ErrorObjectNotFound {\n\t\t\t\t// File doesn't exist so return old f\n\t\t\t\treturn f, nil\n\t\t\t}\n\t\t\treturn nil, err\n\t\t}\n\t\tf.features.Fill(ctx, &tempF)\n\t\t// XXX: update the old f here instead of returning tempF, since\n\t\t// `features` were already filled with functions having *f as a receiver.\n\t\t// See https://github.com/rclone/rclone/issues/2182\n\t\tf.dirCache = tempF.dirCache\n\t\tf.root = tempF.root\n\t\t// return an error with an fs which points to the parent\n\t\treturn f, fs.ErrorIsFile\n\t}\n\n\treturn f, nil\n}", "title": "" }, { "docid": "c2cf626976465f3c3af2465d04ca5a89", "score": "0.3499126", "text": "func NewConfigHandlerWrapper(service micro.Service) server.HandlerWrapper {\n\treturn func(h server.HandlerFunc) server.HandlerFunc {\n\t\treturn func(ctx context.Context, req server.Request, rsp interface{}) error {\n\t\t\tconfigMap := servicecontext.GetConfig(service.Options().Context)\n\t\t\tctx = servicecontext.WithConfig(ctx, configMap)\n\t\t\treturn h(ctx, req, rsp)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "01f4a756467bf7abd188c9eb6b20abe4", "score": "0.34936884", "text": "func WithConfig(val string) Option {\n\treturn func(c *config) {\n\t\tc.file = val\n\t}\n}", "title": "" }, { "docid": "8476b79621d769cb0303713e618f4690", "score": "0.34927553", "text": "func RegisterFS(fs http.FileSystem) {\n\tFS = fs\n}", "title": "" }, { "docid": "fd22880db6d4f317557375deddc65af6", "score": "0.34899476", "text": "func (t *VyosConfigTree) Setf(f string, args ...interface{}) bool {\n\tif args != nil {\n\t\treturn t.Set(fmt.Sprintf(f, args...))\n\t} else {\n\t\treturn t.Set(f)\n\t}\n}", "title": "" }, { "docid": "eb99dcbde0a7d307a1044d1d77fc77c3", "score": "0.3488177", "text": "func NewFederationConfigLister(indexer cache.Indexer) FederationConfigLister {\n\treturn 
&federationConfigLister{indexer: indexer}\n}", "title": "" }, { "docid": "cbed91d653d37c7cc48d11d51db02c43", "score": "0.3485443", "text": "func Serve(target string, r io.Reader, w io.Writer) error {\n\tsConn := conn.NewService(r, w, 5*time.Second)\n\n\tfd, err := unix.FanotifyInit(unix.FAN_CLASS_NOTIF, unix.O_RDONLY)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"fanotify_init: %w\", err)\n\t}\n\n\t// This blocks until the client tells us to start monitoring the target mountpoint.\n\tif err := sConn.WaitStart(); err != nil {\n\t\treturn fmt.Errorf(\"waiting for start inst: %w\", err)\n\t}\n\n\t// Start monitoring the target mountpoint.\n\tif err := unix.FanotifyMark(fd,\n\t\tunix.FAN_MARK_ADD|unix.FAN_MARK_MOUNT,\n\t\tunix.FAN_ACCESS|unix.FAN_OPEN,\n\t\tunix.AT_FDCWD,\n\t\ttarget,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"fanotify_mark: %w\", err)\n\t}\n\n\t// Notify \"started\" state to the client.\n\tif err := sConn.SendStarted(); err != nil {\n\t\treturn fmt.Errorf(\"failed to send started message: %w\", err)\n\t}\n\n\tnr := bufio.NewReader(os.NewFile(uintptr(fd), \"\"))\n\tfor {\n\t\tevent := &unix.FanotifyEventMetadata{}\n\t\tif err := binary.Read(nr, binary.LittleEndian, event); err != nil {\n\t\t\tif err == io.EOF {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\treturn fmt.Errorf(\"read fanotify fd: %w\", err)\n\t\t}\n\t\tif event.Vers != unix.FANOTIFY_METADATA_VERSION {\n\t\t\treturn fmt.Errorf(\"Fanotify version mismatch %d(got) != %d(want)\",\n\t\t\t\tevent.Vers, unix.FANOTIFY_METADATA_VERSION)\n\t\t}\n\t\tif event.Fd < 0 {\n\t\t\t// queue overflow\n\t\t\t// TODO: do we need something special?\n\t\t\tfmt.Fprintf(os.Stderr, \"Warn: queue overflow\")\n\t\t\tcontinue\n\t\t}\n\n\t\t// Notify file descriptor.\n\t\t// NOTE: There is no guarantee that we have /proc in this mount namespace\n\t\t// (the target container's rootfs is mounted on \"/\") so we send file\n\t\t// descriptor and let the client resolve the path of this file using /proc of\n\t\t// this process.\n\t\tif err := sConn.SendFd(int(event.Fd)); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to send fd %d to client: %w\", fd, err)\n\t\t}\n\t\tif err := unix.Close(int(event.Fd)); err != nil {\n\t\t\treturn fmt.Errorf(\"Close(fd): %w\", err)\n\t\t}\n\n\t\tcontinue\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "e392621b350f82f8a185d70a9fbf26d7", "score": "0.3482046", "text": "func (v *ViperConf) Save() error {\n\tv.lock.Lock()\n\tdefer v.lock.Unlock()\n\n\tif err := os.MkdirAll(filepath.Dir(v.filename), os.ModePerm); err != nil {\n\t\treturn fmt.Errorf(\"unable to create directory: %w\", err)\n\t}\n\n\tif _, err := os.Create(v.filename); err != nil {\n\t\treturn fmt.Errorf(\"unable to create file: %w\", err)\n\t}\n\n\tif err := v.viper.WriteConfigAs(v.filename); err != nil {\n\t\treturn fmt.Errorf(\"unable to write out config: %w\", err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "661552bad996c7a23556b3b5b222dac4", "score": "0.34809646", "text": "func WithEventingService() Option {\n\treturn func(c *Config) {\n\t\tc.enabledServices = append(c.enabledServices, eventing)\n\t}\n}", "title": "" } ]
6a0fdb2e08d03c05c55df5cc30b5c635
RouteActivationState_Values returns all elements of the RouteActivationState enum
[ { "docid": "d7408dffb49d43f44a5275793c3cf018", "score": "0.86227524", "text": "func RouteActivationState_Values() []string {\n\treturn []string{\n\t\tRouteActivationStateActive,\n\t\tRouteActivationStateInactive,\n\t}\n}", "title": "" } ]
[ { "docid": "dbb4d7ab6fb4bde91c1a5c17dc21cd61", "score": "0.76713586", "text": "func RouteState_Values() []string {\n\treturn []string{\n\t\tRouteStateCreating,\n\t\tRouteStateActive,\n\t\tRouteStateDeleting,\n\t\tRouteStateFailed,\n\t\tRouteStateUpdating,\n\t\tRouteStateInactive,\n\t}\n}", "title": "" }, { "docid": "845a62b4cb2ee45d81740c1af4b2a4d2", "score": "0.6247476", "text": "func State_Values() []string {\n\treturn []string{\n\t\tStateActive,\n\t\tStateDeleted,\n\t}\n}", "title": "" }, { "docid": "9802a5d33eb45e5a49d46d52b271f5d6", "score": "0.6226029", "text": "func CEState_Values() []string {\n\treturn []string{\n\t\tCEStateEnabled,\n\t\tCEStateDisabled,\n\t}\n}", "title": "" }, { "docid": "65db52035c10f877b466700e1a6ccdef", "score": "0.61703724", "text": "func ApplicationState_Values() []string {\n\treturn []string{\n\t\tApplicationStateCreating,\n\t\tApplicationStateActive,\n\t\tApplicationStateDeleting,\n\t\tApplicationStateFailed,\n\t\tApplicationStateUpdating,\n\t}\n}", "title": "" }, { "docid": "1e911ebe261a4c826dd20c03ef12d1c7", "score": "0.61666095", "text": "func AuthPolicyState_Values() []string {\n\treturn []string{\n\t\tAuthPolicyStateActive,\n\t\tAuthPolicyStateInactive,\n\t}\n}", "title": "" }, { "docid": "710123479be11b958d3ecdcc24213a82", "score": "0.6132786", "text": "func StateValues() []State {\n\treturn _StateValues\n}", "title": "" }, { "docid": "f74046693a23e957b0efcebebdc50176", "score": "0.60887766", "text": "func (EventSourceState) Values() []EventSourceState {\n\treturn []EventSourceState{\n\t\t\"PENDING\",\n\t\t\"ACTIVE\",\n\t\t\"DELETED\",\n\t}\n}", "title": "" }, { "docid": "47c064ed769d9c3c3822677c05752077", "score": "0.6013934", "text": "func (CostEstimationServiceResourceState) Values() []CostEstimationServiceResourceState {\n\treturn []CostEstimationServiceResourceState{\n\t\t\"ACTIVE\",\n\t\t\"INACTIVE\",\n\t}\n}", "title": "" }, { "docid": "7f9606dee71c22a6cf172f62792c268f", "score": "0.6001001", "text": "func PossibleEffectiveRouteStateValues() []EffectiveRouteState {\n\treturn []EffectiveRouteState{\n\t\tEffectiveRouteStateActive,\n\t\tEffectiveRouteStateInvalid,\n\t}\n}", "title": "" }, { "docid": "3528898b5da366fd49f1b81c601985ed", "score": "0.5970399", "text": "func ServiceState_Values() []string {\n\treturn []string{\n\t\tServiceStateCreating,\n\t\tServiceStateActive,\n\t\tServiceStateDeleting,\n\t\tServiceStateFailed,\n\t}\n}", "title": "" }, { "docid": "63bb497581e0c70329d3786a229c0184", "score": "0.5959572", "text": "func (RouteType) Values() []RouteType {\n\treturn []RouteType{\n\t\t\"Transactional\",\n\t\t\"Promotional\",\n\t\t\"Premium\",\n\t}\n}", "title": "" }, { "docid": "dba205c09971f2e34cb9a656c2ef95ba", "score": "0.59567696", "text": "func ETLState_Values() []ETLState {\n\treturn []ETLState{\n\t\tETLStateOff,\n\t\tETLStateOn,\n\t\tETLStateReserved0,\n\t\tETLStateReserved1,\n\t}\n}", "title": "" }, { "docid": "138e5e833b338329c20d211cc23c0649", "score": "0.5944986", "text": "func (InstanceMetadataEndpointState) Values() []InstanceMetadataEndpointState {\n\treturn []InstanceMetadataEndpointState{\n\t\t\"disabled\",\n\t\t\"enabled\",\n\t}\n}", "title": "" }, { "docid": "68691e541a2c2f4e944723f20b7ca313", "score": "0.5918318", "text": "func ShipmentState_Values() []string {\n\treturn []string{\n\t\tShipmentStateReceived,\n\t\tShipmentStateReturned,\n\t}\n}", "title": "" }, { "docid": "565fdde50acf2b2514e9a6f35ac1c089", "score": "0.58863014", "text": "func (HsmState) Values() []HsmState {\n\treturn 
[]HsmState{\n\t\t\"CREATE_IN_PROGRESS\",\n\t\t\"ACTIVE\",\n\t\t\"DEGRADED\",\n\t\t\"DELETE_IN_PROGRESS\",\n\t\t\"DELETED\",\n\t}\n}", "title": "" }, { "docid": "5a144e7b2ae7506afa17a3cdb4ce02a8", "score": "0.5825984", "text": "func EnvironmentState_Values() []string {\n\treturn []string{\n\t\tEnvironmentStateCreating,\n\t\tEnvironmentStateActive,\n\t\tEnvironmentStateDeleting,\n\t\tEnvironmentStateFailed,\n\t}\n}", "title": "" }, { "docid": "71d85896e2026fc58658f61655e9bbf4", "score": "0.5790802", "text": "func RouteType_Values() []string {\n\treturn []string{\n\t\tRouteTypeDefault,\n\t\tRouteTypeUriPath,\n\t}\n}", "title": "" }, { "docid": "7f692174b4894e513c49afbdfbdea93e", "score": "0.5788944", "text": "func ConfigurationState_Values() []string {\n\treturn []string{\n\t\tConfigurationStateActive,\n\t\tConfigurationStateDeleting,\n\t\tConfigurationStateDeleteFailed,\n\t}\n}", "title": "" }, { "docid": "73f018eb799948ef7969db93403ec6f8", "score": "0.5778573", "text": "func GetDrgRouteDistributionLifecycleStateEnumStringValues() []string {\n\treturn []string{\n\t\t\"PROVISIONING\",\n\t\t\"AVAILABLE\",\n\t\t\"TERMINATING\",\n\t\t\"TERMINATED\",\n\t}\n}", "title": "" }, { "docid": "5911d4b23ee8d93c87cc11a05ed77b48", "score": "0.5757704", "text": "func (RuleState) Values() []RuleState {\n\treturn []RuleState{\n\t\t\"ENABLED\",\n\t\t\"DISABLED\",\n\t}\n}", "title": "" }, { "docid": "05a160e7a1c30677319d828559cd0e2d", "score": "0.57351285", "text": "func (EnvironmentState) Values() []EnvironmentState {\n\treturn []EnvironmentState{\n\t\t\"READY_FOR_DEPLOYMENT\",\n\t\t\"DEPLOYING\",\n\t\t\"ROLLING_BACK\",\n\t\t\"ROLLED_BACK\",\n\t}\n}", "title": "" }, { "docid": "2cafbe430bf170e3843b6817df09d4cd", "score": "0.5729267", "text": "func (BackupState) Values() []BackupState {\n\treturn []BackupState{\n\t\t\"CREATE_IN_PROGRESS\",\n\t\t\"READY\",\n\t\t\"DELETED\",\n\t\t\"PENDING_DELETION\",\n\t}\n}", "title": "" }, { "docid": "3b894a48f081e4f9f5ec124c58c76b17", "score": "0.5700625", "text": "func PossibleStateValues() []State {\n\treturn []State{Activating, ActivationFailed, Active, Deleted, Deleting, DeletionFailed, FailingOver, FailoverFailed, Resuming, Suspended, Suspending, Transitioning}\n}", "title": "" }, { "docid": "262433883a01b39f0af2900de91e0612", "score": "0.5688622", "text": "func VerificationState_Values() []string {\n\treturn []string{\n\t\tVerificationStateFalsePositive,\n\t\tVerificationStateBenignPositive,\n\t\tVerificationStateTruePositive,\n\t\tVerificationStateUnknown,\n\t}\n}", "title": "" }, { "docid": "f509c7d086ccfbd5abf69a2ddf39febf", "score": "0.5653724", "text": "func (EndpointStatus) Values() []EndpointStatus {\n\treturn []EndpointStatus{\n\t\t\"PENDING\",\n\t\t\"AVAILABLE\",\n\t}\n}", "title": "" }, { "docid": "2050a54102f4ff688c586275cb7013ef", "score": "0.5637887", "text": "func (TrafficRoutingType) Values() []TrafficRoutingType {\n\treturn []TrafficRoutingType{\n\t\t\"TimeBasedCanary\",\n\t\t\"TimeBasedLinear\",\n\t\t\"AllAtOnce\",\n\t}\n}", "title": "" }, { "docid": "c36adce2eca4347e0532bbadc292fac6", "score": "0.5623975", "text": "func (ConfigurationState) Values() []ConfigurationState {\n\treturn []ConfigurationState{\n\t\t\"ACTIVE\",\n\t\t\"DELETING\",\n\t\t\"DELETE_FAILED\",\n\t}\n}", "title": "" }, { "docid": "b3610d04deafcdf5cc0e049fe947b456", "score": "0.5582539", "text": "func PossibleExpressRouteAuthorizationProvisioningStateValues() []ExpressRouteAuthorizationProvisioningState {\n\treturn 
[]ExpressRouteAuthorizationProvisioningState{ExpressRouteAuthorizationProvisioningStateFailed, ExpressRouteAuthorizationProvisioningStateSucceeded, ExpressRouteAuthorizationProvisioningStateUpdating}\n}", "title": "" }, { "docid": "34ca721a69c55694a348f8d20aadcf8a", "score": "0.5563647", "text": "func PossibleStateValues() []State {\n\treturn []State{Active, ResolvedByPositiveResult, ResolvedByStateChange, ResolvedByTimer, ResolvedManually}\n}", "title": "" }, { "docid": "d84bab391e57e9c4c6530342f6e4bb76", "score": "0.5554506", "text": "func ModelEndpointStatus_Values() []string {\n\treturn []string{\n\t\tModelEndpointStatusAssociated,\n\t\tModelEndpointStatusDissociated,\n\t}\n}", "title": "" }, { "docid": "e4f91e278c9fb8737027fa52f4321367", "score": "0.55501205", "text": "func (DeploymentState) Values() []DeploymentState {\n\treturn []DeploymentState{\n\t\t\"BAKING\",\n\t\t\"VALIDATING\",\n\t\t\"DEPLOYING\",\n\t\t\"COMPLETE\",\n\t\t\"ROLLING_BACK\",\n\t\t\"ROLLED_BACK\",\n\t}\n}", "title": "" }, { "docid": "9404447013ca0c300ff400b1a2e418c4", "score": "0.5546994", "text": "func ClusterState_Values() []string {\n\treturn []string{\n\t\tClusterStateActive,\n\t\tClusterStateCreating,\n\t\tClusterStateDeleting,\n\t\tClusterStateFailed,\n\t\tClusterStateHealing,\n\t\tClusterStateMaintenance,\n\t\tClusterStateRebootingBroker,\n\t\tClusterStateUpdating,\n\t}\n}", "title": "" }, { "docid": "c067e4eac875d4a42243d1ce76318bf4", "score": "0.55277354", "text": "func FailbackState_Values() []string {\n\treturn []string{\n\t\tFailbackStateFailbackNotStarted,\n\t\tFailbackStateFailbackInProgress,\n\t\tFailbackStateFailbackReadyForLaunch,\n\t\tFailbackStateFailbackCompleted,\n\t\tFailbackStateFailbackError,\n\t\tFailbackStateFailbackNotReadyForLaunch,\n\t\tFailbackStateFailbackLaunchStateNotAvailable,\n\t}\n}", "title": "" }, { "docid": "8c64196b215195f832c50e731ae848f9", "score": "0.5527553", "text": "func PossibleEndpointResourceStateValues() []EndpointResourceState {\n\treturn []EndpointResourceState{\n\t\tEndpointResourceStateCreating,\n\t\tEndpointResourceStateDeleting,\n\t\tEndpointResourceStateRunning,\n\t\tEndpointResourceStateStarting,\n\t\tEndpointResourceStateStopped,\n\t\tEndpointResourceStateStopping,\n\t}\n}", "title": "" }, { "docid": "8022d7fb1aae29ad0c1ed16a1645fd3a", "score": "0.5524668", "text": "func (LifecycleState) Values() []LifecycleState {\n\treturn []LifecycleState{\n\t\t\"Pending\",\n\t\t\"Pending:Wait\",\n\t\t\"Pending:Proceed\",\n\t\t\"Quarantined\",\n\t\t\"InService\",\n\t\t\"Terminating\",\n\t\t\"Terminating:Wait\",\n\t\t\"Terminating:Proceed\",\n\t\t\"Terminated\",\n\t\t\"Detaching\",\n\t\t\"Detached\",\n\t\t\"EnteringStandby\",\n\t\t\"Standby\",\n\t\t\"Warmed:Pending\",\n\t\t\"Warmed:Pending:Wait\",\n\t\t\"Warmed:Pending:Proceed\",\n\t\t\"Warmed:Terminating\",\n\t\t\"Warmed:Terminating:Wait\",\n\t\t\"Warmed:Terminating:Proceed\",\n\t\t\"Warmed:Terminated\",\n\t\t\"Warmed:Stopped\",\n\t\t\"Warmed:Running\",\n\t\t\"Warmed:Hibernated\",\n\t}\n}", "title": "" }, { "docid": "38db6a082d314d74eb17a75e1a401dc8", "score": "0.5522823", "text": "func ClusterState_Values() []string {\n\treturn []string{\n\t\tClusterStateAwaitingQuorum,\n\t\tClusterStatePending,\n\t\tClusterStateInUse,\n\t\tClusterStateComplete,\n\t\tClusterStateCancelled,\n\t}\n}", "title": "" }, { "docid": "8d13f4ccb832622ca060d2e5cb44c3d7", "score": "0.54692125", "text": "func (StateValue) Values() []StateValue {\n\treturn []StateValue{\n\t\t\"OK\",\n\t\t\"ALARM\",\n\t\t\"INSUFFICIENT_DATA\",\n\t}\n}", "title": "" 
}, { "docid": "07516779198228f625d3a06128a15e27", "score": "0.54664147", "text": "func (AnomalyDetectorStateValue) Values() []AnomalyDetectorStateValue {\n\treturn []AnomalyDetectorStateValue{\n\t\t\"PENDING_TRAINING\",\n\t\t\"TRAINED_INSUFFICIENT_DATA\",\n\t\t\"TRAINED\",\n\t}\n}", "title": "" }, { "docid": "df013d43d76e2ae6506bb763ce0879f5", "score": "0.54598755", "text": "func GetDrgRouteDistributionLifecycleStateEnumValues() []DrgRouteDistributionLifecycleStateEnum {\n\tvalues := make([]DrgRouteDistributionLifecycleStateEnum, 0)\n\tfor _, v := range mappingDrgRouteDistributionLifecycleStateEnum {\n\t\tvalues = append(values, v)\n\t}\n\treturn values\n}", "title": "" }, { "docid": "eb50c65b7c75eee5c761cc11f12ed049", "score": "0.54562557", "text": "func DashboardUIState_Values() []string {\n\treturn []string{\n\t\tDashboardUIStateExpanded,\n\t\tDashboardUIStateCollapsed,\n\t}\n}", "title": "" }, { "docid": "e94878a4bb3fd147a8f5de8384b5f6b5", "score": "0.5439599", "text": "func PossibleStateValues() []State {\n\treturn []State{Active, Deleted}\n}", "title": "" }, { "docid": "91a49e29e6e295ce2e0eecd5c453970f", "score": "0.5437499", "text": "func (FreeTrialStatus) Values() []FreeTrialStatus {\n\treturn []FreeTrialStatus{\n\t\t\"ACTIVE\",\n\t\t\"INACTIVE\",\n\t}\n}", "title": "" }, { "docid": "e044ed3bca382df137486c335f284ff0", "score": "0.54318905", "text": "func (FutureState) Values() []string {\n\treturn []string{\n\t\tFutureStateInstall.String(),\n\t\tFutureStateRemove.String(),\n\t}\n}", "title": "" }, { "docid": "9aafd9d8ad9d36e1b4a7f5671cd18905", "score": "0.54300463", "text": "func AccessStatus_Values() []string {\n\treturn []string{\n\t\tAccessStatusEnabled,\n\t\tAccessStatusUnderChange,\n\t\tAccessStatusDisabled,\n\t}\n}", "title": "" }, { "docid": "48f7ca2aeb45e30a769fc7c6d637eabb", "score": "0.54300076", "text": "func (ListStateFilterAction) Values() []ListStateFilterAction {\n\treturn []ListStateFilterAction{\n\t\t\"include\",\n\t\t\"exclude\",\n\t\t\"ignore\",\n\t}\n}", "title": "" }, { "docid": "1f4f312b0ccd2aed9fdd698c64199c5e", "score": "0.54279333", "text": "func (InferenceSchedulerStatus) Values() []InferenceSchedulerStatus {\n\treturn []InferenceSchedulerStatus{\n\t\t\"PENDING\",\n\t\t\"RUNNING\",\n\t\t\"STOPPING\",\n\t\t\"STOPPED\",\n\t}\n}", "title": "" }, { "docid": "c62bff9172815f15c6bdff96681f8295", "score": "0.5421163", "text": "func SleepTimerStateValues() []SleepTimerState {\n\treturn _SleepTimerStateValues\n}", "title": "" }, { "docid": "0341f3dc494dbd2508d2bc03a52fe050", "score": "0.5416763", "text": "func (ArtifactsState) Values() []ArtifactsState {\n\treturn []ArtifactsState{\n\t\t\"Enabled\",\n\t\t\"Disabled\",\n\t}\n}", "title": "" }, { "docid": "e765b45537c7505a9f251f83d6f0ddac", "score": "0.5408688", "text": "func PossibleSiteAvailabilityStateValues() []SiteAvailabilityState {\n\treturn []SiteAvailabilityState{\n\t\tSiteAvailabilityStateNormal,\n\t\tSiteAvailabilityStateLimited,\n\t\tSiteAvailabilityStateDisasterRecoveryMode,\n\t}\n}", "title": "" }, { "docid": "6b652c38c6f7638ca84b9a58900e4f93", "score": "0.5399392", "text": "func (st *StateTable) GetAllStateTimes() (statusMap map[string]int64) {\n\tst.rLock()\n\tdefer st.rUnlock()\n\tstatusMap = make(map[string]int64)\n\tfor k, v := range st.Status {\n\t\tstatusMap[k] = v.RegisteredTime\n\t}\n\treturn statusMap\n}", "title": "" }, { "docid": "81452d025379923057228b0d4f801067", "score": "0.5373311", "text": "func (InstanceRefreshStatus) Values() []InstanceRefreshStatus {\n\treturn 
[]InstanceRefreshStatus{\n\t\t\"Pending\",\n\t\t\"InProgress\",\n\t\t\"Successful\",\n\t\t\"Failed\",\n\t\t\"Cancelling\",\n\t\t\"Cancelled\",\n\t\t\"RollbackInProgress\",\n\t\t\"RollbackFailed\",\n\t\t\"RollbackSuccessful\",\n\t}\n}", "title": "" }, { "docid": "60ba7c935058cce11b9f58f7dc12b5a2", "score": "0.53445137", "text": "func InstalledComponentLifecycleState_Values() []string {\n\treturn []string{\n\t\tInstalledComponentLifecycleStateNew,\n\t\tInstalledComponentLifecycleStateInstalled,\n\t\tInstalledComponentLifecycleStateStarting,\n\t\tInstalledComponentLifecycleStateRunning,\n\t\tInstalledComponentLifecycleStateStopping,\n\t\tInstalledComponentLifecycleStateErrored,\n\t\tInstalledComponentLifecycleStateBroken,\n\t\tInstalledComponentLifecycleStateFinished,\n\t}\n}", "title": "" }, { "docid": "aff64ffe7cfb8452143dd8ecb4d482e6", "score": "0.53435445", "text": "func (RepositoryAssociationState) Values() []RepositoryAssociationState {\n\treturn []RepositoryAssociationState{\n\t\t\"Associated\",\n\t\t\"Associating\",\n\t\t\"Failed\",\n\t\t\"Disassociating\",\n\t\t\"Disassociated\",\n\t}\n}", "title": "" }, { "docid": "06ebf8eb67114ff197da5dfaaa764505", "score": "0.5322082", "text": "func DiscoveryIntegrationStatus_Values() []string {\n\treturn []string{\n\t\tDiscoveryIntegrationStatusEnabled,\n\t\tDiscoveryIntegrationStatusDisabled,\n\t}\n}", "title": "" }, { "docid": "85fae07481b392608c52712bfd409797", "score": "0.5295261", "text": "func (ClusterState) Values() []ClusterState {\n\treturn []ClusterState{\n\t\t\"ACTIVE\",\n\t\t\"CREATING\",\n\t\t\"DELETING\",\n\t\t\"FAILED\",\n\t\t\"HEALING\",\n\t\t\"MAINTENANCE\",\n\t\t\"REBOOTING_BROKER\",\n\t\t\"UPDATING\",\n\t}\n}", "title": "" }, { "docid": "69656576e45e708c7452b6c753288ebb", "score": "0.52945083", "text": "func AutoRegistrationStatus_Values() []string {\n\treturn []string{\n\t\tAutoRegistrationStatusEnable,\n\t\tAutoRegistrationStatusDisable,\n\t}\n}", "title": "" }, { "docid": "b48d55fd9587b95ba4ddf62d62558d57", "score": "0.5294176", "text": "func PossibleStateValues() []State {\n\treturn []State{StateAccepted, StateCompiling, StateEnded, StateNew, StatePaused, StateQueued, StateRunning, StateScheduling, StateStarting, StateWaitingForCapacity}\n}", "title": "" }, { "docid": "bad377dfaca1db3ce0141456dd59dece", "score": "0.52896565", "text": "func GetUserLifecycleStateEnumValues() []UserLifecycleStateEnum {\n\tvalues := make([]UserLifecycleStateEnum, 0)\n\tfor _, v := range mappingUserLifecycleState {\n\t\tvalues = append(values, v)\n\t}\n\treturn values\n}", "title": "" }, { "docid": "4871f778b42fbc1c76a1fbf6573c52ca", "score": "0.5288638", "text": "func ConnectorStatus_Values() []string {\n\treturn []string{\n\t\tConnectorStatusCreating,\n\t\tConnectorStatusActive,\n\t\tConnectorStatusDeleting,\n\t\tConnectorStatusFailed,\n\t}\n}", "title": "" }, { "docid": "ffc8a6867c81eaad7d0c5726e7903a64", "score": "0.52856904", "text": "func PossibleStateValues() []State {\n\treturn []State{StateDeprovisioning, StateFailed, StateNetworkSourceDeleted, StateProvisioning, StateSucceeded}\n}", "title": "" }, { "docid": "41b04cf0ea198af906c641b8a6d43328", "score": "0.5284486", "text": "func PossibleMigrationStateValues() []MigrationState {\n\treturn []MigrationState{MigrationStateCompleted, MigrationStateInProgress}\n}", "title": "" }, { "docid": "10a88d5fb02eb2d7a8ef71892e0be13b", "score": "0.527646", "text": "func (FleetStatus) Values() []FleetStatus {\n\treturn 
[]FleetStatus{\n\t\t\"CREATING\",\n\t\t\"ACTIVE\",\n\t\t\"DELETING\",\n\t\t\"DELETED\",\n\t\t\"FAILED_TO_CREATE\",\n\t\t\"FAILED_TO_DELETE\",\n\t}\n}", "title": "" }, { "docid": "e12b056bd937e4a9a7f5013464eb185b", "score": "0.52704686", "text": "func CloudComponentState_Values() []string {\n\treturn []string{\n\t\tCloudComponentStateRequested,\n\t\tCloudComponentStateInitiated,\n\t\tCloudComponentStateDeployable,\n\t\tCloudComponentStateFailed,\n\t\tCloudComponentStateDeprecated,\n\t}\n}", "title": "" }, { "docid": "8a5b98772573faaac0efe5fb3284b297", "score": "0.5263424", "text": "func (CloudHsmObjectState) Values() []CloudHsmObjectState {\n\treturn []CloudHsmObjectState{\n\t\t\"READY\",\n\t\t\"UPDATING\",\n\t\t\"DEGRADED\",\n\t}\n}", "title": "" }, { "docid": "7f8f9699cd55f0c0d48b28d0f2407aab", "score": "0.5259423", "text": "func (VpcConnectionState) Values() []VpcConnectionState {\n\treturn []VpcConnectionState{\n\t\t\"CREATING\",\n\t\t\"AVAILABLE\",\n\t\t\"INACTIVE\",\n\t\t\"DEACTIVATING\",\n\t\t\"DELETING\",\n\t\t\"FAILED\",\n\t\t\"REJECTED\",\n\t\t\"REJECTING\",\n\t}\n}", "title": "" }, { "docid": "2bfcbb22d5b2f4e00eb6ab201a2deccf", "score": "0.52569735", "text": "func LaunchStatus_Values() []string {\n\treturn []string{\n\t\tLaunchStatusPending,\n\t\tLaunchStatusInProgress,\n\t\tLaunchStatusLaunched,\n\t\tLaunchStatusFailed,\n\t\tLaunchStatusTerminated,\n\t}\n}", "title": "" }, { "docid": "c21477302d5c280e0920944f0c432b62", "score": "0.5255529", "text": "func PossibleLeaseStateValues() []LeaseState {\n\treturn []LeaseState{LeaseStateAvailable, LeaseStateBreaking, LeaseStateBroken, LeaseStateExpired, LeaseStateLeased}\n}", "title": "" }, { "docid": "c5a554fb25b24a07ed1fa4fc31d253a9", "score": "0.525015", "text": "func PossibleStateValues() []State {\n\treturn []State{StateClosed, StateOpen, StateProposed, StateRemoved, StateResolved}\n}", "title": "" }, { "docid": "ddfe57a75bd5e429cfc01232973e1f6c", "score": "0.52495223", "text": "func Status_Values() []string {\n\treturn []string{\n\t\tStatusEnabled,\n\t\tStatusDisabled,\n\t}\n}", "title": "" }, { "docid": "f8678cb50b95251f51d35ad88981b55d", "score": "0.52414423", "text": "func (ArtifactsConcatenationState) Values() []ArtifactsConcatenationState {\n\treturn []ArtifactsConcatenationState{\n\t\t\"Enabled\",\n\t\t\"Disabled\",\n\t}\n}", "title": "" }, { "docid": "c9f165df76021a16184b59b3bc4729f7", "score": "0.5237278", "text": "func (WirelessGatewayTaskStatus) Values() []WirelessGatewayTaskStatus {\n\treturn []WirelessGatewayTaskStatus{\n\t\t\"PENDING\",\n\t\t\"IN_PROGRESS\",\n\t\t\"FIRST_RETRY\",\n\t\t\"SECOND_RETRY\",\n\t\t\"COMPLETED\",\n\t\t\"FAILED\",\n\t}\n}", "title": "" }, { "docid": "d70b466f1c392e6fe2bc4af04c8b1d15", "score": "0.52172923", "text": "func PossibleResourceStateValues() []ResourceState {\n\treturn []ResourceState{ResourceStateCreateFailed, ResourceStateCreating, ResourceStateDeleteFailed, ResourceStateDeleting, ResourceStateDisabled, ResourceStateDisableFailed, ResourceStateDisabling, ResourceStateEnableFailed, ResourceStateEnabling, ResourceStateRunning, ResourceStateUpdateFailed, ResourceStateUpdating}\n}", "title": "" }, { "docid": "a3fc24329265608736cf463dbe8700af", "score": "0.52154016", "text": "func PossibleSiteRuntimeStateValues() []SiteRuntimeState {\n\treturn []SiteRuntimeState{\n\t\tSiteRuntimeStateREADY,\n\t\tSiteRuntimeStateSTOPPED,\n\t\tSiteRuntimeStateUNKNOWN,\n\t}\n}", "title": "" }, { "docid": "5608cdc2c4252e206b9e3b6667c606d5", "score": "0.5212467", "text": "func (ScanStatusCode) Values() 
[]ScanStatusCode {\n\treturn []ScanStatusCode{\n\t\t\"ACTIVE\",\n\t\t\"INACTIVE\",\n\t}\n}", "title": "" }, { "docid": "963a1261baf8a5cc46ac313236f18004", "score": "0.52103776", "text": "func GetLoadBalancerLifecycleStateEnumStringValues() []string {\n\treturn []string{\n\t\t\"CREATING\",\n\t\t\"FAILED\",\n\t\t\"ACTIVE\",\n\t\t\"DELETING\",\n\t\t\"DELETED\",\n\t}\n}", "title": "" }, { "docid": "7ffd434727db54d78c274add104f1b75", "score": "0.5203408", "text": "func PossibleStateValues() []State {\n\treturn []State{Deleted, Disabled, Enabled, PastDue, Warned}\n}", "title": "" }, { "docid": "26f0b9da3f12e70104abfd41a3d07def", "score": "0.5201195", "text": "func (EvaluationState) Values() []EvaluationState {\n\treturn []EvaluationState{\n\t\t\"PARTIAL_DATA\",\n\t}\n}", "title": "" }, { "docid": "05da32e1027a102140692f1c7d1e51b3", "score": "0.51988465", "text": "func MeetingFeatureStatus_Values() []string {\n\treturn []string{\n\t\tMeetingFeatureStatusAvailable,\n\t\tMeetingFeatureStatusUnavailable,\n\t}\n}", "title": "" }, { "docid": "f094f93cf465c00fb864c78528d0f1b3", "score": "0.5194724", "text": "func PossibleFirewallAllowAzureIpsStateValues() []FirewallAllowAzureIpsState {\n\treturn []FirewallAllowAzureIpsState{Disabled, Enabled}\n}", "title": "" }, { "docid": "bee6bf0abe043d9f807ba0d4adbc18de", "score": "0.5194098", "text": "func ActionStatus_Values() []string {\n\treturn []string{\n\t\tActionStatusScheduled,\n\t\tActionStatusPending,\n\t\tActionStatusRunning,\n\t\tActionStatusUnknown,\n\t}\n}", "title": "" }, { "docid": "85e729759304c328d5dbde5810a010ec", "score": "0.5186995", "text": "func PossibleProvisioningStateValues() []ProvisioningState {\n return []ProvisioningState{ProvisioningStateCanceled,ProvisioningStateCreating,ProvisioningStateDeleting,ProvisioningStateFailed,ProvisioningStateSucceeded,ProvisioningStateUpdating}\n }", "title": "" }, { "docid": "d7b575c12f2f685abc10c9d66be739a3", "score": "0.5183242", "text": "func (page EventRouteCollectionPage) Values() []EventRoute {\n\tif page.erc.IsEmpty() {\n\t\treturn nil\n\t}\n\treturn *page.erc.Value\n}", "title": "" }, { "docid": "41a4c855e0e3b764acbebcec79135ee3", "score": "0.5183043", "text": "func PossibleLeaseStateTypeValues() []LeaseStateType {\n\treturn []LeaseStateType{\n\t\tLeaseStateTypeAvailable,\n\t\tLeaseStateTypeLeased,\n\t\tLeaseStateTypeExpired,\n\t\tLeaseStateTypeBreaking,\n\t\tLeaseStateTypeBroken,\n\t}\n}", "title": "" }, { "docid": "21518495e43b277b1e3fdab20aaf4486", "score": "0.5176527", "text": "func getPossibleProcStates() []string {\n\tfields := []string{\n\t\t\"blocked\",\n\t\t\"zombie\",\n\t\t\"stopped\",\n\t\t\"running\",\n\t\t\"sleeping\",\n\t}\n\n\tswitch runtime.GOOS {\n\tcase \"windows\":\n\t\tfields = []string{\"running\"}\n\tcase \"freebsd\":\n\t\tfields = append(fields, \"idle\", \"wait\")\n\tcase \"darwin\":\n\t\tfields = append(fields, \"idle\")\n\tcase \"openbsd\":\n\t\tfields = append(fields, \"idle\")\n\tcase \"linux\":\n\t\tfields = append(fields, \"dead\", \"paging\", \"idle\")\n\t}\n\treturn fields\n}", "title": "" }, { "docid": "14292f2cb8a7dc1752de5f6d6c9e8e38", "score": "0.51759535", "text": "func GetPbfListingLifecycleStateEnumStringValues() []string {\n\treturn []string{\n\t\t\"ACTIVE\",\n\t\t\"INACTIVE\",\n\t\t\"DELETED\",\n\t}\n}", "title": "" }, { "docid": "082c6344a9419fc46e32f0956e5e051a", "score": "0.5171085", "text": "func Status_Values() []string {\n\treturn []string{\n\t\tStatusLatest,\n\t\tStatusStale,\n\t}\n}", "title": "" }, { "docid": "d116398b728622e63a00e65c49b7e546", 
"score": "0.51642054", "text": "func GetDashboardLifecycleStateEnumValues() []DashboardLifecycleStateEnum {\n\tvalues := make([]DashboardLifecycleStateEnum, 0)\n\tfor _, v := range mappingDashboardLifecycleStateEnum {\n\t\tvalues = append(values, v)\n\t}\n\treturn values\n}", "title": "" }, { "docid": "ef245cecced4b9618d4d7c5573521e45", "score": "0.5163134", "text": "func GetDashboardLifecycleStateEnumStringValues() []string {\n\treturn []string{\n\t\t\"CREATING\",\n\t\t\"UPDATING\",\n\t\t\"ACTIVE\",\n\t\t\"DELETING\",\n\t\t\"DELETED\",\n\t\t\"FAILED\",\n\t}\n}", "title": "" }, { "docid": "92f9d6d58eb96fe41331594e42c1bae6", "score": "0.5162078", "text": "func PossibleLivePipelineStateValues() []LivePipelineState {\n\treturn []LivePipelineState{LivePipelineStateActivating, LivePipelineStateActive, LivePipelineStateDeactivating, LivePipelineStateInactive}\n}", "title": "" }, { "docid": "e938d06d979fb45b7042e6e039228c2c", "score": "0.5158035", "text": "func PossibleDataLakeAnalyticsAccountStateValues() []DataLakeAnalyticsAccountState {\n\treturn []DataLakeAnalyticsAccountState{Active, Suspended}\n}", "title": "" }, { "docid": "5342ad9d383527bdd453d07d83a58f09", "score": "0.5157068", "text": "func PossibleEndpointProvisioningStateValues() []EndpointProvisioningState {\n\treturn []EndpointProvisioningState{\n\t\tEndpointProvisioningStateCreating,\n\t\tEndpointProvisioningStateDeleting,\n\t\tEndpointProvisioningStateFailed,\n\t\tEndpointProvisioningStateSucceeded,\n\t\tEndpointProvisioningStateUpdating,\n\t}\n}", "title": "" }, { "docid": "b001d7b4c74bc026a4b6c380410e3b77", "score": "0.51563334", "text": "func BackupLifecycle_Values() []string {\n\treturn []string{\n\t\tBackupLifecycleAvailable,\n\t\tBackupLifecycleCreating,\n\t\tBackupLifecycleTransferring,\n\t\tBackupLifecycleDeleted,\n\t\tBackupLifecycleFailed,\n\t\tBackupLifecyclePending,\n\t\tBackupLifecycleCopying,\n\t}\n}", "title": "" }, { "docid": "41006996857e5d7f4962ad62577b6550", "score": "0.5155414", "text": "func (jbobject *ServicesEc2ModelVpnState) Values() []*ServicesEc2ModelVpnState {\n\tjret, err := javabind.GetEnv().CallStaticMethod(\"com/amazonaws/services/ec2/model/VpnState\", \"values\", javabind.ObjectArrayType(\"com/amazonaws/services/ec2/model/VpnState\"))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tretconv := javabind.NewJavaToGoObjectArray(javabind.NewJavaToGoCallable(), \"com/amazonaws/services/ec2/model/VpnState\")\n\tdst := new([]*ServicesEc2ModelVpnState)\n\tretconv.Dest(dst)\n\tif err := retconv.Convert(javabind.ObjectRef(jret)); err != nil {\n\t\tpanic(err)\n\t}\n\tretconv.CleanUp()\n\treturn *dst\n}", "title": "" }, { "docid": "7265744919b53a7cbfab8474aabe669f", "score": "0.5150165", "text": "func PossibleSerialPortStateValues() []SerialPortState {\n\treturn []SerialPortState{\n\t\tSerialPortStateEnabled,\n\t\tSerialPortStateDisabled,\n\t}\n}", "title": "" }, { "docid": "e664bf272f6b228bde1ec7b3af9891f6", "score": "0.51482964", "text": "func AuthorizerStatus_Values() []string {\n\treturn []string{\n\t\tAuthorizerStatusActive,\n\t\tAuthorizerStatusInactive,\n\t}\n}", "title": "" }, { "docid": "882b542e1d625986af65c1d713762ff3", "score": "0.5146258", "text": "func (ReceivedStatus) Values() []ReceivedStatus {\n\treturn []ReceivedStatus{\n\t\t\"PENDING_WORKFLOW\",\n\t\t\"PENDING_ACCEPT\",\n\t\t\"REJECTED\",\n\t\t\"ACTIVE\",\n\t\t\"FAILED_WORKFLOW\",\n\t\t\"DELETED\",\n\t\t\"DISABLED\",\n\t\t\"WORKFLOW_COMPLETED\",\n\t}\n}", "title": "" }, { "docid": "85b6550febd8b3fbdc6901a3c827c384", "score": "0.5145324", "text": 
"func (ActivationOverrideBehavior) Values() []ActivationOverrideBehavior {\n\treturn []ActivationOverrideBehavior{\n\t\t\"DISTRIBUTED_GRANTS_ONLY\",\n\t\t\"ALL_GRANTS_PERMITTED_BY_ISSUER\",\n\t}\n}", "title": "" }, { "docid": "242d7d9e3651fa107fa85d06278b6c87", "score": "0.51399714", "text": "func PossibleVirtualNetworkPeeringStateValues() []VirtualNetworkPeeringState {\n\treturn []VirtualNetworkPeeringState{\n\t\tVirtualNetworkPeeringStateConnected,\n\t\tVirtualNetworkPeeringStateDisconnected,\n\t\tVirtualNetworkPeeringStateInitiated,\n\t}\n}", "title": "" }, { "docid": "cd104fd6db59c35c6bc33cdba4389886", "score": "0.513932", "text": "func PossibleAfdProvisioningStateValues() []AfdProvisioningState {\n\treturn []AfdProvisioningState{\n\t\tAfdProvisioningStateCreating,\n\t\tAfdProvisioningStateDeleting,\n\t\tAfdProvisioningStateFailed,\n\t\tAfdProvisioningStateSucceeded,\n\t\tAfdProvisioningStateUpdating,\n\t}\n}", "title": "" }, { "docid": "3c96ea82e7031f9c1c15ad7a46d44131", "score": "0.51261604", "text": "func (WarmPoolState) Values() []WarmPoolState {\n\treturn []WarmPoolState{\n\t\t\"Stopped\",\n\t\t\"Running\",\n\t\t\"Hibernated\",\n\t}\n}", "title": "" } ]
e6e6af860f3fafe68104257048ba5f05
modify the parameters' type to adapt to the function input parameters' type
[ { "docid": "f5abcd7c7effea542e8400fe2ce12805", "score": "0.46649253", "text": "func parseParam(t reflect.Type, s string) (i interface{}, err error) {\n\tswitch t.Kind() {\n\tcase reflect.Int:\n\t\ti, err = strconv.Atoi(s)\n\tcase reflect.Int64:\n\t\tif wordsize == 32 {\n\t\t\treturn nil, ErrInt64On32\n\t\t}\n\t\ti, err = strconv.ParseInt(s, 10, 64)\n\tcase reflect.Int32:\n\t\tvar v int64\n\t\tv, err = strconv.ParseInt(s, 10, 32)\n\t\tif err == nil {\n\t\t\ti = int32(v)\n\t\t}\n\tcase reflect.Int16:\n\t\tvar v int64\n\t\tv, err = strconv.ParseInt(s, 10, 16)\n\t\tif err == nil {\n\t\t\ti = int16(v)\n\t\t}\n\tcase reflect.Int8:\n\t\tvar v int64\n\t\tv, err = strconv.ParseInt(s, 10, 8)\n\t\tif err == nil {\n\t\t\ti = int8(v)\n\t\t}\n\tcase reflect.String:\n\t\ti = s\n\tcase reflect.Ptr:\n\t\tif t.Elem().String() != \"regexp.Regexp\" {\n\t\t\terr = fmt.Errorf(\"not support %s\", t.Elem().String())\n\t\t\treturn\n\t\t}\n\t\ti, err = regexp.Compile(s)\n\tdefault:\n\t\terr = fmt.Errorf(\"not support %s\", t.Kind().String())\n\t}\n\treturn\n}", "title": "" } ]
[ { "docid": "d30e6831014355c1525d24052333dc07", "score": "0.63407797", "text": "func convertToParamTypes(ps []ast.Param) {\n\tfor i := range ps {\n\t\tp := &ps[i]\n\t\tp.Type = &ast.QualifiedId{Off: p.Off, Id: p.Name}\n\t\tp.Off = 0\n\t\tp.Name = \"\"\n\t}\n}", "title": "" }, { "docid": "1b57360c05e40a544aecfb5d9675ae75", "score": "0.6196674", "text": "func ParamTypeCovert(params map[string]interface{}) {\n\tfor k, p := range params {\n\t\tswitch k {\n\t\tcase \"page\", \"perPageNum\":\n\t\t\tswitch p.(type) {\n\t\t\tcase string:\n\t\t\t\tpage, _ := strconv.ParseUint(p.(string), 10, 64)\n\t\t\t\tparams[k] = uint16(page)\n\t\t\tcase int:\n\t\t\t\tparams[k] = uint16(p.(int))\n\t\t\tcase uint16:\n\t\t\t\tparams[k] = p\n\t\t\t}\n\t\tcase \"orderField\", \"orderType\", \"responseType\":\n\t\t\tparams[k] = p.(string)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "61256cc344c7ccd5a79d991385b9499f", "score": "0.59717923", "text": "func convertFunctionArg(fnctArgType reflect.Type, arg interface{}) reflect.Value {\n\tvar val reflect.Value\n\tswitch at := arg.(type) {\n\tcase Conditioner:\n\t\tif fnctArgType.Kind() == reflect.Interface {\n\t\t\t// Target is a Conditioner nothing to change\n\t\t\treturn reflect.ValueOf(at)\n\t\t}\n\t\tif fnctArgType == reflect.TypeOf(new(Condition)) {\n\t\t\t// Target is a pointer to an untyped Condition\n\t\t\treturn reflect.ValueOf(at.Underlying())\n\t\t}\n\t\tval = reflect.New(fnctArgType).Elem()\n\t\tval.Field(0).Set(reflect.ValueOf(at.Underlying()))\n\t\treturn val\n\tcase RecordData:\n\t\tvar fm RecordData\n\t\tif fnctArgType == reflect.TypeOf(fm) {\n\t\t\t// Target is a RecordData nothing to change\n\t\t\treturn reflect.ValueOf(at)\n\t\t}\n\t\tif fnctArgType == reflect.TypeOf(new(ModelData)) {\n\t\t\t// Target is a *ModelData so we send Underlying\n\t\t\treturn reflect.ValueOf(at.Underlying())\n\t\t}\n\t\t// => Target is a typed RecordData\n\t\tif md, ok := at.(*ModelData); ok {\n\t\t\t// Given arg is a ModelData, so we wrap it\n\t\t\tval = reflect.ValueOf(md.Wrap())\n\t\t\treturn val\n\t\t}\n\t\t// Given arg is already a typed ModelData\n\t\treturn reflect.ValueOf(arg)\n\tcase RecordSet:\n\t\tif fnctArgType == reflect.TypeOf((*RecordSet)(nil)).Elem() {\n\t\t\treturn reflect.ValueOf(at)\n\t\t}\n\t\tif fnctArgType == reflect.TypeOf(new(RecordCollection)) {\n\t\t\treturn reflect.ValueOf(at.Collection())\n\t\t}\n\t\treturn reflect.ValueOf(at.Collection().Wrap())\n\tcase nil:\n\t\treturn reflect.Zero(fnctArgType)\n\tdefault:\n\t\treturn reflect.ValueOf(arg)\n\t}\n}", "title": "" }, { "docid": "ce5697f0d6e28a2e54dbcb786555ddc3", "score": "0.5923937", "text": "func (s *BaseScalaListener) EnterFunctionArgTypes(ctx *FunctionArgTypesContext) {}", "title": "" }, { "docid": "3c4c51262d36796b2e3f546036a8c1d7", "score": "0.5738506", "text": "func typeFun(args ...object.Object) object.Object {\n\tif len(args) != 1 {\n\t\treturn newError(\"wrong number of arguments. 
got=%d, want=1\",\n\t\t\tlen(args))\n\t}\n\tswitch args[0].(type) {\n\tcase *object.String:\n\t\treturn &object.String{Value: \"string\"}\n\tcase *object.Regexp:\n\t\treturn &object.String{Value: \"regexp\"}\n\tcase *object.Boolean:\n\t\treturn &object.String{Value: \"bool\"}\n\tcase *object.Builtin:\n\t\treturn &object.String{Value: \"builtin\"}\n\tcase *object.File:\n\t\treturn &object.String{Value: \"file\"}\n\tcase *object.Array:\n\t\treturn &object.String{Value: \"array\"}\n\tcase *object.Function:\n\t\treturn &object.String{Value: \"function\"}\n\tcase *object.Integer:\n\t\treturn &object.String{Value: \"integer\"}\n\tcase *object.Float:\n\t\treturn &object.String{Value: \"float\"}\n\tcase *object.Hash:\n\t\treturn &object.String{Value: \"hash\"}\n\tdefault:\n\t\treturn newError(\"argument to `type` not supported, got=%s\",\n\t\t\targs[0].Type())\n\t}\n}", "title": "" }, { "docid": "6bbf74d0d972fd96e004e43dbe00d643", "score": "0.55974704", "text": "func (tx *Txn) generateTypedArgs(origParams []interface{}, method *abi.Method) (typedArgs []interface{}, err error) {\n\n\tparams, err := tx.flattenParams(origParams, method)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tmethodName := method.Name\n\tif methodName == \"\" {\n\t\tmethodName = \"<constructor>\"\n\t}\n\tlog.Debug(\"Parsing args for function: \", method)\n\tfor idx, inputArg := range method.Inputs {\n\t\tif idx >= len(params) {\n\t\t\terr = fmt.Errorf(\"Method '%s': Requires %d args (supplied=%d)\", methodName, len(method.Inputs), len(params))\n\t\t\treturn\n\t\t}\n\t\tparam := params[idx]\n\t\trequiredType := inputArg.Type.String()\n\t\tsuppliedType := reflect.TypeOf(param)\n\t\tif requiredType == \"string\" {\n\t\t\tif suppliedType.Kind() == reflect.String {\n\t\t\t\ttypedArgs = append(typedArgs, param.(string))\n\t\t\t} else {\n\t\t\t\terr = fmt.Errorf(\"Method '%s' param %d: Must supply a string\", methodName, idx)\n\t\t\t\tbreak\n\t\t\t}\n\t\t} else if strings.Contains(requiredType, \"int\") && strings.HasSuffix(requiredType, \"]\") {\n\t\t\ttypedArgs, err = processIntArray(typedArgs, methodName, idx, requiredType, suppliedType, param)\n\t\t} else if strings.Contains(requiredType, \"int\") {\n\t\t\ttypedArgs, err = processIntVal(typedArgs, methodName, idx, requiredType, suppliedType, param)\n\t\t} else if requiredType == \"bool\" {\n\t\t\tif suppliedType.Kind() == reflect.String {\n\t\t\t\ttypedArgs = append(typedArgs, strings.ToLower(param.(string)) == \"true\")\n\t\t\t} else if suppliedType.Kind() == reflect.Bool {\n\t\t\t\ttypedArgs = append(typedArgs, param.(bool))\n\t\t\t} else {\n\t\t\t\terr = fmt.Errorf(\"Method '%s' param %d is a %s: Must supply a boolean or a string\", methodName, idx, requiredType)\n\t\t\t}\n\t\t} else if requiredType == \"address\" {\n\t\t\tif suppliedType.Kind() == reflect.String {\n\t\t\t\tif !common.IsHexAddress(param.(string)) {\n\t\t\t\t\terr = fmt.Errorf(\"Method '%s' param %d: Could not be converted to a hex address\", methodName, idx)\n\t\t\t\t} else {\n\t\t\t\t\ttypedArgs = append(typedArgs, common.HexToAddress(param.(string)))\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\terr = fmt.Errorf(\"Method '%s' param %d is a %s: Must supply a hex address string\", methodName, idx, requiredType)\n\t\t\t}\n\t\t} else if strings.HasPrefix(requiredType, \"bytes\") {\n\t\t\tif suppliedType.Kind() == reflect.String {\n\t\t\t\tbSlice := common.FromHex(param.(string))\n\t\t\t\tif len(bSlice) == 0 {\n\t\t\t\t\ttypedArgs = append(typedArgs, [0]byte{})\n\t\t\t\t} else {\n\t\t\t\t\t// Create ourselves an array of the 
right size (ethereum won't accept a slice)\n\t\t\t\t\tbArrayType := reflect.ArrayOf(len(bSlice), reflect.TypeOf(bSlice[0]))\n\t\t\t\t\tbNewArray := reflect.New(bArrayType).Elem()\n\t\t\t\t\treflect.Copy(bNewArray, reflect.ValueOf(bSlice))\n\t\t\t\t\ttypedArgs = append(typedArgs, bNewArray.Interface())\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\terr = fmt.Errorf(\"Method '%s' param %d is a %s: Must supply a hex string\", methodName, idx, requiredType)\n\t\t\t}\n\t\t} else {\n\t\t\terr = fmt.Errorf(\"Type '%s' is not yet supported\", requiredType)\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"%s [Required=%s Supplied=%s Value=%s]\", err, requiredType, suppliedType, param)\n\t\t\treturn\n\t\t}\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "a40bc5716e44c4d36f31c09773b1e91a", "score": "0.55554277", "text": "func (s *BaseScalaListener) EnterFunTypeParamClause(ctx *FunTypeParamClauseContext) {}", "title": "" }, { "docid": "119c6ddbeffff4e73c0c9ec87284054f", "score": "0.55432993", "text": "func paramTypeError(v interface{}) error {\n\treturn errors.New(fmt.Sprintf(\"need %s\", reflect.TypeOf(v)))\n}", "title": "" }, { "docid": "e3747f89638b8fa246d781454d9868dd", "score": "0.5476099", "text": "func UpdateBoundParameters(configuration *Configuration) {\n\tfor parameterName, boundParameter := range boundParameters {\n\t\tswitch boundParameter.boundType {\n\t\tcase \"bool\":\n\t\t\t*(boundParameter.boundPointer.(*bool)) = configuration.Bool(parameterName)\n\t\tcase \"time.Duration\":\n\t\t\t*(boundParameter.boundPointer.(*time.Duration)) = configuration.Duration(parameterName)\n\t\tcase \"float32\":\n\t\t\t*(boundParameter.boundPointer.(*float32)) = float32(configuration.Float64(parameterName))\n\t\tcase \"float64\":\n\t\t\t*(boundParameter.boundPointer.(*float64)) = configuration.Float64(parameterName)\n\t\tcase \"int\":\n\t\t\t*(boundParameter.boundPointer.(*int)) = configuration.Int(parameterName)\n\t\tcase \"int8\":\n\t\t\t*(boundParameter.boundPointer.(*int8)) = int8(configuration.Int(parameterName))\n\t\tcase \"int16\":\n\t\t\t*(boundParameter.boundPointer.(*int16)) = int16(configuration.Int(parameterName))\n\t\tcase \"int32\":\n\t\t\t*(boundParameter.boundPointer.(*int32)) = int32(configuration.Int(parameterName))\n\t\tcase \"int64\":\n\t\t\t*(boundParameter.boundPointer.(*int64)) = configuration.Int64(parameterName)\n\t\tcase \"string\":\n\t\t\t*(boundParameter.boundPointer.(*string)) = configuration.String(parameterName)\n\t\tcase \"uint\":\n\t\t\t*(boundParameter.boundPointer.(*uint)) = uint(configuration.Int(parameterName))\n\t\tcase \"uint8\":\n\t\t\t*(boundParameter.boundPointer.(*uint8)) = uint8(configuration.Int(parameterName))\n\t\tcase \"uint16\":\n\t\t\t*(boundParameter.boundPointer.(*uint16)) = uint16(configuration.Int(parameterName))\n\t\tcase \"uint32\":\n\t\t\t*(boundParameter.boundPointer.(*uint32)) = uint32(configuration.Int(parameterName))\n\t\tcase \"uint64\":\n\t\t\t*(boundParameter.boundPointer.(*uint64)) = uint64(configuration.Int64(parameterName))\n\t\tcase \"[]string\":\n\t\t\t*(boundParameter.boundPointer.(*[]string)) = configuration.Strings(parameterName)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "e2f8f9037f503a0bf14d3014bbab02a5", "score": "0.54547596", "text": "func (b Builtin) params() TypeList { return b.Types }", "title": "" }, { "docid": "38a8e14a6f895b8345c40ff53d5ab762", "score": "0.54434586", "text": "func updateTypes(r *aws.Request) {\n\tit := reflect.TypeOf(r.Params).Elem()\n\tot := reflect.TypeOf(r.Data).Elem()\n\tpg := r.Operation.Paginator\n\tif pg == nil 
{\n\t\tpg = &nilPaginator\n\t}\n\tmu.Lock()\n\tdefer mu.Unlock()\n\tif _, ok := skipFields[it]; !ok {\n\t\tupdateSkipFields(it, pg.InputTokens, pg.LimitToken)\n\t}\n\tif _, ok := skipFields[ot]; !ok {\n\t\tupdateSkipFields(ot, pg.OutputTokens, pg.TruncationToken)\n\t}\n}", "title": "" }, { "docid": "ddc55a27843f5e397db36d350330b40c", "score": "0.5404895", "text": "func (s *BaseScalaListener) EnterTypeParam(ctx *TypeParamContext) {}", "title": "" }, { "docid": "c10919297c839499c13f0a4cac26252c", "score": "0.53498733", "text": "func buildSetterFunc(paramName interface{}, srcParamValue interface{}) (newSetter paramSetter, err error) {\n\tparamNameAsString := paramName.(string)\n\n\tswitch typedValue := srcParamValue.(type) {\n\tcase time.Time:\n\t\tnewSetter, err = buildSetterFuncForTime(paramNameAsString, typedValue)\n\tcase []string:\n\t\tnewSetter, err = buildSetterFuncForStringArray(paramNameAsString, typedValue)\n\tcase model.PropRelation:\n\t\tnewSetter, err = buildSetterFuncForPropRelation(paramNameAsString, typedValue)\n\tdefault:\n\t\terr = fmt.Errorf(\"Unsupported for type of param[%T]. Param name: [%v]\", srcParamValue, paramNameAsString)\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "fe541ec84c5b9522350e6f7373d3accc", "score": "0.5264442", "text": "func declareParameters(function *Function, scopes *ScopeStack, registers *register.Manager, identifierLifeTime map[string]token.Position) error {\n\tfile := function.File\n\n\tfor i, parameter := range function.Parameters {\n\t\tif i >= len(registers.Call) {\n\t\t\treturn errors.New(errors.ExceededMaxParameters)\n\t\t}\n\n\t\tregister := registers.Call[i]\n\t\ttypeName := TypeNameFromTokens(parameter.TypeTokens)\n\t\tparameter.Type = file.Type(typeName)\n\n\t\tif parameter.Type == nil {\n\t\t\treturn NewError(errors.New(&errors.UnknownType{Name: typeName}), file.path, file.tokens[:parameter.Position+2], function)\n\t\t}\n\n\t\tvariable := &Variable{\n\t\t\tName: parameter.Name,\n\t\t\tType: parameter.Type,\n\t\t\tPosition: 0,\n\t\t\tAliveUntil: identifierLifeTime[parameter.Name],\n\t\t}\n\n\t\t_ = variable.SetRegister(register)\n\t\tregister.Assign()\n\t\tscopes.Add(variable)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "82a5d023170dd90085b4fcdfe9ce8a52", "score": "0.52559674", "text": "func manipulateParameters(alt paramAlt, params []parameters, str string) (toReturn []parameters) {\n\tfor _, param := range params {\n\t\ttoReturn = append(toReturn, alt(param, str))\n\t}\n\treturn toReturn\n}", "title": "" }, { "docid": "f304246403254191ce2b3c41fee6effd", "score": "0.5253923", "text": "func (s *BaseJavaParserListener) EnterTypeParameters(ctx *TypeParametersContext) {}", "title": "" }, { "docid": "149dbe8adfbf443aa2814bc6e6fe47b3", "score": "0.5241727", "text": "func (v TypeFillingVisitor) funcType(recv *ast.FieldList, params, results []*ast.Field) Type {\n v.starIsDeref = false\n paramList := []Type{}\n for _, param := range params {\n for _, paramIdent := range param.Names {\n paramList = append(paramList, v.getTypes(paramIdent))\n }\n }\n\n rtnList := []Type{}\n for _, result := range results {\n if len(result.Names) == 0 {\n rtnList = append(rtnList, v.getTypes(result.Type))\n } else {\n for _, resultIdent := range result.Names {\n rtnList = append(rtnList, v.getTypes(resultIdent))\n }\n }\n }\n\n var recvType *Type\n if recv != nil && len(recv.List) > 0 {\n rc := v.getTypes(recv.List[0].Names[0])\n recvType = &rc\n }\n\n\n v.starIsDeref = true\n return FunctionType{receiver:recvType, params:paramList, returns:rtnList}\n}", 
"title": "" }, { "docid": "5cf5ae0aeb05ae0fb48d792ad1a29507", "score": "0.52276254", "text": "func (ty *FuncType) Params() []*ValType {\n\tptr := C.wasm_functype_params(ty.ptr())\n\treturn ty.convertTypeList(ptr)\n}", "title": "" }, { "docid": "10082df9149aa1647b6d63210206df9b", "score": "0.5170348", "text": "func httpParamsToArgs(rpcFunc *RPCFunc, cdc *amino.Codec, r *http.Request) ([]reflect.Value, error) {\n\t// skip types.Context\n\tconst argsOffset = 1\n\n\tvalues := make([]reflect.Value, len(rpcFunc.argNames))\n\n\tfor i, name := range rpcFunc.argNames {\n\t\targType := rpcFunc.args[i+argsOffset]\n\n\t\tvalues[i] = reflect.Zero(argType) // set default for that type\n\n\t\targ := GetParam(r, name)\n\t\t// log.Notice(\"param to arg\", \"argType\", argType, \"name\", name, \"arg\", arg)\n\n\t\tif \"\" == arg {\n\t\t\tcontinue\n\t\t}\n\n\t\tv, err, ok := nonJSONStringToArg(cdc, argType, arg)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif ok {\n\t\t\tvalues[i] = v\n\t\t\tcontinue\n\t\t}\n\n\t\tvalues[i], err = jsonStringToArg(cdc, argType, arg)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn values, nil\n}", "title": "" }, { "docid": "f454eb07e805fbd098be97a46bf9b192", "score": "0.5159595", "text": "func functionParams(g Generator, f *compile.FunctionSpec) (string, error) {\n\treturn g.TextTemplate(\n\t\t`\n\t\t<- $params := newNamespace ->\n\t\t<- range .ArgsSpec>\n\t\t\t<- if .Required>\n\t\t\t\t<$params.NewName .Name> <typeReference .Type>,\n\t\t\t<- else>\n\t\t\t\t<$params.NewName .Name> <typeReferencePtr .Type>,\n\t\t\t<- end ->\n\t\t<end>\n\t\t`, f)\n}", "title": "" }, { "docid": "11b2723051c46ba8302fd564a8bcb9c1", "score": "0.5157516", "text": "func GenerateTypesForParameters(t *template.Template, params map[string]*openapi3.ParameterRef) ([]TypeDefinition, error) {\n\tvar types []TypeDefinition\n\tfor _, paramName := range SortedParameterKeys(params) {\n\t\tparamOrRef := params[paramName]\n\n\t\tgoType, err := paramToGoType(paramOrRef.Value, nil)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, fmt.Sprintf(\"error generating Go type for schema in parameter %s\", paramName))\n\t\t}\n\n\t\ttypeDef := TypeDefinition{\n\t\t\tJsonName: paramName,\n\t\t\tSchema: goType,\n\t\t\tTypeName: SchemaNameToTypeName(paramName),\n\t\t}\n\n\t\tif paramOrRef.Ref != \"\" {\n\t\t\t// Generate a reference type for referenced parameters\n\t\t\trefType, err := RefPathToGoType(paramOrRef.Ref)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.Wrap(err, fmt.Sprintf(\"error generating Go type for (%s) in parameter %s\", paramOrRef.Ref, paramName))\n\t\t\t}\n\t\t\ttypeDef.TypeName = SchemaNameToTypeName(refType)\n\t\t}\n\n\t\ttypes = append(types, typeDef)\n\t}\n\treturn types, nil\n}", "title": "" }, { "docid": "8feb3714942d4ded0d31d192a81d64f2", "score": "0.5127497", "text": "func (s *BaseScalaListener) EnterTypeParamClause(ctx *TypeParamClauseContext) {}", "title": "" }, { "docid": "7d895e0bfb945c32aae99c042bf25a1d", "score": "0.50870734", "text": "func (l *Lambda) ConvertType(which TypeFlag) Object {\n\treturn NewError(\"Argument to %s not supported, got %s\", l.Type(), which)\n}", "title": "" }, { "docid": "b8cf83e2254e2b21b3563e752b25c5ca", "score": "0.5056026", "text": "func addFunctionWithConsecutiveTypeParameters(x, y int) int {\n\treturn x + y\n}", "title": "" }, { "docid": "5c5b26398d2b4e8ad4b27ac0c602c4fc", "score": "0.50352716", "text": "func convertParamTemplates(step *v1beta1.Step, params []v1beta1.ParamSpec) {\n\treplacements := 
make(map[string]string)\n\tfor _, p := range params {\n\t\treplacements[fmt.Sprintf(\"params.%s\", p.Name)] = fmt.Sprintf(\"$(inputs.params.%s)\", p.Name)\n\t\tv1beta1.ApplyStepReplacements(step, replacements, map[string][]string{})\n\t}\n\n\tv1beta1.ApplyStepReplacements(step, replacements, map[string][]string{})\n}", "title": "" }, { "docid": "400401c0eec4aab73115f65c0c217f55", "score": "0.5030405", "text": "func psToGeneric(templParams []psTemplateParams) (genericParams []interface{}) {\n\tfor _, v := range templParams {\n\t\tgenericParams = append(genericParams, interface{}(v))\n\t}\n\treturn\n}", "title": "" }, { "docid": "826e91ee2c56d71b7cee67069996923f", "score": "0.5014667", "text": "func CastParameterChange(structType any) ParameterChange {\n\tif casted, ok := structType.(ParameterChange); ok {\n\t\treturn casted\n\t}\n\tif casted, ok := structType.(*ParameterChange); ok {\n\t\treturn *casted\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "eb94572eb9ac99ea98930a9d9fe78463", "score": "0.4974931", "text": "func (s *BaseScalaListener) EnterTypeArgs(ctx *TypeArgsContext) {}", "title": "" }, { "docid": "9ca2bba5e57d4149bd4940838251ccd5", "score": "0.49718237", "text": "func ConvertType(args ...interface{}) string {\n\tt := args[0].(string)\n\tp := args[1].(string)\n\n\t// Convert softlayer types to golang types\n\tswitch t {\n\tcase \"unsignedLong\", \"unsignedInt\":\n\t\treturn \"uint\"\n\tcase \"boolean\":\n\t\treturn \"bool\"\n\tcase \"dateTime\":\n\t\tif p != \"datatypes\" {\n\t\t\treturn \"datatypes.Time\"\n\t\t} else {\n\t\t\treturn \"Time\"\n\t\t}\n\tcase \"decimal\", \"float\":\n\t\tif p != \"datatypes\" {\n\t\t\treturn \"datatypes.Float64\"\n\t\t} else {\n\t\t\treturn \"Float64\"\n\t\t}\n\tcase \"base64Binary\":\n\t\treturn \"[]byte\"\n\tcase \"json\", \"enum\":\n\t\treturn \"string\"\n\t}\n\n\tif strings.HasPrefix(t, \"SoftLayer_\") {\n\t\tt = RemovePrefix(t)\n\t\tif p != \"datatypes\" {\n\t\t\treturn \"datatypes.\" + t\n\t\t}\n\t\t// A Property called Resource that is an 'Entity' can be multiple types in reality\n\t\t// Specifically this is for Container_Search_Result\n\t\tif len(args) >= 4 && args[3] == \"resource\" && t == \"Entity\" {\n\t\t\treturn \"interface{}\"\n\t\t}\n\t\treturn t\n\t}\n\n\tif strings.HasPrefix(t, \"McAfee_\") {\n\t\tif p != \"datatypes\" {\n\t\t\treturn \"datatypes.\" + t\n\t\t}\n\t\treturn t\n\t}\n\n\treturn t\n}", "title": "" }, { "docid": "5fdb0ea39d01e18b163a89b23c9053aa", "score": "0.4950726", "text": "func (ti *multipointType) GetTypeParams() map[string]string {\n\treturn map[string]string{\"SRID\": strconv.FormatUint(uint64(ti.sqlMultiPointType.SRID), 10),\n\t\t\"DefinedSRID\": strconv.FormatBool(ti.sqlMultiPointType.DefinedSRID)}\n}", "title": "" }, { "docid": "6aeecbae5f383959cf5280483b00f711", "score": "0.4927666", "text": "func (e *Elephant) parseParams(postType string, params []interface{}, args ...interface{}) error {\r\n\ttypes := paramTypes[postType].types\r\n\tpanic := \"Parsing the parameter \"\r\n\tvar err error\r\n\tdefer func() {\r\n\t\tif err := recover(); err != nil {\r\n\t\t\terr = errors.New(panic + \"in backend cashed\")\r\n\t\t}\r\n\t}()\r\n\tif len(types) != len(params) {\r\n\t\treturn fmt.Errorf(\"the count of parameters is not correct, type: %v, demand: %v\", postType, len(types))\r\n\t}\r\n\tfor i := 0; i < len(types); i += 1 {\r\n\t\tpanic += fmt.Sprintf(\"%d \", i)\r\n\t\tswitch types[i] {\r\n\t\tcase \"string\":\r\n\t\t\t*args[i].(*string) = params[i].(string)\r\n\t\t\tif postType != \"elephant_getTotalBalance\" 
&& postType != \"elephant_getSignedCs\" {\r\n\t\t\t\tif i == paramTypes[postType].passwd {\r\n\t\t\t\t\te.logInfo(fmt.Sprintf(\"arg%d: ****\", i))\r\n\t\t\t\t} else {\r\n\t\t\t\t\te.logInfo(fmt.Sprintf(\"arg%d: %s\", i, params[i].(string)))\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\tcase \"uint32\":\r\n\t\t\t*args[i].(*uint32) = params[i].(uint32)\r\n\t\t\tif postType != \"elephant_getTotalBalance\" && postType != \"elephant_getSignedCs\" {\r\n\t\t\t\te.logInfo(fmt.Sprintf(\"arg%d: %d\", i, params[i].(uint32)))\r\n\t\t\t}\r\n\r\n\t\tcase \"uint64\":\r\n\t\t\t*args[i].(*uint64) = params[i].(uint64)\r\n\t\t\tif postType != \"elephant_getTotalBalance\" && postType != \"elephant_getSignedCs\" {\r\n\t\t\t\te.logInfo(fmt.Sprintf(\"arg%d: %d\", i, params[i].(uint64)))\r\n\t\t\t}\r\n\r\n\t\tcase \"*multipart.File\":\r\n\t\t\t*args[i].(**multipart.File) = params[i].(*multipart.File)\r\n\r\n\t\tdefault:\r\n\t\t\treturn errors.New(fmt.Sprintf(\"the params %d is not valid\", i+1))\r\n\t\t}\r\n\t}\r\n\treturn err\r\n}", "title": "" }, { "docid": "b9c89c41a16134c7ea7d98ffc03ddffc", "score": "0.49244574", "text": "func (s *BaseScalaListener) EnterParamType(ctx *ParamTypeContext) {}", "title": "" }, { "docid": "31ade441964aa9a7054d14216096149a", "score": "0.4899872", "text": "func getCodeToConvertInParameter(_typeExpr *ast.Expr, packName string, name string, isPointer bool, outFile *jen.File) []jen.Code {\n\tleftPart := jen.Id(name).Op(\":=\")\n\tif arrayExpr, isArray := (*_typeExpr).(*ast.ArrayType); isArray {\n\t\ttypeExpr := arrayExpr.Elt\n\t\tarrayLen := \"\"\n\t\tif arrayExpr.Len != nil {\n\t\t\tif litExpr, isLit := (arrayExpr.Len).(*ast.BasicLit); isLit {\n\t\t\t\tarrayLen = litExpr.Value\n\t\t\t}\n\t\t}\n\t\tarrayPart := jen.Op(\"*\").Op(\"[\" + arrayLen + \"]\")\n\t\tarrayTypeCode := getTypeCastCode(arrayPart, &typeExpr, packName, name, outFile)\n\t\tif arrayTypeCode != nil {\n\t\t\tif !isPointer {\n\t\t\t\tleftPart = leftPart.Op(\"*\")\n\t\t\t}\n\t\t\tleftPart = leftPart.Parens(arrayTypeCode)\n\t\t\tvar argCode jen.Code\n\t\t\tif !isPointer && arrayExpr.Len == nil {\n\t\t\t\targCode = jen.Op(\"&\").Id(argName(name))\n\t\t\t} else {\n\t\t\t\targCode = jen.Id(argName(name))\n\t\t\t}\n\t\t\trightCode := jen.Qual(\"unsafe\", \"Pointer\").Parens(argCode)\n\t\t\tleftPart = leftPart.Parens(rightCode)\n\t\t\treturn jenCodeToArray(leftPart)\n\t\t}\n\t} else if starExpr, isPointerParam := (*_typeExpr).(*ast.StarExpr); isPointerParam {\n\t\t_type := &starExpr.X\n\t\treturn getCodeToConvertInParameter(_type, packName, name, true, outFile)\n\t} else if identExpr, isIdent := (*_typeExpr).(*ast.Ident); isIdent {\n\t\ttypeName := identExpr.Name\n\t\tif IsBasicGoType(typeName) {\n\t\t\treturn jenCodeToArray(leftPart.Id(argName(name)))\n\t\t} else if isInHandleTypesList(typeName) {\n\t\t\treturn getLookupHandleCode(name, typeName, isPointer)\n\t\t} else if isInplaceConvertType(typeName) {\n\t\t\tif !isPointer {\n\t\t\t\tleftPart = leftPart.Op(\"*\")\n\t\t\t}\n\t\t\treturn jenCodeToArray(leftPart.Id(\"inplace\" + typeName).Call(jen.Id(argName(name))))\n\t\t} else {\n\t\t\tif isInHandleTypesList(packName + packageSeparator + typeName) {\n\t\t\t\treturn getLookupHandleCode(name, packName+packageSeparator+typeName, isPointer)\n\t\t\t} else {\n\t\t\t\tif !isPointer {\n\t\t\t\t\tleftPart = leftPart.Op(\"*\")\n\t\t\t\t}\n\t\t\t\tleftPart = leftPart.Parens(jen.Op(\"*\").Id(packName).Id(\".\").Id(typeName)).\n\t\t\t\t\tParens(jen.Qual(\"unsafe\", \"Pointer\").Parens(jen.Id(argName(name))))\n\t\t\t\treturn 
jenCodeToArray(leftPart)\n\t\t\t}\n\t\t}\n\t} else if selectorExpr, isSelector := (*_typeExpr).(*ast.SelectorExpr); isSelector {\n\t\tif identExpr, isIdent := (selectorExpr.X).(*ast.Ident); isIdent {\n\t\t\tpackName = identExpr.Name\n\t\t\ttypeName := selectorExpr.Sel.Name\n\t\t\tif isInHandleTypesList(packName + packageSeparator + typeName) {\n\t\t\t\treturn getLookupHandleCode(name, packName+packageSeparator+typeName, isPointer)\n\t\t\t}\n\t\t}\n\t\tif !isPointer {\n\t\t\tleftPart = leftPart.Op(\"*\")\n\t\t}\n\t\ttypeCastCode := getTypeCastCode(jen.Op(\"*\"), _typeExpr, packName, name, outFile)\n\t\treturn jenCodeToArray(leftPart.Parens(typeCastCode).\n\t\t\tParens(jen.Qual(\"unsafe\", \"Pointer\").Parens(jen.Id(argName(name)))))\n\t} else if _, isEllipsis := (*_typeExpr).(*ast.Ellipsis); isEllipsis {\n\t\t//TODO: stdevEclipse Implement\n\t\treturn jenCodeToArray(leftPart.Id(argName(name)))\n\t} else if _, isIntf := (*_typeExpr).(*ast.InterfaceType); isIntf {\n\t\treturn jenCodeToArray(leftPart.Id(\"convertToInterface\").Call(jen.Id(argName(name))))\n\t} else if _, isFunc := (*_typeExpr).(*ast.FuncType); isFunc {\n\t\treturn jenCodeToArray(leftPart.Id(\"copyToFunc\").Call(jen.Id(argName(name))))\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "f84ee3de16a513f3b6d2fcac33126479", "score": "0.4882584", "text": "func Into(\n\tr *http.Request,\n\tgetURLParam func(key string) string,\n\tfn interface{},\n) []reflect.Value {\n\t// Input validation\n\tfnType := reflect.TypeOf(fn)\n\tif fnType.Kind() != reflect.Func {\n\t\tpanic(\"expects a function\")\n\t}\n\n\tinputs := getInputs(fnType)\n\tif inputLen := len(inputs); inputLen != 1 && inputLen != 2 {\n\t\tpanic(\"there should be only one or two arguments\")\n\t}\n\n\tinput := inputs[0]\n\tif input.Kind() != reflect.Struct {\n\t\tpanic(\"argument must be a struct\")\n\t}\n\n\tfields := getFields(input)\n\tfieldTypes := getStructFieldsTypes(fields)\n\tfoundComplexTypes := filterComplexTypes(fieldTypes)\n\tif len(foundComplexTypes) > 0 {\n\t\tpanic(\"there cannot be any complex types in the first argument's struct\")\n\t}\n\n\t// Work //\n\tinputValue := reflect.New(input).Elem()\n\n\t// URLParams\n\tfor _, field := range fields {\n\t\turlParam := getURLParam(field.Name)\n\t\tconvertToKindAndSetValueIn(urlParam, field.Type.Kind(), inputValue.FieldByName(field.Name))\n\t}\n\n\t// Query string\n\tfor _, field := range fields {\n\t\tvar foundValue []string\n\t\tfor k, value := range r.URL.Query() {\n\t\t\tnormalizedKey := strings.TrimSuffix(\n\t\t\t\tstrings.ToLower(k),\n\t\t\t\t\"[]\",\n\t\t\t)\n\n\t\t\tif normalizedKey == strings.ToLower(field.Name) {\n\t\t\t\tfoundValue = value\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\tif len(foundValue) > 0 && field.Type.Kind() != reflect.Slice {\n\t\t\tqueryParam := foundValue[0]\n\t\t\tfieldTypeKind := field.Type.Kind()\n\t\t\tfieldValue := inputValue.FieldByName(field.Name)\n\t\t\tconvertToKindAndSetValueIn(queryParam, fieldTypeKind, fieldValue)\n\t\t} else if field.Type.Kind() == reflect.Slice {\n\t\t\tlenValue := len(foundValue)\n\t\t\tfieldTypeKind := field.Type.Elem().Kind()\n\t\t\ts := reflect.MakeSlice(field.Type, lenValue, lenValue)\n\t\t\tfor i := 0; i < lenValue; i++ {\n\t\t\t\tconvertToKindAndSetValueIn(\n\t\t\t\t\tfoundValue[i],\n\t\t\t\t\tfieldTypeKind,\n\t\t\t\t\ts.Index(i),\n\t\t\t\t)\n\t\t\t}\n\n\t\t\tinputValue.FieldByName(field.Name).Set(s)\n\t\t}\n\t}\n\n\t// Request body\n\tvar complexTypeValue interface{}\n\thasBody := len(inputs) == 2\n\tif hasBody {\n\t\tcomplexType := 
inputs[1]\n\t\tcomplexTypeValue = reflect.New(complexType).Interface()\n\t\terr := json.NewDecoder(r.Body).Decode(&complexTypeValue)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}\n\n\t// Call fn\n\tfnValue := reflect.ValueOf(fn)\n\tif hasBody {\n\t\treturnValues := fnValue.Call([]reflect.Value{\n\t\t\tinputValue,\n\t\t\treflect.Indirect(reflect.ValueOf(complexTypeValue)),\n\t\t})\n\t\treturn returnValues\n\t}\n\treturnValues := fnValue.Call([]reflect.Value{\n\t\tinputValue,\n\t})\n\treturn returnValues\n}", "title": "" }, { "docid": "fa6b80a4b06148792ed0e05c6c0aae05", "score": "0.48616228", "text": "func generateFunctionParams(params []parser.Node) (cParams []string) {\n\t// Iterate through the given parameters to translate them to C\n\tfor _, param := range params {\n\t\t// Retrieve the C type of the current parameter\n\t\tparamType := cTypes[param.ReturnType]\n\n\t\t// Append the translated parameter to the return array\n\t\tcParams = append(\n\t\t\tcParams,\n\t\t\tfmt.Sprintf(cParam, paramType, param.Value),\n\t\t)\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "e95d96426e145a1f9cda28ba12e9da0b", "score": "0.48474956", "text": "func (g *generator) argumentTypeName(expr model.Expression, destType model.Type, isInput bool) string {\n\tvar tokenRange hcl.Range\n\tif expr != nil {\n\t\tnode := expr.SyntaxNode()\n\t\tif node != nil && !reflect.ValueOf(node).IsNil() {\n\t\t\ttokenRange = expr.SyntaxNode().Range()\n\t\t}\n\t}\n\tif schemaType, ok := hcl2.GetSchemaForType(destType.(model.Type)); ok {\n\t\tswitch schemaType := schemaType.(type) {\n\t\tcase *schema.ArrayType:\n\t\t\ttoken := schemaType.ElementType.(*schema.ObjectType).Token\n\t\t\t_, module, member, diags := hcl2.DecomposeToken(token, tokenRange)\n\t\t\timportPrefix := strings.Split(module, \"/\")[0]\n\t\t\tcontract.Assert(len(diags) == 0)\n\t\t\tfmtString := \"[]%s.%s\"\n\t\t\tif isInput {\n\t\t\t\tfmtString = \"%s.%sArray\"\n\t\t\t}\n\t\t\treturn fmt.Sprintf(fmtString, importPrefix, member)\n\t\tcase *schema.ObjectType:\n\t\t\ttoken := schemaType.Token\n\t\t\t_, module, member, diags := hcl2.DecomposeToken(token, tokenRange)\n\t\t\timportPrefix := strings.Split(module, \"/\")[0]\n\t\t\tcontract.Assert(len(diags) == 0)\n\t\t\tfmtString := \"[]%s.%s\"\n\t\t\tif isInput {\n\t\t\t\tfmtString = \"%s.%sArgs\"\n\t\t\t}\n\t\t\treturn fmt.Sprintf(fmtString, importPrefix, member)\n\t\tdefault:\n\t\t\tcontract.Failf(\"unexpected schema type %T\", schemaType)\n\t\t}\n\t}\n\n\t// TODO support rest of types\n\tswitch destType := destType.(type) {\n\tcase *model.OpaqueType:\n\t\tswitch destType {\n\t\tcase model.NumberType:\n\t\t\treturn \"float64\"\n\t\tdefault:\n\t\t\treturn destType.Name\n\t\t}\n\t// TODO could probably improve these types by inspecting kv types\n\tcase *model.ObjectType, *model.MapType:\n\t\treturn \"interface{}\"\n\tdefault:\n\t\tcontract.Failf(\"unexpected schema type %T\", destType)\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "d56b120a48c54c656be57cb42bb11389", "score": "0.4841027", "text": "func (p DB) normalizeType(sqlType, defval string, precision int64) (string, string) {\n\tif sqlType == \"timestamp without time zone\" {\n\t\tsqlType = \"timestamp\"\n\t}\n\tif sqlType == \"integer\" {\n\t\tsqlType = \"int\"\n\t}\n\t// Sequences are normalized to serial.\n\tif strings.Index(defval, \"nextval(\") != -1 && sqlType == \"int\" {\n\t\tsqlType = \"serial\"\n\t\tdefval = \"\"\n\t}\n\tif sqlType == \"numeric\" && precision > 0 {\n\t\tsqlType = \"numeric(\" + strconv.FormatInt(precision, 10) + 
\")\"\n\t}\n\treturn sqlType, defval\n}", "title": "" }, { "docid": "d3dc482ae849b6af800a4aa01e722979", "score": "0.48366284", "text": "func (ti *intType) GetTypeParams() map[string]string {\n\tsqlParam := \"\"\n\tswitch ti.sqlIntType.Type() {\n\tcase sqltypes.Int8:\n\t\tsqlParam = intTypeParams_Width_8\n\tcase sqltypes.Int16:\n\t\tsqlParam = intTypeParams_Width_16\n\tcase sqltypes.Int24:\n\t\tsqlParam = intTypeParams_Width_24\n\tcase sqltypes.Int32:\n\t\tsqlParam = intTypeParams_Width_32\n\tcase sqltypes.Int64:\n\t\tsqlParam = intTypeParams_Width_64\n\tdefault:\n\t\tpanic(fmt.Errorf(`unknown int type info sql type \"%v\"`, ti.sqlIntType.Type().String()))\n\t}\n\treturn map[string]string{intTypeParams_Width: sqlParam}\n}", "title": "" }, { "docid": "b5a7840d71afb6e843b6b92b2b0b2e70", "score": "0.482419", "text": "func CoerceType(param interface{}) ([]byte, error) {\n\tvar data []byte\n\tvar err error\n\n\tswitch p := param.(type) {\n\tcase string:\n\t\tinput := p\n\t\tdata = []byte(input)\n\n\tcase []byte:\n\t\tdata = p\n\n\tdefault:\n\t\tdata, err = json.Marshal(param)\n\t\tif err != nil {\n\t\t\treturn nil, errors.New(\n\t\t\t\t\"marshaling input data to JSON failed, \" +\n\t\t\t\t\t\"passed in data must be of type []byte, string, or support marshaling to JSON\",\n\t\t\t)\n\t\t}\n\t}\n\n\treturn data, nil\n}", "title": "" }, { "docid": "36892b17087dc8a85bf713165138fe38", "score": "0.48135486", "text": "func asIntFromType(t reflect.Type, param string) int64 {\n\tswitch t {\n\tcase timeDurationType:\n\t\treturn asIntFromTimeDuration(param)\n\tdefault:\n\t\treturn asInt(param)\n\t}\n}", "title": "" }, { "docid": "2c0d2d0201bedc6d9c6cc04853fd3b7b", "score": "0.4810567", "text": "func (s *BaseScalaListener) EnterVariantTypeParam(ctx *VariantTypeParamContext) {}", "title": "" }, { "docid": "a6faaee308d1aa67b53c4186c8215584", "score": "0.48034033", "text": "func multipointTypeConverter(ctx context.Context, src *multipointType, destTi TypeInfo) (tc TypeConverter, needsConversion bool, err error) {\n\tswitch dest := destTi.(type) {\n\tcase *bitType:\n\t\treturn func(ctx context.Context, vrw types.ValueReadWriter, v types.Value) (types.Value, error) {\n\t\t\treturn types.Uint(0), nil\n\t\t}, true, nil\n\tcase *blobStringType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *boolType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *datetimeType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *decimalType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *enumType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *floatType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *geomcollType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *geometryType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *inlineBlobType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *intType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *jsonType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *linestringType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *multilinestringType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *multipointType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase 
*multipolygonType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *pointType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *polygonType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *setType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *timeType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *uintType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *uuidType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *varBinaryType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *varStringType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tcase *yearType:\n\t\treturn wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)\n\tdefault:\n\t\treturn nil, false, UnhandledTypeConversion.New(src.String(), destTi.String())\n\t}\n}", "title": "" }, { "docid": "66216fac466a5e5b0b1267722b3346b1", "score": "0.48027351", "text": "func getCodeToConvertOutParameter(_typeExpr *ast.Expr, packageName string, name string, isPointer bool) jen.Code {\n\n\tif _, isArray := (*_typeExpr).(*ast.ArrayType); isArray {\n\t\treturn jen.Id(\"copyToGoSlice\").Call(jen.Qual(\"reflect\", \"ValueOf\").Call(jen.Id(argName(name))),\n\t\t\tjen.Id(name))\n\t} else if starExpr, isPointerRecv := (*_typeExpr).(*ast.StarExpr); isPointerRecv {\n\t\t_type := &starExpr.X\n\t\treturn getCodeToConvertOutParameter(_type, packageName, name, true)\n\t} else if identExpr, isIdent := (*_typeExpr).(*ast.Ident); isIdent {\n\t\ttypeName := identExpr.Name\n\t\tif isLibArrayType(typeName, packageName) {\n\t\t\treturn jen.Id(\"copyTo\"+getSliceName(typeName)).Call(jen.Qual(\"reflect\", \"ValueOf\").Call(jen.Id(argName(name))),\n\t\t\t\tjen.Id(name))\n\t\t}\n\t\tif dealOutStringAsGostring && typeName == \"string\" {\n\t\t\treturn jen.Id(\"copyString\").Call(jen.Id(argName(name)), jen.Id(name))\n\t\t} else if IsBasicGoType(typeName) {\n\t\t\treturn jen.Op(\"*\").Id(name).Op(\"=\").Id(argName(name))\n\t\t} else if isInHandleTypesList(packageName + packageSeparator + typeName) {\n\t\t\tvar argCode jen.Code\n\t\t\tif isPointer {\n\t\t\t\targCode = jen.Id(argName(name))\n\t\t\t} else {\n\t\t\t\targCode = jen.Op(\"&\").Id(argName(name))\n\t\t\t}\n\t\t\treturn jen.Op(\"*\").Id(name).Op(\"=\").Id(\"register\" + handleTypes[packageName+packageSeparator+typeName] + \"Handle\").Call(argCode)\n\t\t} else if isLibArrayType(typeName, packageName) {\n\t\t\tvar argCode jen.Code\n\t\t\tif isPointer {\n\t\t\t\targCode = jen.Parens(jen.Op(\"*\").Id(argName(name))).Op(\"[:]\")\n\t\t\t} else {\n\t\t\t\targCode = jen.Id(argName(name)).Op(\"[:]\")\n\t\t\t}\n\n\t\t\treturn jen.Id(\"copyToBuffer\").Call(jen.Qual(\"reflect\", \"ValueOf\").Call(argCode),\n\t\t\t\tjen.Qual(\"unsafe\", \"Pointer\").Call(jen.Id(name)),\n\t\t\t\tjen.Id(\"uint\").Parens(jen.Id(\"Sizeof\"+typeName)))\n\n\t\t} else {\n\t\t\tvar argCode jen.Code\n\t\t\tif isPointer {\n\t\t\t\targCode = jen.Id(argName(name))\n\t\t\t} else {\n\t\t\t\targCode = jen.Op(\"&\").Id(argName(name))\n\t\t\t}\n\t\t\treturn jen.Op(\"*\").Id(name).Op(\"=\").Op(\"*\").Parens(jen.Op(\"*\").\n\t\t\t\tQual(\"C\", packageName+packageSeparator+typeName)).\n\t\t\t\tParens(jen.Qual(\"unsafe\", \"Pointer\").Parens(argCode))\n\t\t}\n\t} else if selectorExpr, isSelector := (*_typeExpr).(*ast.SelectorExpr); isSelector {\n\t\tidentExpr, isIdent := 
(selectorExpr.X).(*ast.Ident)\n\t\tif isIdent {\n\t\t\tselName := identExpr.Name\n\t\t\ttypeName := selectorExpr.Sel.Name\n\t\t\tvar argCode jen.Code\n\t\t\tif isPointer {\n\t\t\t\targCode = jen.Id(argName(name))\n\t\t\t} else {\n\t\t\t\targCode = jen.Op(\"&\").Id(argName(name))\n\t\t\t}\n\t\t\tif isInHandleTypesList(selName + packageSeparator + typeName) {\n\t\t\t\treturn jen.Op(\"*\").Id(name).Op(\"=\").\n\t\t\t\t\tId(\"register\" + handleTypes[selName+packageSeparator+typeName] + \"Handle\").\n\t\t\t\t\tCall(argCode)\n\t\t\t}\n\t\t\tif isInCustomTypesList(selName + packageSeparator + typeName) {\n\t\t\t\treturn jen.Op(\"*\").Id(name).Op(\"=\").Op(\"*\").Parens(jen.Op(\"*\").\n\t\t\t\t\tQual(\"C\", selName+packageSeparator+typeName)).\n\t\t\t\t\tParens(jen.Qual(\"unsafe\", \"Pointer\").Parens(argCode))\n\t\t\t} else {\n\t\t\t\treturn jen.Op(\"*\").Id(name).Op(\"=\").Op(\"*\").Parens(jen.Op(\"*\").\n\t\t\t\t\tQual(\"C\", selName+packageSeparator+typeName)).\n\t\t\t\t\tParens(jen.Qual(\"unsafe\", \"Pointer\").Parens(argCode))\n\t\t\t}\n\t\t}\n\t} else if mapExpr, isMap := (*_typeExpr).(*ast.MapType); isMap {\n\t\tidentKeyExpr, isKeyIdent := (mapExpr.Key).(*ast.Ident)\n\t\tidentValueExpr, isValueIdent := (mapExpr.Value).(*ast.Ident)\n\t\tif isKeyIdent && isValueIdent {\n\t\t\tif identKeyExpr.Name == \"string\" && identValueExpr.Name == \"string\" {\n\t\t\t\treturn jen.Id(\"copyToStringMap\").Call(jen.Id(argName(name)), jen.Id(name))\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "ba2b44b7d64d4aa629bc966c8797d1c5", "score": "0.48012263", "text": "func convert(v reflect.Value, t reflect.Type) reflect.Value {\n\tif v.Type() == t {\n\t\treturn v\n\t}\n\tswitch val := v.Interface().(type) {\n\tcase reflect.Type:\n\t\treturn v // type conversion\n\tcase UntypedInt:\n\t\tswitch t {\n\t\tcase reflect.TypeOf(UntypedFloat{}):\n\t\t\tres := UntypedFloat{new(big.Float)}\n\t\t\tres.Float.SetInt(val.Int)\n\t\t\treturn reflect.ValueOf(res)\n\t\tcase reflect.TypeOf(UntypedComplex{}):\n\t\t\tres := UntypedComplex{new(big.Float), new(big.Float)}\n\t\t\tres.Real.SetInt(val.Int)\n\t\t\treturn reflect.ValueOf(res)\n\t\t}\n\t\tret := reflect.New(t).Elem()\n\t\tswitch t.Kind() {\n\t\tcase reflect.Interface:\n\t\t\tret.Set(reflect.ValueOf(int(val.Int64())))\n\t\tcase reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:\n\t\t\tret.SetUint(val.Uint64())\n\t\tcase reflect.Float32, reflect.Float64:\n\t\t\tret.SetFloat(float64(val.Int64()))\n\t\tcase reflect.Complex64, reflect.Complex128:\n\t\t\tret.SetComplex(complex(float64(val.Int64()), 0))\n\t\tdefault:\n\t\t\tret.SetInt(val.Int64())\n\t\t}\n\t\treturn ret\n\tcase UntypedFloat:\n\t\tswitch t {\n\t\tcase reflect.TypeOf(UntypedComplex{}):\n\t\t\tres := UntypedComplex{new(big.Float), new(big.Float)}\n\t\t\tres.Real.Set(val.Float)\n\t\t\treturn reflect.ValueOf(res)\n\t\t}\n\t\tret := reflect.New(t).Elem()\n\t\tf, _ := val.Float64()\n\t\tswitch t.Kind() {\n\t\tcase reflect.Interface:\n\t\t\tret.Set(reflect.ValueOf(float64(f)))\n\t\tcase reflect.Complex64, reflect.Complex128:\n\t\t\tret.SetComplex(complex(float64(f), 0))\n\t\tdefault:\n\t\t\tret.SetFloat(f)\n\t\t}\n\t\treturn ret\n\tcase UntypedComplex:\n\t\tret := reflect.New(t).Elem()\n\t\tr, _ := val.Real.Float64()\n\t\ti, _ := val.Imag.Float64()\n\t\tif t.Kind() == reflect.Interface {\n\t\t\tret.Set(reflect.ValueOf(complex(r, i)))\n\t\t} else {\n\t\t\tret.SetComplex(complex(r, i))\n\t\t}\n\t\treturn ret\n\tcase UntypedString:\n\t\tret := 
reflect.New(t).Elem()\n\t\ts := val.String\n\t\tif t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 {\n\t\t\tret.Set(reflect.ValueOf([]byte(s)))\n\t\t} else if t.Kind() == reflect.Interface {\n\t\t\tret.Set(reflect.ValueOf(s))\n\t\t} else {\n\t\t\tret.SetString(s)\n\t\t}\n\t\treturn ret\n\tcase UntypedRune:\n\t\tret := reflect.New(t).Elem()\n\t\tr := val.Rune\n\t\tif t.Kind() == reflect.Interface {\n\t\t\tret.Set(reflect.ValueOf(r))\n\t\t} else {\n\t\t\tret.SetInt(int64(r))\n\t\t}\n\t\treturn ret\n\tcase UntypedBool:\n\t\tret := reflect.New(t).Elem()\n\t\tb := val.Bool\n\t\tif t.Kind() == reflect.Interface {\n\t\t\tret.Set(reflect.ValueOf(b))\n\t\t} else {\n\t\t\tret.SetBool(b)\n\t\t}\n\t\treturn ret\n\tdefault:\n\t\tret := reflect.New(t).Elem()\n\t\tret.Set(v)\n\t\treturn ret\n\t}\n}", "title": "" }, { "docid": "23f698fcbc116af74d7ac5335eb53bc8", "score": "0.4787816", "text": "func volLsToGeneric(templParams []volumeLsTemplateParams, JSONParams []volumeLsJSONParams) (genericParams []interface{}) {\n\tif len(templParams) > 0 {\n\t\tfor _, v := range templParams {\n\t\t\tgenericParams = append(genericParams, interface{}(v))\n\t\t}\n\t\treturn\n\t}\n\tfor _, v := range JSONParams {\n\t\tgenericParams = append(genericParams, interface{}(v))\n\t}\n\treturn\n}", "title": "" }, { "docid": "9a21adfcc0d0d36f151ee363c7ce354d", "score": "0.47873423", "text": "func (ms *MethodSpec) setRequestType(curThriftFile string, funcSpec *compile.FunctionSpec, packageHelper *PackageHelper) error {\n\tif len(funcSpec.ArgsSpec) == 0 {\n\t\tms.RequestType = \"\"\n\t\treturn nil\n\t}\n\tvar err error\n\tif ms.isRequestBoxed(funcSpec) {\n\t\tms.RequestBoxed = true\n\t\tms.BoxedRequestType, err = packageHelper.TypeFullName(funcSpec.ArgsSpec[0].Type)\n\t\tms.BoxedRequestName = PascalCase(funcSpec.ArgsSpec[0].Name)\n\t\tif err == nil && IsStructType(funcSpec.ArgsSpec[0].Type) {\n\t\t\tms.BoxedRequestType = \"*\" + ms.BoxedRequestType\n\t\t}\n\t}\n\n\tgoPackageName, err := packageHelper.TypePackageName(curThriftFile)\n\tif err == nil {\n\t\tms.ShortRequestType = goPackageName + \".\" +\n\t\t\tms.ThriftService + \"_\" + strings.Title(ms.Name) + \"_Args\"\n\t\tms.RequestType = \"*\" + ms.ShortRequestType\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to set request type\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "bb465d79bee1411d4c23e9df41d7a6db", "score": "0.47749636", "text": "func getFlatParams(fi *fieldInfo, args []interface{}, tz *time.Location) (params []interface{}) {\noutFor:\n\tfor _, arg := range args {\n\t\tif arg == nil {\n\t\t\tparams = append(params, arg)\n\t\t\tcontinue\n\t\t}\n\n\t\tval := reflect.ValueOf(arg)\n\t\tkind := val.Kind()\n\t\tif kind == reflect.Ptr {\n\t\t\tval = val.Elem()\n\t\t\tkind = val.Kind()\n\t\t\targ = val.Interface()\n\t\t}\n\n\t\tswitch kind {\n\t\tcase reflect.String:\n\t\t\tv := val.String()\n\t\t\tif fi != nil {\n\t\t\t\tif fi.fieldType == TypeTimeField || fi.fieldType == TypeDateField || fi.fieldType == TypeDateTimeField {\n\t\t\t\t\tvar t time.Time\n\t\t\t\t\tvar err error\n\t\t\t\t\tif len(v) >= 19 {\n\t\t\t\t\t\ts := v[:19]\n\t\t\t\t\t\tt, err = time.ParseInLocation(formatDateTime, s, DefaultTimeLoc)\n\t\t\t\t\t} else if len(v) >= 10 {\n\t\t\t\t\t\ts := v\n\t\t\t\t\t\tif len(v) > 10 {\n\t\t\t\t\t\t\ts = v[:10]\n\t\t\t\t\t\t}\n\t\t\t\t\t\tt, err = time.ParseInLocation(formatDate, s, tz)\n\t\t\t\t\t} else {\n\t\t\t\t\t\ts := v\n\t\t\t\t\t\tif len(s) > 8 {\n\t\t\t\t\t\t\ts = v[:8]\n\t\t\t\t\t\t}\n\t\t\t\t\t\tt, err = 
time.ParseInLocation(formatTime, s, tz)\n\t\t\t\t\t}\n\t\t\t\t\tif err == nil {\n\t\t\t\t\t\tif fi.fieldType == TypeDateField {\n\t\t\t\t\t\t\tv = t.In(tz).Format(formatDate)\n\t\t\t\t\t\t} else if fi.fieldType == TypeDateTimeField {\n\t\t\t\t\t\t\tv = t.In(tz).Format(formatDateTime)\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tv = t.In(tz).Format(formatTime)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\targ = v\n\t\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:\n\t\t\targ = val.Int()\n\t\tcase reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:\n\t\t\targ = val.Uint()\n\t\tcase reflect.Float32:\n\t\t\targ, _ = StrTo(ToStr(arg)).Float64()\n\t\tcase reflect.Float64:\n\t\t\targ = val.Float()\n\t\tcase reflect.Bool:\n\t\t\targ = val.Bool()\n\t\tcase reflect.Slice, reflect.Array:\n\t\t\tif _, ok := arg.([]byte); ok {\n\t\t\t\tcontinue outFor\n\t\t\t}\n\n\t\t\tvar args []interface{}\n\t\t\tfor i := 0; i < val.Len(); i++ {\n\t\t\t\tv := val.Index(i)\n\n\t\t\t\tvar vu interface{}\n\t\t\t\tif v.CanInterface() {\n\t\t\t\t\tvu = v.Interface()\n\t\t\t\t}\n\n\t\t\t\tif vu == nil {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\targs = append(args, vu)\n\t\t\t}\n\n\t\t\tif len(args) > 0 {\n\t\t\t\tp := getFlatParams(fi, args, tz)\n\t\t\t\tparams = append(params, p...)\n\t\t\t}\n\t\t\tcontinue outFor\n\t\tcase reflect.Struct:\n\t\t\tif v, ok := arg.(time.Time); ok {\n\t\t\t\tif fi != nil && fi.fieldType == TypeDateField {\n\t\t\t\t\targ = v.In(tz).Format(formatDate)\n\t\t\t\t} else if fi != nil && fi.fieldType == TypeDateTimeField {\n\t\t\t\t\targ = v.In(tz).Format(formatDateTime)\n\t\t\t\t} else if fi != nil && fi.fieldType == TypeTimeField {\n\t\t\t\t\targ = v.In(tz).Format(formatTime)\n\t\t\t\t} else {\n\t\t\t\t\targ = v.In(tz).Format(formatDateTime)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\ttyp := val.Type()\n\t\t\t\tname := getFullName(typ)\n\t\t\t\tvar value interface{}\n\t\t\t\tif mmi, ok := defaultModelCache.getByFullName(name); ok {\n\t\t\t\t\tif _, vu, exist := getExistPk(mmi, val); exist {\n\t\t\t\t\t\tvalue = vu\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\targ = value\n\n\t\t\t\tif arg == nil {\n\t\t\t\t\tpanic(fmt.Errorf(\"need a valid args value, unknown table or value `%s`\", name))\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tparams = append(params, arg)\n\t}\n\treturn\n}", "title": "" }, { "docid": "b4bb14f559df41ef25fc34cc887a581c", "score": "0.47722805", "text": "func (stm Stm) ParamTypeMaker(event string) ParamTypeMaker {\n\tf := stm.lookup(event, ParamTypeMakerSuffix)\n\tif f != nil {\n\t\treturn f.(ParamTypeMaker)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8d3be2ae9b7cce366b49e46b30a927ca", "score": "0.47680932", "text": "func convertParam(param interface{}) int {\n\tvar result int\n\tswitch param.(type) {\n\tdefault:\n\t\treturn int(0)\n\tcase float64:\n\t\tresult = int(param.(float64))\n\tcase string:\n\t\tt, err := strconv.Atoi(param.(string))\n\t\tif err != nil {\n\t\t\tresult = int(0)\n\t\t}\n\t\tresult = t\n\t}\n\tif result < 0 {\n\t\treturn int(0)\n\t}\n\treturn result\n}", "title": "" }, { "docid": "9255e547e38332ca4c280e0cfa18bde2", "score": "0.4762658", "text": "func convertParametersIntoRawExtension(t *testing.T, parameters map[string]interface{}) *runtime.RawExtension {\n\tmarshalledParams, err := json.Marshal(parameters)\n\tif err != nil {\n\t\tt.Fatalf(\"Failed to marshal parameters %v : %v\", parameters, err)\n\t}\n\treturn &runtime.RawExtension{Raw: marshalledParams}\n}", "title": "" }, { "docid": "5f80105b5090e3f45bfb083f189268fd", 
"score": "0.47590375", "text": "func newParamValueErr(v interface{}) error {\n\ttyp := fmt.Sprintf(\"%T\", v)\n\treturn fmt.Errorf(\"%w: cannot process function on %q\", ErrInvalidParamValue, typ)\n}", "title": "" }, { "docid": "ad6cf81a7642c927d8667644907dc417", "score": "0.4757889", "text": "func mapParams(params interface{}, fn func(param string, index int, field reflect.StructField)) {\n\trParams := reflect.TypeOf(params)\n\tfor i := 0; i < rParams.NumField(); i++ {\n\t\tfield := rParams.Field(i)\n\t\tparam := field.Tag.Get(\"json\")\n\t\tfn(param, i, field)\n\t}\n}", "title": "" }, { "docid": "1446ad0dd6c8c351662f556f7c7b869c", "score": "0.47475258", "text": "func conversionCode(from, to, typeName string, pointer bool) (string, bool) {\n\tvar (\n\t\tparse string\n\t\tcast string\n\t\tcheckErr bool\n\t)\n\ttarget := to\n\tneedCast := typeName != stringN && typeName != bytesN && flagType(typeName) != \"JSON\"\n\tdecl := \"\"\n\tif needCast && pointer {\n\t\ttarget = \"val\"\n\t\tdecl = \":\"\n\t}\n\tswitch typeName {\n\tcase boolN:\n\t\tparse = fmt.Sprintf(\"%s, err %s= strconv.ParseBool(%s)\", target, decl, from)\n\t\tcheckErr = true\n\tcase intN:\n\t\tparse = fmt.Sprintf(\"var v int64\\nv, err = strconv.ParseInt(%s, 10, 64)\", from)\n\t\tcast = fmt.Sprintf(\"%s %s= int(v)\", target, decl)\n\t\tcheckErr = true\n\tcase int32N:\n\t\tparse = fmt.Sprintf(\"var v int64\\nv, err = strconv.ParseInt(%s, 10, 32)\", from)\n\t\tcast = fmt.Sprintf(\"%s %s= int32(v)\", target, decl)\n\t\tcheckErr = true\n\tcase int64N:\n\t\tparse = fmt.Sprintf(\"%s, err %s= strconv.ParseInt(%s, 10, 64)\", target, decl, from)\n\tcase uintN:\n\t\tparse = fmt.Sprintf(\"var v uint64\\nv, err = strconv.ParseUint(%s, 10, 64)\", from)\n\t\tcast = fmt.Sprintf(\"%s %s= uint(v)\", target, decl)\n\t\tcheckErr = true\n\tcase uint32N:\n\t\tparse = fmt.Sprintf(\"var v uint64\\nv, err = strconv.ParseUint(%s, 10, 32)\", from)\n\t\tcast = fmt.Sprintf(\"%s %s= uint32(v)\", target, decl)\n\t\tcheckErr = true\n\tcase uint64N:\n\t\tparse = fmt.Sprintf(\"%s, err %s= strconv.ParseUint(%s, 10, 64)\", target, decl, from)\n\t\tcheckErr = true\n\tcase float32N:\n\t\tparse = fmt.Sprintf(\"var v float64\\nv, err = strconv.ParseFloat(%s, 32)\", from)\n\t\tcast = fmt.Sprintf(\"%s %s= float32(v)\", target, decl)\n\t\tcheckErr = true\n\tcase float64N:\n\t\tparse = fmt.Sprintf(\"%s, err %s= strconv.ParseFloat(%s, 64)\", target, decl, from)\n\t\tcheckErr = true\n\tcase stringN:\n\t\tparse = fmt.Sprintf(\"%s %s= %s\", target, decl, from)\n\tcase bytesN:\n\t\tparse = fmt.Sprintf(\"%s %s= string(%s)\", target, decl, from)\n\tdefault:\n\t\tparse = fmt.Sprintf(\"err = json.Unmarshal([]byte(%s), &%s)\", from, target)\n\t\tcheckErr = true\n\t}\n\tif !needCast {\n\t\treturn parse, checkErr\n\t}\n\tif cast != \"\" {\n\t\tparse = parse + \"\\n\" + cast\n\t}\n\tif to != target {\n\t\tref := \"\"\n\t\tif pointer {\n\t\t\tref = \"&\"\n\t\t}\n\t\tparse = parse + fmt.Sprintf(\"\\n%s = %s%s\", to, ref, target)\n\t}\n\treturn parse, checkErr\n}", "title": "" }, { "docid": "ca20f1c4696142185d706303c11816cd", "score": "0.4738153", "text": "func (o *GetEventsParams) bindType(rawData []string, hasKey bool, formats strfmt.Registry) error {\n\tvar raw string\n\tif len(rawData) > 0 {\n\t\traw = rawData[len(rawData)-1]\n\t}\n\n\t// Required: false\n\t// AllowEmptyValue: false\n\n\tif raw == \"\" { // empty values pass all other validations\n\t\treturn nil\n\t}\n\to.Type = &raw\n\n\treturn nil\n}", "title": "" }, { "docid": "eef7ab2e28d6f682a26e5bb40e51ad3e", "score": 
"0.47370273", "text": "func insert(params []interface{}, args ...interface{}) []interface{} {\n\tfor _, x := range args {\n\t\tswitch x := x.(type) {\n\t\tcase []int:\n\t\t\tfor _, x := range x {\n\t\t\t\tparams = append(params, x)\n\t\t\t}\n\t\tcase []int64:\n\t\t\tfor _, x := range x {\n\t\t\t\tparams = append(params, x)\n\t\t\t}\n\t\tcase []string:\n\t\t\tfor _, x := range x {\n\t\t\t\tparams = append(params, x)\n\t\t\t}\n\t\tcase []interface{}:\n\t\t\tparams = insert(params, x...)\n\t\tdefault:\n\t\t\tparams = append(params, x)\n\t\t}\n\t}\n\treturn params\n}", "title": "" }, { "docid": "6f12da6854fd8f6b353d5c080e3d6c6d", "score": "0.47185376", "text": "func httpParamsToArgs(rpcFunc *RPCFunc, r *http.Request) ([]reflect.Value, error) {\n\targTypes := rpcFunc.args\n\targNames := rpcFunc.argNames\n\n\tvar err error\n\tvalues := make([]reflect.Value, len(argNames))\n\tfor i, name := range argNames {\n\t\tty := argTypes[i]\n\t\targ := GetParam(r, name)\n\t\tvalues[i], err = _jsonStringToArg(ty, arg)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn values, nil\n}", "title": "" }, { "docid": "080dfbb0529c945b73b3826510adbc0a", "score": "0.47143608", "text": "func TransStringType(s string) uint64 {\n\ts = strings.ToLower(s)\n\tswitch {\n\tcase strings.Contains(s, \"string\"), strings.Contains(s, \"char\"), strings.Contains(s, \"text\"), strings.Contains(s, \"json\"):\n\t\treturn types.TypeStringArg\n\tcase strings.Contains(s, \"int\"), strings.Contains(s, \"long\"), strings.Contains(s, \"short\"), strings.Contains(s, \"tiny\"):\n\t\treturn types.TypeIntArg\n\tcase strings.Contains(s, \"float\"), strings.Contains(s, \"decimal\"), strings.Contains(s, \"double\"):\n\t\treturn types.TypeFloatArg\n\tcase strings.Contains(s, \"time\"), strings.Contains(s, \"date\"):\n\t\treturn types.TypeDatetimeArg\n\tdefault:\n\t\tpanic(fmt.Sprintf(\"no implement for type: %s\", s))\n\t}\n}", "title": "" }, { "docid": "3eab1e2ccfa15e3b9e373b20259414c0", "score": "0.4708465", "text": "func newParamTypeErr(p, e interface{}) error {\n\ttyp := fmt.Sprintf(\"%T\", e)\n\treturn fmt.Errorf(\"%w: %#v not a(n) %q\", ErrInvalidParamType, p, typ)\n}", "title": "" }, { "docid": "c4b001e4911afac07677efe2f14f2814", "score": "0.4708459", "text": "func (s *BaseScalaListener) ExitFunctionArgTypes(ctx *FunctionArgTypesContext) {}", "title": "" }, { "docid": "162693c0d0904f42ab117e5847d869dd", "score": "0.46993232", "text": "func main() {\n\tfmt.Println(\"horld world !\")\n\tfmt.Println(\"hello go!\")\n\n\tvar i int;\n\tvar f float32;\n\ti = 8\n\tf = 7\n\tfmt.Println(\"i == \", i)\n\tfmt.Println(\"f == \", f)\n\tfmt.Printf(\"i's type is %T\\n\", i)\n\tfmt.Printf(\"f's type is %T\\n\", f)\n\n\tvar a, b = 1, \"foo\"\n\tfmt.Println(\"a == \", a, \"b == \", b)\n\tfmt.Printf(\"a's type is %T\\n\", a)\n\tfmt.Printf(\"b's type is %T\\n\", b)\n\n\tvar ptr *int\n\tptr = &i\n\tfmt.Println(\"ptr == \", ptr)\n\tfmt.Println(\"*ptr == \", *ptr)\n\n\tvar x interface{}\n\n\tswitch i := x.(type) {\n\tcase nil:\n\t\tfmt.Println(\"type of x :%T\", i)\n\tcase int:\n\t\tfmt.Printf(\"x is int\")\n\tcase float64:\n\t\tfmt.Printf(\"x is float64\")\n\tcase func(int) float64:\n\t\tfmt.Printf(\"x is func(int)\")\n\tcase bool, string:\n\t\tfmt.Printf(\"x is bool or string\")\n\tdefault:\n\t\tfmt.Printf(\"don't know the type\")\n\t}\n\n\tfmt.Println(stringutils.Reverse(\"!oG ,olleH\"));\n\n\n\n}", "title": "" }, { "docid": "2e5fc54520c688ac77031a086ccc1bd8", "score": "0.4696133", "text": "func (g *translator) convertType(conf IdentConfig, t 
cc.Type, where token.Position) types.Type {\n\t// custom type overrides coming from the config\n\t// note that we don't save them since they might depend\n\t// not only on the input type, but also on a field name\n\tswitch conf.Type {\n\tcase HintBool:\n\t\treturn g.env.Go().Bool()\n\tcase HintIface:\n\t\treturn g.env.Go().Iface()\n\tcase HintString:\n\t\treturn g.env.Go().String()\n\tcase HintSlice:\n\t\tct := g.newTypeCC(IdentConfig{}, t, where)\n\t\tvar elem types.Type\n\t\tswitch ct := ct.(type) {\n\t\tcase types.PtrType:\n\t\t\telem = ct.Elem()\n\t\tcase types.ArrayType:\n\t\t\telem = ct.Elem()\n\t\tdefault:\n\t\t\tpanic(\"expected an array or a pointer\")\n\t\t}\n\t\treturn types.SliceT(elem)\n\t}\n\t// allow invalid types, they might still be useful\n\t// since one may define them in a separate Go file\n\t// and make the code valid\n\tif t.Kind() == cc.Invalid {\n\t\treturn types.UnkT(g.env.PtrSize())\n\t}\n\tif ct, ok := g.ctypes[t]; ok {\n\t\treturn ct\n\t}\n\tct := g.newTypeCC(conf, t, where)\n\tg.ctypes[t] = ct\n\treturn ct\n}", "title": "" }, { "docid": "efeedd1a2e02e9dbef3c9046bf8d4eb2", "score": "0.4687939", "text": "func SwitchOnType(x interface{}) {\n\tswitch x.(type) {\n\tcase int:\n\t\tfmt.Println(\"int\")\n\tcase float32:\n\t\tfmt.Println(\"float32\")\n\tcase SomeType:\n\t\tfmt.Println(\"SomeType\")\n\tdefault:\n\t\tfmt.Println(\"Unknown\")\n\t}\n}", "title": "" }, { "docid": "d1bfecb606210f9854fad1697d8d59fb", "score": "0.46833733", "text": "func BindParameters(pointerToStruct interface{}, optionalNamespace ...string) {\n\tvar prefix string\n\tif len(optionalNamespace) == 0 {\n\t\tprefix = lowerCamelCase(callerShortPackageName())\n\t} else {\n\t\tprefix = optionalNamespace[0]\n\t}\n\n\tval := reflect.ValueOf(pointerToStruct).Elem()\n\tfor i := 0; i < val.NumField(); i++ {\n\t\tvalueField := val.Field(i)\n\t\ttypeField := val.Type().Field(i)\n\n\t\tname := prefix + \".\"\n\t\tif tagName, exists := typeField.Tag.Lookup(\"name\"); exists {\n\t\t\tname += tagName\n\t\t} else {\n\t\t\tname += lowerCamelCase(typeField.Name)\n\t\t}\n\n\t\tswitch defaultValue := valueField.Interface().(type) {\n\t\tcase bool:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.BoolVarP(valueField.Addr().Interface().(*bool), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.BoolVar(valueField.Addr().Interface().(*bool), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"bool\",\n\t\t\t}\n\t\tcase time.Duration:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif parsedDuration, err := time.ParseDuration(tagDefaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t} else {\n\t\t\t\t\tdefaultValue = parsedDuration\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.DurationVarP(valueField.Addr().Interface().(*time.Duration), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.DurationVar(valueField.Addr().Interface().(*time.Duration), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = 
&BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"time.Duration\",\n\t\t\t}\n\t\tcase float32:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.Float32VarP(valueField.Addr().Interface().(*float32), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.Float32Var(valueField.Addr().Interface().(*float32), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"float32\",\n\t\t\t}\n\t\tcase float64:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.Float64VarP(valueField.Addr().Interface().(*float64), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.Float64Var(valueField.Addr().Interface().(*float64), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"float64\",\n\t\t\t}\n\t\tcase int:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.IntVarP(valueField.Addr().Interface().(*int), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.IntVar(valueField.Addr().Interface().(*int), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"int\",\n\t\t\t}\n\t\tcase int8:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.Int8VarP(valueField.Addr().Interface().(*int8), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.Int8Var(valueField.Addr().Interface().(*int8), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"int8\",\n\t\t\t}\n\t\tcase int16:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.Int16VarP(valueField.Addr().Interface().(*int16), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.Int16Var(valueField.Addr().Interface().(*int16), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = 
&BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"int16\",\n\t\t\t}\n\t\tcase int32:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.Int32VarP(valueField.Addr().Interface().(*int32), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.Int32Var(valueField.Addr().Interface().(*int32), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"int32\",\n\t\t\t}\n\t\tcase int64:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.Int64VarP(valueField.Addr().Interface().(*int64), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.Int64Var(valueField.Addr().Interface().(*int64), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"int64\",\n\t\t\t}\n\t\tcase string:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.StringVarP(valueField.Addr().Interface().(*string), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.StringVar(valueField.Addr().Interface().(*string), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"string\",\n\t\t\t}\n\t\tcase uint:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.UintVarP(valueField.Addr().Interface().(*uint), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.UintVar(valueField.Addr().Interface().(*uint), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"uint\",\n\t\t\t}\n\t\tcase uint8:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.Uint8VarP(valueField.Addr().Interface().(*uint8), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.Uint8Var(valueField.Addr().Interface().(*uint8), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = 
&BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"uint8\",\n\t\t\t}\n\t\tcase uint16:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.Uint16VarP(valueField.Addr().Interface().(*uint16), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.Uint16Var(valueField.Addr().Interface().(*uint16), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"uint16\",\n\t\t\t}\n\t\tcase uint32:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.Uint32VarP(valueField.Addr().Interface().(*uint32), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.Uint32Var(valueField.Addr().Interface().(*uint32), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"uint32\",\n\t\t\t}\n\t\tcase uint64:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tif _, err := fmt.Sscan(tagDefaultValue, &defaultValue); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.Uint64VarP(valueField.Addr().Interface().(*uint64), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.Uint64Var(valueField.Addr().Interface().(*uint64), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"uint64\",\n\t\t\t}\n\t\tcase []string:\n\t\t\tif tagDefaultValue, exists := typeField.Tag.Lookup(\"default\"); exists {\n\t\t\t\tparsedValue, err := csv.NewReader(strings.NewReader(tagDefaultValue)).Read()\n\t\t\t\tif err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t\tdefaultValue = parsedValue\n\t\t\t}\n\n\t\t\tif shortHand, exists := typeField.Tag.Lookup(\"shorthand\"); exists {\n\t\t\t\tpflag.StringSliceVarP(valueField.Addr().Interface().(*[]string), name, shortHand, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t} else {\n\t\t\t\tpflag.StringSliceVar(valueField.Addr().Interface().(*[]string), name, defaultValue, typeField.Tag.Get(\"usage\"))\n\t\t\t}\n\n\t\t\tboundParameters[name] = &BoundParameter{\n\t\t\t\tboundPointer: valueField.Addr().Interface(),\n\t\t\t\tboundType: \"[]string\",\n\t\t\t}\n\t\tdefault:\n\t\t\tBindParameters(valueField.Addr().Interface(), name)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "3f3de9a50b7ccc041928fced8235eb87", "score": "0.46782604", "text": "func Fun(name string, i ... 
interface{}) AqlFunction{\n var f AqlFunction\n f.Name = name\n f.Params = i\n return f\n}", "title": "" }, { "docid": "aae461d028de01bafa6305e9980aa6e1", "score": "0.4676255", "text": "func (p *Parameter) DataType(typeName string) *Parameter {\r\n\tp.data.DataType = typeName\r\n\treturn p\r\n}", "title": "" }, { "docid": "008bf1f02be0c324ae5b36b26d7db7fe", "score": "0.46754602", "text": "func conv(t_dst, t_src types.Type, x value) value {\n\tut_src := t_src.Underlying()\n\tut_dst := t_dst.Underlying()\n\n\t// Destination type is not an \"untyped\" type.\n\tif b, ok := ut_dst.(*types.Basic); ok && b.Info()&types.IsUntyped != 0 {\n\t\tpanic(\"oops: conversion to 'untyped' type: \" + b.String())\n\t}\n\n\t// Nor is it an interface type.\n\tif _, ok := ut_dst.(*types.Interface); ok {\n\t\tif _, ok := ut_src.(*types.Interface); ok {\n\t\t\tpanic(\"oops: Convert should be ChangeInterface\")\n\t\t} else {\n\t\t\tpanic(\"oops: Convert should be MakeInterface\")\n\t\t}\n\t}\n\n\t// Remaining conversions:\n\t// + untyped string/number/bool constant to a specific\n\t// representation.\n\t// + conversions between non-complex numeric types.\n\t// + conversions between complex numeric types.\n\t// + integer/[]byte/[]rune -> string.\n\t// + string -> []byte/[]rune.\n\t//\n\t// All are treated the same: first we extract the value to the\n\t// widest representation (int64, uint64, float64, complex128,\n\t// or string), then we convert it to the desired type.\n\n\tswitch ut_src := ut_src.(type) {\n\tcase *types.Pointer:\n\t\tswitch ut_dst := ut_dst.(type) {\n\t\tcase *types.Basic:\n\t\t\t// *value to unsafe.Pointer?\n\t\t\tif ut_dst.Kind() == types.UnsafePointer {\n\t\t\t\treturn unsafe.Pointer(x.(*value))\n\t\t\t}\n\t\t}\n\n\tcase *types.Slice:\n\t\t// []byte or []rune -> string\n\t\tswitch ut_src.Elem().Underlying().(*types.Basic).Kind() {\n\t\tcase types.Byte:\n\t\t\tx := x.([]value)\n\t\t\tb := make([]byte, 0, len(x))\n\t\t\tfor i := range x {\n\t\t\t\tb = append(b, x[i].(byte))\n\t\t\t}\n\t\t\treturn string(b)\n\n\t\tcase types.Rune:\n\t\t\tx := x.([]value)\n\t\t\tr := make([]rune, 0, len(x))\n\t\t\tfor i := range x {\n\t\t\t\tr = append(r, x[i].(rune))\n\t\t\t}\n\t\t\treturn string(r)\n\t\t}\n\n\tcase *types.Basic:\n\t\tx = widen(x)\n\n\t\t// integer -> string?\n\t\tif ut_src.Info()&types.IsInteger != 0 {\n\t\t\tif ut_dst, ok := ut_dst.(*types.Basic); ok && ut_dst.Kind() == types.String {\n\t\t\t\treturn fmt.Sprintf(\"%c\", x)\n\t\t\t}\n\t\t}\n\n\t\t// string -> []rune, []byte or string?\n\t\tif s, ok := x.(string); ok {\n\t\t\tswitch ut_dst := ut_dst.(type) {\n\t\t\tcase *types.Slice:\n\t\t\t\tvar res []value\n\t\t\t\tswitch ut_dst.Elem().Underlying().(*types.Basic).Kind() {\n\t\t\t\tcase types.Rune:\n\t\t\t\t\tfor _, r := range []rune(s) {\n\t\t\t\t\t\tres = append(res, r)\n\t\t\t\t\t}\n\t\t\t\t\treturn res\n\t\t\t\tcase types.Byte:\n\t\t\t\t\tfor _, b := range []byte(s) {\n\t\t\t\t\t\tres = append(res, b)\n\t\t\t\t\t}\n\t\t\t\t\treturn res\n\t\t\t\t}\n\t\t\tcase *types.Basic:\n\t\t\t\tif ut_dst.Kind() == types.String {\n\t\t\t\t\treturn x.(string)\n\t\t\t\t}\n\t\t\t}\n\t\t\tbreak // fail: no other conversions for string\n\t\t}\n\n\t\t// unsafe.Pointer -> *value\n\t\tif ut_src.Kind() == types.UnsafePointer {\n\t\t\t// TODO(adonovan): this is wrong and cannot\n\t\t\t// really be fixed with the current design.\n\t\t\t//\n\t\t\t// return (*value)(x.(unsafe.Pointer))\n\t\t\t// creates a new pointer of a different\n\t\t\t// type but the underlying interface value\n\t\t\t// knows its \"true\" type and so 
cannot be\n\t\t\t// meaningfully used through the new pointer.\n\t\t\t//\n\t\t\t// To make this work, the interpreter needs to\n\t\t\t// simulate the memory layout of a real\n\t\t\t// compiled implementation.\n\t\t\t//\n\t\t\t// To at least preserve type-safety, we'll\n\t\t\t// just return the zero value of the\n\t\t\t// destination type.\n\t\t\treturn zero(t_dst)\n\t\t}\n\n\t\t// Conversions between complex numeric types?\n\t\tif ut_src.Info()&types.IsComplex != 0 {\n\t\t\tswitch ut_dst.(*types.Basic).Kind() {\n\t\t\tcase types.Complex64:\n\t\t\t\treturn complex64(x.(complex128))\n\t\t\tcase types.Complex128:\n\t\t\t\treturn x.(complex128)\n\t\t\t}\n\t\t\tbreak // fail: no other conversions for complex\n\t\t}\n\n\t\t// Conversions between non-complex numeric types?\n\t\tif ut_src.Info()&types.IsNumeric != 0 {\n\t\t\tkind := ut_dst.(*types.Basic).Kind()\n\t\t\tswitch x := x.(type) {\n\t\t\tcase int64: // signed integer -> numeric?\n\t\t\t\tswitch kind {\n\t\t\t\tcase types.Int:\n\t\t\t\t\treturn int(x)\n\t\t\t\tcase types.Int8:\n\t\t\t\t\treturn int8(x)\n\t\t\t\tcase types.Int16:\n\t\t\t\t\treturn int16(x)\n\t\t\t\tcase types.Int32:\n\t\t\t\t\treturn int32(x)\n\t\t\t\tcase types.Int64:\n\t\t\t\t\treturn int64(x)\n\t\t\t\tcase types.Uint:\n\t\t\t\t\treturn uint(x)\n\t\t\t\tcase types.Uint8:\n\t\t\t\t\treturn uint8(x)\n\t\t\t\tcase types.Uint16:\n\t\t\t\t\treturn uint16(x)\n\t\t\t\tcase types.Uint32:\n\t\t\t\t\treturn uint32(x)\n\t\t\t\tcase types.Uint64:\n\t\t\t\t\treturn uint64(x)\n\t\t\t\tcase types.Uintptr:\n\t\t\t\t\treturn uintptr(x)\n\t\t\t\tcase types.Float32:\n\t\t\t\t\treturn float32(x)\n\t\t\t\tcase types.Float64:\n\t\t\t\t\treturn float64(x)\n\t\t\t\t}\n\n\t\t\tcase uint64: // unsigned integer -> numeric?\n\t\t\t\tswitch kind {\n\t\t\t\tcase types.Int:\n\t\t\t\t\treturn int(x)\n\t\t\t\tcase types.Int8:\n\t\t\t\t\treturn int8(x)\n\t\t\t\tcase types.Int16:\n\t\t\t\t\treturn int16(x)\n\t\t\t\tcase types.Int32:\n\t\t\t\t\treturn int32(x)\n\t\t\t\tcase types.Int64:\n\t\t\t\t\treturn int64(x)\n\t\t\t\tcase types.Uint:\n\t\t\t\t\treturn uint(x)\n\t\t\t\tcase types.Uint8:\n\t\t\t\t\treturn uint8(x)\n\t\t\t\tcase types.Uint16:\n\t\t\t\t\treturn uint16(x)\n\t\t\t\tcase types.Uint32:\n\t\t\t\t\treturn uint32(x)\n\t\t\t\tcase types.Uint64:\n\t\t\t\t\treturn uint64(x)\n\t\t\t\tcase types.Uintptr:\n\t\t\t\t\treturn uintptr(x)\n\t\t\t\tcase types.Float32:\n\t\t\t\t\treturn float32(x)\n\t\t\t\tcase types.Float64:\n\t\t\t\t\treturn float64(x)\n\t\t\t\t}\n\n\t\t\tcase float64: // floating point -> numeric?\n\t\t\t\tswitch kind {\n\t\t\t\tcase types.Int:\n\t\t\t\t\treturn int(x)\n\t\t\t\tcase types.Int8:\n\t\t\t\t\treturn int8(x)\n\t\t\t\tcase types.Int16:\n\t\t\t\t\treturn int16(x)\n\t\t\t\tcase types.Int32:\n\t\t\t\t\treturn int32(x)\n\t\t\t\tcase types.Int64:\n\t\t\t\t\treturn int64(x)\n\t\t\t\tcase types.Uint:\n\t\t\t\t\treturn uint(x)\n\t\t\t\tcase types.Uint8:\n\t\t\t\t\treturn uint8(x)\n\t\t\t\tcase types.Uint16:\n\t\t\t\t\treturn uint16(x)\n\t\t\t\tcase types.Uint32:\n\t\t\t\t\treturn uint32(x)\n\t\t\t\tcase types.Uint64:\n\t\t\t\t\treturn uint64(x)\n\t\t\t\tcase types.Uintptr:\n\t\t\t\t\treturn uintptr(x)\n\t\t\t\tcase types.Float32:\n\t\t\t\t\treturn float32(x)\n\t\t\t\tcase types.Float64:\n\t\t\t\t\treturn float64(x)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tpanic(fmt.Sprintf(\"unsupported conversion: %s -> %s, dynamic type %T\", t_src, t_dst, x))\n}", "title": "" }, { "docid": "f50b912e05461216687806acced499b8", "score": "0.4671844", "text": "func valuesToTypes(values []interface{}) 
[]reflect.Type {\n\tvar types []reflect.Type\n\n\tfor _, v := range values {\n\t\tfor _, arg := range v.(objectspec.NetworkPayload).GetArgs() {\n\t\t\ttypes = append(types, arg.Type())\n\t\t}\n\t}\n\n\treturn types\n}", "title": "" }, { "docid": "2900701fca34e5a051a7e65d411c0bf7", "score": "0.46678162", "text": "func ExpandParameterToEmitable(param Parameter) (interface{}, error) {\n\tvar err error\n\tswitch t := param.Type; t {\n\tcase PublicKeyType:\n\t\treturn param.Value.(*keys.PublicKey).Bytes(), nil\n\tcase ArrayType:\n\t\tarr := param.Value.([]Parameter)\n\t\tres := make([]interface{}, len(arr))\n\t\tfor i := range arr {\n\t\t\tres[i], err = ExpandParameterToEmitable(arr[i])\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\treturn res, nil\n\tcase MapType, InteropInterfaceType, UnknownType, AnyType, VoidType:\n\t\treturn nil, fmt.Errorf(\"unsupported parameter type: %s\", t.String())\n\tdefault:\n\t\treturn param.Value, nil\n\t}\n}", "title": "" }, { "docid": "69c5c52c0ec12b5cfd9dce735c156134", "score": "0.4665071", "text": "func typeCheckParameter(obj interface{}, expected string, name string) error {\n\t// Make sure there is an object.\n\tif obj == nil {\n\t\treturn nil\n\t}\n\n\t// Check the type is as expected.\n\tif reflect.TypeOf(obj).String() != expected {\n\t\treturn fmt.Errorf(\"Expected %s to be of type %s but received %s.\", name, expected, reflect.TypeOf(obj).String())\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "69c5c52c0ec12b5cfd9dce735c156134", "score": "0.4665071", "text": "func typeCheckParameter(obj interface{}, expected string, name string) error {\n\t// Make sure there is an object.\n\tif obj == nil {\n\t\treturn nil\n\t}\n\n\t// Check the type is as expected.\n\tif reflect.TypeOf(obj).String() != expected {\n\t\treturn fmt.Errorf(\"Expected %s to be of type %s but received %s.\", name, expected, reflect.TypeOf(obj).String())\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "9a3dbf6e57f9df9b61fa56ce9cd252f3", "score": "0.4660958", "text": "func fixExpressionTypes(exp syntax.Exp, tname syntax.TypeId, lookup *syntax.TypeLookup) {\n\tswitch exp := exp.(type) {\n\tcase *syntax.ArrayExp:\n\t\tif tname.ArrayDim > 0 {\n\t\t\ttname.ArrayDim--\n\t\t}\n\t\tfor _, e := range exp.Value {\n\t\t\tfixExpressionTypes(e, tname, lookup)\n\t\t}\n\tcase *syntax.MapExp:\n\t\tif tname.MapDim > 0 {\n\t\t\ttname.ArrayDim = tname.MapDim - 1\n\t\t\ttname.MapDim = 0\n\t\t\tfor _, e := range exp.Value {\n\t\t\t\tfixExpressionTypes(e, tname, lookup)\n\t\t\t}\n\t\t} else if lookup == nil {\n\t\t\tif possibleStructType(tname, lookup) {\n\t\t\t\texp.Kind = syntax.KindStruct\n\t\t\t}\n\t\t} else {\n\t\t\tt := lookup.Get(tname)\n\t\t\tif t != nil {\n\t\t\t\tif t, ok := t.(*syntax.StructType); ok {\n\t\t\t\t\texp.Kind = syntax.KindStruct\n\t\t\t\t\tfor _, member := range t.Members {\n\t\t\t\t\t\tfixExpressionTypes(exp.Value[member.Id], member.Tname, lookup)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else if possibleStructType(tname, lookup) {\n\t\t\t\texp.Kind = syntax.KindStruct\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "b5f05fc01cce6bfcd95d33f311c7b8de", "score": "0.46594042", "text": "func (c *compilerContext) getRawFuncType(typ *types.Signature) llvm.Type {\n\t// Get the return type.\n\tvar returnType llvm.Type\n\tswitch typ.Results().Len() {\n\tcase 0:\n\t\t// No return values.\n\t\treturnType = c.ctx.VoidType()\n\tcase 1:\n\t\t// Just one return value.\n\t\treturnType = c.getLLVMType(typ.Results().At(0).Type())\n\tdefault:\n\t\t// Multiple return values. 
Put them together in a struct.\n\t\t// This appears to be the common way to handle multiple return values in\n\t\t// LLVM.\n\t\tmembers := make([]llvm.Type, typ.Results().Len())\n\t\tfor i := 0; i < typ.Results().Len(); i++ {\n\t\t\tmembers[i] = c.getLLVMType(typ.Results().At(i).Type())\n\t\t}\n\t\treturnType = c.ctx.StructType(members, false)\n\t}\n\n\t// Get the parameter types.\n\tvar paramTypes []llvm.Type\n\tif typ.Recv() != nil {\n\t\trecv := c.getLLVMType(typ.Recv().Type())\n\t\tif recv.StructName() == \"runtime._interface\" {\n\t\t\t// This is a call on an interface, not a concrete type.\n\t\t\t// The receiver is not an interface, but a i8* type.\n\t\t\trecv = c.i8ptrType\n\t\t}\n\t\tfor _, info := range c.expandFormalParamType(recv, \"\", nil) {\n\t\t\tparamTypes = append(paramTypes, info.llvmType)\n\t\t}\n\t}\n\tfor i := 0; i < typ.Params().Len(); i++ {\n\t\tsubType := c.getLLVMType(typ.Params().At(i).Type())\n\t\tfor _, info := range c.expandFormalParamType(subType, \"\", nil) {\n\t\t\tparamTypes = append(paramTypes, info.llvmType)\n\t\t}\n\t}\n\t// All functions take these parameters at the end.\n\tparamTypes = append(paramTypes, c.i8ptrType) // context\n\n\t// Make a func type out of the signature.\n\treturn llvm.FunctionType(returnType, paramTypes, false)\n}", "title": "" }, { "docid": "12390f4eaefc05b90e478553563b1a0a", "score": "0.46581286", "text": "func (b *Binder) Parameters() []reflect.Type {\n\tif b == nil {\n\t\treturn nil\n\t}\n\treturn []reflect.Type{b.inType}\n}", "title": "" }, { "docid": "664f91e4ff1d9e3b0b1b7363de07f0aa", "score": "0.46567082", "text": "func fixDatatype(t *Type, meta map[string]Type) {\n\tif strings.HasPrefix(t.Name, \"SoftLayer_Dns_Domain_ResourceRecord_\") {\n\t\tbaseRecordType, _ := meta[\"SoftLayer_Dns_Domain_ResourceRecord\"]\n\t\tfor propName, prop := range t.Properties {\n\t\t\tbaseRecordType.Properties[propName] = prop\n\t\t}\n\t\tmeta[\"SoftLayer_Dns_Domain_ResourceRecord\"] = baseRecordType\n\t} else if t.Name == \"SoftLayer_Container_User_Customer_External_Binding_Verisign\" || t.Name == \"SoftLayer_Container_User_Customer_External_Binding_Verisign_Totp\" {\n\t\tbaseType, _ := meta[\"SoftLayer_Container_User_Customer_External_Binding\"]\n\t\tfor propName, prop := range t.Properties {\n\t\t\tbaseType.Properties[propName] = prop\n\t\t}\n\t\tmeta[\"SoftLayer_Container_User_Customer_External_Binding\"] = baseType\n\t}\n}", "title": "" }, { "docid": "6649bf1b1fd4ff7da7f3712f160adc47", "score": "0.46493897", "text": "func newValue(typ typeID) value {\n\tswitch typ {\n\tcase typeString:\n\t\treturn new(valueString)\n\tcase typeBool:\n\t\treturn new(valueBool)\n\tcase typeInt:\n\t\treturn new(valueInt)\n\tcase typeFloat:\n\t\treturn new(valueFloat)\n\tcase typeDouble:\n\t\treturn new(valueDouble)\n\tcase typeUDim:\n\t\treturn new(valueUDim)\n\tcase typeUDim2:\n\t\treturn new(valueUDim2)\n\tcase typeRay:\n\t\treturn new(valueRay)\n\tcase typeFaces:\n\t\treturn new(valueFaces)\n\tcase typeAxes:\n\t\treturn new(valueAxes)\n\tcase typeBrickColor:\n\t\treturn new(valueBrickColor)\n\tcase typeColor3:\n\t\treturn new(valueColor3)\n\tcase typeVector2:\n\t\treturn new(valueVector2)\n\tcase typeVector3:\n\t\treturn new(valueVector3)\n\tcase typeVector2int16:\n\t\treturn new(valueVector2int16)\n\tcase typeCFrame:\n\t\treturn new(valueCFrame)\n\tcase typeCFrameQuat:\n\t\treturn new(valueCFrameQuat)\n\tcase typeToken:\n\t\treturn new(valueToken)\n\tcase typeReference:\n\t\treturn new(valueReference)\n\tcase typeVector3int16:\n\t\treturn 
new(valueVector3int16)\n\tcase typeNumberSequence:\n\t\treturn new(valueNumberSequence)\n\tcase typeColorSequence:\n\t\treturn new(valueColorSequence)\n\tcase typeNumberRange:\n\t\treturn new(valueNumberRange)\n\tcase typeRect:\n\t\treturn new(valueRect)\n\tcase typePhysicalProperties:\n\t\treturn new(valuePhysicalProperties)\n\tcase typeColor3uint8:\n\t\treturn new(valueColor3uint8)\n\tcase typeInt64:\n\t\treturn new(valueInt64)\n\tcase typeSharedString:\n\t\treturn new(valueSharedString)\n\tcase typeOptional:\n\t\treturn nil\n\tcase typeUniqueId:\n\t\treturn new(valueUniqueId)\n\tcase typeFont:\n\t\treturn new(valueFont)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "3f6c8e844cf6126bdd8f38d492bc3806", "score": "0.46472353", "text": "func getParam(name string, buffer []byte, size uint32, nonStructType tdh.NonStructType) (*kevent.Kparam, error) {\n\tif len(buffer) == 0 {\n\t\treturn nil, errors.New(\"property buffer is empty\")\n\t}\n\n\tvar (\n\t\ttyp kparams.Type\n\t\tvalue kparams.Value\n\t)\n\n\tswitch nonStructType.InType {\n\tcase tdh.IntypeUnicodeString:\n\t\ttyp, value = kparams.UnicodeString, utf16.PtrToString(unsafe.Pointer(&buffer[0]))\n\tcase tdh.IntypeAnsiString:\n\t\ttyp, value = kparams.AnsiString, string((*[1<<30 - 1]byte)(unsafe.Pointer(&buffer[0]))[:size-1:size-1])\n\n\tcase tdh.IntypeInt8:\n\t\ttyp, value = kparams.Int8, *(*int8)(unsafe.Pointer(&buffer[0]))\n\tcase tdh.IntypeUint8:\n\t\ttyp, value = kparams.Uint8, *(*uint8)(unsafe.Pointer(&buffer[0]))\n\t\tif nonStructType.OutType == tdh.OutypeHexInt8 {\n\t\t\ttyp = kparams.HexInt8\n\t\t}\n\tcase tdh.IntypeBoolean:\n\t\ttyp, value = kparams.Bool, *(*bool)(unsafe.Pointer(&buffer[0]))\n\n\tcase tdh.IntypeInt16:\n\t\ttyp, value = kparams.Int16, *(*int16)(unsafe.Pointer(&buffer[0]))\n\tcase tdh.IntypeUint16:\n\t\ttyp, value = kparams.Uint16, *(*uint16)(unsafe.Pointer(&buffer[0]))\n\t\tswitch nonStructType.OutType {\n\t\tcase tdh.OutypeHexInt16:\n\t\t\ttyp = kparams.HexInt16\n\t\tcase tdh.OutypePort:\n\t\t\ttyp = kparams.Port\n\t\t}\n\n\tcase tdh.IntypeInt32:\n\t\ttyp, value = kparams.Int32, *(*int32)(unsafe.Pointer(&buffer[0]))\n\tcase tdh.IntypeUint32:\n\t\ttyp, value = kparams.Uint32, *(*uint32)(unsafe.Pointer(&buffer[0]))\n\t\tswitch nonStructType.OutType {\n\t\tcase tdh.OutypeHexInt32:\n\t\t\ttyp = kparams.HexInt32\n\t\tcase tdh.OutypeIPv4:\n\t\t\ttyp = kparams.IPv4\n\t\t}\n\n\tcase tdh.IntypeInt64:\n\t\ttyp, value = kparams.Int64, *(*int64)(unsafe.Pointer(&buffer[0]))\n\tcase tdh.IntypeUint64:\n\t\ttyp, value = kparams.Uint64, *(*uint64)(unsafe.Pointer(&buffer[0]))\n\t\tif nonStructType.OutType == tdh.OutypeHexInt64 {\n\t\t\ttyp = kparams.HexInt64\n\t\t}\n\n\tcase tdh.IntypeFloat:\n\t\ttyp, value = kparams.Float, *(*float32)(unsafe.Pointer(&buffer[0]))\n\tcase tdh.IntypeDouble:\n\t\ttyp, value = kparams.Double, *(*float64)(unsafe.Pointer(&buffer[0]))\n\n\tcase tdh.IntypeHexInt32:\n\t\ttyp, value = kparams.HexInt32, *(*int32)(unsafe.Pointer(&buffer[0]))\n\tcase tdh.IntypeHexInt64:\n\t\ttyp, value = kparams.HexInt64, *(*int64)(unsafe.Pointer(&buffer[0]))\n\tcase tdh.IntypePointer, tdh.IntypeSizet:\n\t\ttyp, value = kparams.HexInt64, *(*uint64)(unsafe.Pointer(&buffer[0]))\n\tcase tdh.IntypeSID:\n\t\ttyp, value = kparams.SID, buffer\n\tcase tdh.IntypeWbemSID:\n\t\ttyp, value = kparams.WbemSID, buffer\n\tcase tdh.IntypeBinary:\n\t\tif nonStructType.OutType == tdh.OutypeIPv6 {\n\t\t\ttyp, value = kparams.IPv6, buffer\n\t\t} else {\n\t\t\ttyp, value = kparams.Binary, buffer\n\t\t}\n\tdefault:\n\t\treturn nil, 
fmt.Errorf(\"unknown type for %q parameter\", name)\n\t}\n\n\treturn kevent.NewKparam(name, typ, value), nil\n}", "title": "" }, { "docid": "3bbe85d37201e4612e6519f383877a2e", "score": "0.4640676", "text": "func getDatumForChType(pgtype C.Oid, pgtypemod C.int, value interface{}, columnType *sql.ColumnType) C.Datum {\n\tvar (\n\t\tvalueDatum C.Datum\n\t\tfinal C.Datum\n\t\t//type input function\n\t\ttypeinput C.regproc\n\t\t//type modifier\n\t\ttypemod C.int\n\t\ttuple C.HeapTuple\n\t)\n\t// Idea is that every type in PG has a type-input function: see https://www.postgresql.org/docs/10/xtypes.html for ref\n\t// we go to these extents in case of types like\n\t// variable size strings varchar(20), varchar(30) and any other types that we don't anticipate\n\t// so we get the type's input function and call it with `valueDatum`\n\t// OidFunctionCall3 is used for the calling the function with provided args\n\n\t/* get the type's output function from system cache*/\n\ttuple = C.wrapper_SearchSysCache1Oid(C.wrapper_ObjectIdGetDatum(pgtype))\n\tif bool(!C.wrapper_HeapTupleIsValid(tuple)) {\n\t\terrLogger.Printf(\"cache lookup failed for type %v\", pgtype)\n\t}\n\n\t// we convert the HeapTuple pointer to pg_type struct\n\t// and access the input function aand modifier\n\t// see include/catalog/pg_type.h for details on these fields\n\ttypeinput = ((C.Form_pg_type)(unsafe.Pointer(C.wrapper_GETSTRUCT(tuple)))).typinput\n\ttypemod = ((C.Form_pg_type)(unsafe.Pointer(C.wrapper_GETSTRUCT(tuple)))).typtypmod\n\tC.ReleaseSysCache(tuple)\n\n\t// switch on the passed types's OID and match the same with passed CH type\n\t// Note that we do the type conversions on best-effort basis here.\n\t// if there's a type that doesn't get handled by default case\n\t// as in its string-repr isn't a valid expr on PG side\n\t// that will come up as error\n\t// but we should fix them retroactively\n\tswitch pgtype {\n\n\t// we plan to map every CH type to NUMERIC type in PG\n\t// so if the target column is of numeric type\n\t// do conversion based on underlying CH type\n\t// note that we don't have a direct wrapper_NumericGetDatum(__)\n\t// because it won't work directly as there is a external conversion required as well.\n\t// so we convert the input value to string repr, and rely on numeric's input function `numeric_in` to convert it to Numeric DataType\n\t// for details see https://stackoverflow.com/questions/48448064/postgresql-c-how-to-convert-uint64-t-into-numeric\n\t// the numeric type is coded at utils/adt/numeric.c\n\tcase C.NUMERICOID:\n\t\t// infoLogger.Printf(\"found numeric vs column type %v\", columnType.DatabaseTypeName())\n\t\tif columnType.DatabaseTypeName() == \"UInt64\" {\n\t\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", value.(uint64))))\n\t\t} else if columnType.DatabaseTypeName() == \"UInt32\" {\n\t\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", value.(uint32))))\n\t\t} else if columnType.DatabaseTypeName() == \"UInt16\" {\n\t\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", value.(uint16))))\n\t\t} else if columnType.DatabaseTypeName() == \"UInt8\" {\n\t\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", value.(uint8))))\n\t\t} else if columnType.DatabaseTypeName() == \"Int64\" {\n\t\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", value.(int64))))\n\t\t} else if columnType.DatabaseTypeName() == \"Int32\" {\n\t\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", 
value.(int32))))\n\t\t} else if columnType.DatabaseTypeName() == \"Int16\" {\n\t\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", value.(int16))))\n\t\t} else if columnType.DatabaseTypeName() == \"Int8\" {\n\t\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", value.(int8))))\n\t\t} else {\n\t\t\t// this should catch everything, but we're mentioning explicit type assertions above so a type still can fallthrough\n\t\t\t// this should also capture decimal values from CH column\n\t\t\t// given the target column is defined either with `numeric` or `numeric(precision, scale)`\n\t\t\t// see https://www.postgresql.org/docs/11/datatype-numeric.html#DATATYPE-NUMERIC-DECIMAL\n\t\t\t// specifying just `numeric` means values of any precision and scale can be stored\n\t\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", value)))\n\t\t}\n\t\t// now we hope that auto type conversion by typeinput will convert valueDatum to Numeric datum\n\n\t// for date type, we follow the same strategy of relying on the type's input function\n\t// see utils/adt/date.c for `date_in`\n\t// parse the available CH value in format `YYYY-MM-DD` and pass it to PG type's input function\n\t// we map CH's `Date` type to PG `date` type\n\tcase C.DATEOID:\n\t\tparsedDate := value.(time.Time)\n\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", parsedDate.Format(\"2006-01-02\"))))\n\n\t// for timestamptz type, we rely on type's input function\n\t// see utils/adt/timestamp.c for input function `timestamptz_in`\n\t// parse the available CH value in format `2006-01-02T15:04:05Z07:00` (RFC3339) and pass it to PG type's input function\n\t// we convert CH's `DateTime` type to PG `timestamptz` type\n\n\t// Note: CH `DateTime` type always returns values in UTC timezone\n\t// although it's possible to annotate `DateTime` with another tz like `DateTime(CET)`\n\t// the returned values from the CH golang driver doesn't returned that tz info.\n\t// so we convert the same into PG tz type\n\t// If the returned value holds the tz info, that will be parsed accordingly.\n\t// displayed tz on `psql` client uses the client's timezone and converts to that.\n\tcase C.TIMESTAMPTZOID:\n\t\tparsedDateTime := value.(time.Time)\n\t\t// debugLogger.Println(\"ch tz type\", columnType.DatabaseTypeName())\n\t\t// debugLogger.Println(\"found timestamp with value %v\", parsedDateTime.Format(time.RFC3339))\n\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", parsedDateTime.Format(time.RFC3339))))\n\n\tcase C.FLOAT4OID:\n\t\ttypeV := reflect.TypeOf(value)\n\t\tswitch typeV.String() {\n\t\tcase \"float32\":\n\t\t\treturn C.wrapper_Float4GetDatum(C.float(value.(float32)))\n\t\tdefault:\n\t\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", value)))\n\t\t}\n\tcase C.FLOAT8OID:\n\t\ttypeV := reflect.TypeOf(value)\n\t\tswitch typeV.String() {\n\t\tcase \"float64\":\n\t\t\treturn C.wrapper_Float8GetDatum(C.double(value.(float64)))\n\t\tcase \"float32\":\n\t\t\treturn C.wrapper_Float8GetDatum(C.double(value.(float32)))\n\t\tdefault:\n\t\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", value)))\n\t\t}\n\tcase C.INT4OID:\n\t\tif columnType.DatabaseTypeName() == \"UInt32\" {\n\t\t\treturn C.wrapper_Int32GetDatum(C.int32((value.(uint32))))\n\t\t} else if columnType.DatabaseTypeName() == \"Int32\" {\n\t\t\treturn C.wrapper_Int32GetDatum(C.int32(value.(int32)))\n\t\t} else {\n\t\t\terrLogger.Printf(\"no appropriate conversion found 
for %v -> %v\", columnType.DatabaseTypeName(), \"INT4\")\n\t\t}\n\tcase C.INT8OID:\n\t\tif columnType.DatabaseTypeName() == \"UInt64\" {\n\t\t\treturn C.wrapper_Int64GetDatum(C.int64((value.(uint64))))\n\t\t} else if columnType.DatabaseTypeName() == \"Int64\" {\n\t\t\treturn C.wrapper_Int64GetDatum(C.int64(value.(int64)))\n\t\t} else {\n\t\t\terrLogger.Printf(\"no appropriate conversion found for %v -> %v\", columnType.DatabaseTypeName(), \"INT8\")\n\t\t}\n\tcase C.INT4ARRAYOID:\n\t\tfallthrough\n\tcase C.INT8ARRAYOID:\n\t\tfallthrough\n\tcase C.OIDARRAYOID:\n\t\tfallthrough\n\tcase C.FLOAT4ARRAYOID:\n\t\tfallthrough\n\tcase C.FLOAT8ARRAYOID:\n\t\tfallthrough\n\tcase C.NUMERICARRAYOID:\n\t\tparsedArray := fmt.Sprintf(\"%v\", value)\n\t\tparsedArray = strings.Replace(parsedArray, \"[\", \"{\", -1)\n\t\tparsedArray = strings.Replace(parsedArray, \"]\", \"}\", -1)\n\t\t// join array elements with \",\"\n\t\tparsedArray = strings.Join(strings.Split(parsedArray, \" \"), \",\")\n\t\t// parsedArray = fmt.Sprintf(\"'%s'\", parsedArray)\n\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(parsedArray))\n\t\t// get Oid for element type of the array\n\t\t// see pg_type.h for reference\n\t\ttypeelem := ((C.Form_pg_type)(unsafe.Pointer(C.wrapper_GETSTRUCT(tuple)))).typelem\n\t\t// infoLogger.Println(\" casting ... for array\", parsedArray, int(typeinput), int(typeelem), int(typemod))\n\t\t// pass typeelem as arg to `array_in` function\n\t\tfinal = C.wrapper_OidFunctionCall3(typeinput, valueDatum, C.wrapper_ObjectIdGetDatum(typeelem), C.wrapper_Int32GetDatum(typemod))\n\t\treturn final\n\n\tcase C.TEXTARRAYOID:\n\t\tfallthrough\n\tcase C.VARCHARARRAYOID:\n\n\t\t// Note: We need to find out the underlying array data type is FixedString(X) or String\n\t\t// because null-terminated FixedString() doesn't work well with `%v`\n\t\tif strings.HasPrefix(columnType.DatabaseTypeName(), \"Array(FixedString\") {\n\t\t\terrLogger.Println(\"FixedString array support on the way\")\n\n\t\t} else if strings.HasPrefix(columnType.DatabaseTypeName(), \"Array(String\") {\n\n\t\t} else {\n\t\t\terrLogger.Println(\"unrecoginzed remote array type from CH\")\n\t\t}\n\t\tassertedValue := value.([]string)\n\t\tbuf := &strings.Builder{}\n\t\tbuf.WriteString(\"{\")\n\t\tfor i, item := range assertedValue {\n\t\t\titem = fmt.Sprintf(\"'%s'\", item)\n\t\t\tif i != 0 {\n\t\t\t\tbuf.WriteString(\",\")\n\t\t\t}\n\t\t\tbuf.WriteString(item)\n\t\t}\n\t\tbuf.WriteString(\"}\")\n\t\t// infoLogger.Println(\" casting ... for array\", buf.String())\n\n\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(buf.String()))\n\t\t// get Oid for element type of the array\n\t\t// see pg_type.h for reference\n\t\ttypeelem := ((C.Form_pg_type)(unsafe.Pointer(C.wrapper_GETSTRUCT(tuple)))).typelem\n\t\t// infoLogger.Println(\" casting ... 
for array\", parsedArray, int(typeinput), int(typeelem), int(typemod))\n\t\t// pass typeelem as arg to `array_in` function\n\t\tfinal = C.wrapper_OidFunctionCall3(typeinput, valueDatum, C.wrapper_ObjectIdGetDatum(typeelem), C.wrapper_Int32GetDatum(typemod))\n\t\treturn final\n\tdefault:\n\t\t// by default cast to string\n\t\tvalueDatum = C.wrapper_CStringGetDatum(C.CString(fmt.Sprintf(\"%v\", value)))\n\t}\n\n\tfinal = C.wrapper_OidFunctionCall3(typeinput, valueDatum, C.wrapper_ObjectIdGetDatum(C.InvalidOid), C.wrapper_Int32GetDatum(typemod))\n\treturn final\n}", "title": "" }, { "docid": "da5b9f669bcb9da13ac1e10f8e62833a", "score": "0.46403816", "text": "func ParseParameters(log log.T, params map[string][]*string, paramsDef map[string]*contracts.Parameter) map[string]interface{} {\n\tresult := make(map[string]interface{})\n\n\tfor name, param := range params {\n\n\t\tif definition, ok := paramsDef[name]; ok {\n\t\t\tswitch definition.ParamType {\n\t\t\tcase contracts.ParamTypeString:\n\t\t\t\tresult[name] = *(param[0])\n\t\t\tcase contracts.ParamTypeStringList:\n\t\t\t\tnewParam := []string{}\n\t\t\t\tfor _, value := range param {\n\t\t\t\t\tnewParam = append(newParam, *value)\n\t\t\t\t}\n\t\t\t\tresult[name] = newParam\n\t\t\tcase contracts.ParamTypeStringMap:\n\t\t\t\tresult[name] = *(param[0])\n\t\t\tdefault:\n\t\t\t\tlog.Debug(\"unknown parameter type \", definition.ParamType)\n\t\t\t}\n\t\t}\n\t}\n\tlog.Debug(\"Parameters to be applied are \", result)\n\treturn result\n}", "title": "" }, { "docid": "6009ffdd90e8303a28ef7591d59f4520", "score": "0.46358767", "text": "func convertToTypedObject(in, out interface{}) error {\n\tif in == nil || out == nil {\n\t\treturn fmt.Errorf(\"convert objects should not be nil\")\n\t}\n\n\tswitch v := in.(type) {\n\tcase *unstructured.Unstructured:\n\t\treturn runtime.DefaultUnstructuredConverter.FromUnstructured(v.UnstructuredContent(), out)\n\tcase map[string]interface{}:\n\t\treturn runtime.DefaultUnstructuredConverter.FromUnstructured(v, out)\n\tdefault:\n\t\treturn fmt.Errorf(\"convert object must be pointer of unstructured or map[string]interface{}\")\n\t}\n}", "title": "" }, { "docid": "2211fdcb1293d4e3e4bb22317aba3eb2", "score": "0.4634326", "text": "func (s *BaseJavaParserListener) EnterNonWildcardTypeArguments(ctx *NonWildcardTypeArgumentsContext) {\n}", "title": "" }, { "docid": "d5eca4295a891c6564385fbc65e0637e", "score": "0.46249464", "text": "func httpParamsToArgs(funcInfo *FuncWrapper, r *http.Request) ([]reflect.Value, error) {\n\targTypes := funcInfo.args\n\targNames := funcInfo.argNames\n\n\tvar err error\n\tvalues := make([]reflect.Value, len(argNames))\n\tfor i, name := range argNames {\n\t\tty := argTypes[i]\n\t\targ := GetParam(r, name)\n\t\tvalues[i], err = _jsonStringToArg(ty, arg)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn values, nil\n}", "title": "" }, { "docid": "8312b811130a6534bb232c7d1f26d386", "score": "0.4615812", "text": "func applyTypeConversionForGoSwaggerToGocql( debug bool, output string, indent string, suffix string, fieldName string, fieldType string, field parser.FieldDetails, parserOutput parser.ParseOutput ) string {\n\n\t//ret := output + INDENT_1 + INDENT2 + indent\n\tret := output\n\tif debug {fmt.Printf(\"mapGoSwaggerToGoCSQLFieldType %s %s\\n \", fieldName,fieldType )}\n\n\tfieldName = suffix + fieldName\n\tswitch strings.ToLower(fieldType) {\n\tcase \"int\":\n\t\tret = ret + \"int(\" + fieldName + \")\"\n\tcase \"timestamp\":\n\t\tret = ret + PARSERTIME_FUNC_NAME + \"(\" + 
fieldName + \")\"\n\tcase \"float\":\n\t\tret = ret + \"float32(\" + fieldName + \")\"\n\tcase \"map\":\n\t\tlog.Fatalln(\"applyTypeConversionForGoSwaggerToGocql encountered type which is not supported\", fieldType)\n\tcase \"list\": fallthrough\n\tcase \"set\":\n\t\tif swagger.IsFieldTypeUDT(parserOutput, field.DbFieldCollectionType) {\n\t\t\tlog.Fatal( \"Sorry currently unable to handle map types that contain UDTs in sets or list themselves \")\n\t\t}\n\t\ttmp := CopyArrayElements( debug , false, false , INDENT_1 + indent , fieldName, \"\" , field, parserOutput )\n\t\tret = ret + tmp\n\n\tdefault:\n\t\tret = ret + fieldName\n\t}\n\n\tif debug { fmt.Printf(\"mapGoSwaggerToGoCSQLFieldType returning %s from field %s type %s\\n\", ret, fieldName, fieldType ) }\n\treturn ret\n}", "title": "" }, { "docid": "57d85b34dd2e059588baa5ab9c972681", "score": "0.4605353", "text": "func (self *PhysicsNinjaTile) UpdateTypeI(args ...interface{}) {\n self.Object.Call(\"updateType\", args)\n}", "title": "" }, { "docid": "bdbbcb5927f13190ff636a485dfca20c", "score": "0.4601572", "text": "func TypeCheckParameter(obj interface{}, expected string, name string) error {\n\t// Make sure there is an object.\n\tif obj == nil {\n\t\treturn nil\n\t}\n\n\t// Check the type is as expected.\n\tif reflect.TypeOf(obj).String() != expected {\n\t\treturn fmt.Errorf(\"Expected %s to be of type %s but received %s.\", name, expected, reflect.TypeOf(obj).String())\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d306ecbe61ab0dc2daa4795b143f2b3c", "score": "0.4597771", "text": "func populateTypeParamParents(tw *trap.Writer, typeparams *types.TypeParamList, parent types.Object) {\n\tif typeparams != nil {\n\t\tfor idx := 0; idx < typeparams.Len(); idx++ {\n\t\t\tsetTypeParamParent(typeparams.At(idx), parent)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "d00a134e3d60f26f7e6e916c7970c033", "score": "0.45918143", "text": "func funcArgTypes(f interface{}) []reflect.Type {\n\tt := reflect.TypeOf(f)\n\tn := t.NumIn()\n\ttypez := make([]reflect.Type, n)\n\tfor i := 0; i < n; i++ {\n\t\ttypez[i] = t.In(i)\n\t}\n\treturn typez\n}", "title": "" }, { "docid": "b822b204d11b53752033b59723c97811", "score": "0.4590699", "text": "func (s *stmt) mapArgs(nvargs []driver.NamedValue) error {\n\tfor i := 0; i < len(nvargs); i++ {\n\n\t\tfield := s.pr.parameterField(i)\n\n\t\tout, isOut := nvargs[i].Value.(sql.Out)\n\n\t\tif isOut {\n\t\t\tif !field.Out() {\n\t\t\t\treturn fmt.Errorf(\"argument %d field %s mismatch - use out argument with non-out field\", i, field.Name())\n\t\t\t}\n\t\t\tif out.In && !field.In() {\n\t\t\t\treturn fmt.Errorf(\"argument %d field %s mismatch - use in argument with out field\", i, field.Name())\n\t\t\t}\n\t\t}\n\n\t\t// currently we do not support out parameters\n\t\tif isOut {\n\t\t\treturn fmt.Errorf(\"argument %d field %s mismatch - out argument not supported\", i, field.Name())\n\t\t}\n\n\t\tvar err error\n\t\tif isOut {\n\t\t\tif out.In { // convert only if in parameter\n\t\t\t\tif out.Dest, err = s.convert(field, out.Dest); err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"argument %d field %s conversion error - %w\", i, field.Name(), err)\n\t\t\t\t}\n\t\t\t\tnvargs[i].Value = out\n\t\t\t}\n\t\t} else {\n\t\t\tif nvargs[i].Value, err = s.convert(field, nvargs[i].Value); err != nil {\n\t\t\t\treturn fmt.Errorf(\"argument %d field %s conversion error - %w\", i, field.Name(), err)\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "b911ea2812379b9ceba0cc8bbcf95f08", "score": "0.45903692", "text": "func (me 
*TglesNewparamType) Walk() (err error) {\r\n\tif fn := WalkHandlers.TglesNewparamType; me != nil {\r\n\t\tif fn != nil {\r\n\t\t\tif err = fn(me, true); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\t\treturn\r\n\t\t\t}\r\n\t\t}\r\n\t\tif err = me.XsdGoPkgHasElem_modifiersequenceglsl_newparam_typeschema_Modifier_TfxModifierEnum_.Walk(); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\treturn\r\n\t\t}\r\n\t\tif err = me.XsdGoPkgHasElem_semanticsequenceglsl_newparam_typeschema_Semantic_XsdtNCName_.Walk(); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\treturn\r\n\t\t}\r\n\t\tif err = me.XsdGoPkgHasGroup_GlesParamGroup.Walk(); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\treturn\r\n\t\t}\r\n\t\tif err = me.XsdGoPkgHasElems_annotatesequenceglsl_newparam_typeschema_Annotate_TfxAnnotateType_.Walk(); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\treturn\r\n\t\t}\r\n\t\tif fn != nil {\r\n\t\t\tif err = fn(me, false); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\t\treturn\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\treturn\r\n}", "title": "" }, { "docid": "35c5eb3a71eb5dbc38d4eb24bcc41efa", "score": "0.45901334", "text": "func (p Parameter) MarshalJSON() ([]byte, error) {\n\tvar (\n\t\tresultRawValue json.RawMessage\n\t\tresultErr error\n\t)\n\tif p.Value == nil {\n\t\tif _, ok := validParamTypes[p.Type]; ok && p.Type != UnknownType {\n\t\t\treturn json.Marshal(rawParameter{Type: p.Type})\n\t\t}\n\t\treturn nil, fmt.Errorf(\"can't marshal %s\", p.Type)\n\t}\n\tswitch p.Type {\n\tcase BoolType, StringType, Hash160Type, Hash256Type:\n\t\tresultRawValue, resultErr = json.Marshal(p.Value)\n\tcase IntegerType:\n\t\tval, ok := p.Value.(int64)\n\t\tif !ok {\n\t\t\tresultErr = errors.New(\"invalid integer value\")\n\t\t\tbreak\n\t\t}\n\t\tvalStr := strconv.FormatInt(val, 10)\n\t\tresultRawValue = json.RawMessage(`\"` + valStr + `\"`)\n\tcase PublicKeyType, ByteArrayType, SignatureType:\n\t\tif p.Type == PublicKeyType {\n\t\t\tresultRawValue, resultErr = json.Marshal(hex.EncodeToString(p.Value.([]byte)))\n\t\t} else {\n\t\t\tresultRawValue, resultErr = json.Marshal(base64.StdEncoding.EncodeToString(p.Value.([]byte)))\n\t\t}\n\tcase ArrayType:\n\t\tvar value = p.Value.([]Parameter)\n\t\tif value == nil {\n\t\t\tresultRawValue, resultErr = json.Marshal([]Parameter{})\n\t\t} else {\n\t\t\tresultRawValue, resultErr = json.Marshal(value)\n\t\t}\n\tcase MapType:\n\t\tppair := p.Value.([]ParameterPair)\n\t\tresultRawValue, resultErr = json.Marshal(ppair)\n\tcase InteropInterfaceType, AnyType:\n\t\tresultRawValue = nil\n\tdefault:\n\t\tresultErr = fmt.Errorf(\"can't marshal %s\", p.Type)\n\t}\n\tif resultErr != nil {\n\t\treturn nil, resultErr\n\t}\n\treturn json.Marshal(rawParameter{\n\t\tType: p.Type,\n\t\tValue: resultRawValue,\n\t})\n}", "title": "" }, { "docid": "c4580d4fdb75cabe37cc61e164e129fb", "score": "0.45866778", "text": "func (show *PhysicalShow) SetParamType(paramExprs *[]ast.ParamMarkerExpr) (err error) {\n\treturn err\n}", "title": "" }, { "docid": "c4573c1e6b758a160d958496c5439c14", "score": "0.45842153", "text": "func modifiable(origin *types.FieldType, to *types.FieldType) error {\n\tunsupportedMsg := fmt.Sprintf(\"type %v not match origin %v\", to.CompactStr(), origin.CompactStr())\n\tswitch origin.Tp {\n\tcase mysql.TypeVarchar, mysql.TypeString, mysql.TypeVarString,\n\t\tmysql.TypeBlob, mysql.TypeTinyBlob, 
mysql.TypeMediumBlob, mysql.TypeLongBlob:\n\t\tswitch to.Tp {\n\t\tcase mysql.TypeVarchar, mysql.TypeString, mysql.TypeVarString,\n\t\t\tmysql.TypeBlob, mysql.TypeTinyBlob, mysql.TypeMediumBlob, mysql.TypeLongBlob:\n\t\tdefault:\n\t\t\treturn errUnsupportedModifyColumn.GenWithStackByArgs(unsupportedMsg)\n\t\t}\n\tcase mysql.TypeTiny, mysql.TypeShort, mysql.TypeInt24, mysql.TypeLong, mysql.TypeLonglong:\n\t\tswitch to.Tp {\n\t\tcase mysql.TypeTiny, mysql.TypeShort, mysql.TypeInt24, mysql.TypeLong, mysql.TypeLonglong:\n\t\tdefault:\n\t\t\treturn errUnsupportedModifyColumn.GenWithStackByArgs(unsupportedMsg)\n\t\t}\n\tcase mysql.TypeEnum:\n\t\tif origin.Tp != to.Tp {\n\t\t\tmsg := fmt.Sprintf(\"cannot modify enum type column's to type %s\", to.String())\n\t\t\treturn errUnsupportedModifyColumn.GenWithStackByArgs(msg)\n\t\t}\n\t\tif len(to.Elems) < len(origin.Elems) {\n\t\t\tmsg := fmt.Sprintf(\"the number of enum column's elements is less than the original: %d\", len(origin.Elems))\n\t\t\treturn errUnsupportedModifyColumn.GenWithStackByArgs(msg)\n\t\t}\n\t\tfor index, originElem := range origin.Elems {\n\t\t\ttoElem := to.Elems[index]\n\t\t\tif originElem != toElem {\n\t\t\t\tmsg := fmt.Sprintf(\"cannot modify enum column value %s to %s\", originElem, toElem)\n\t\t\t\treturn errUnsupportedModifyColumn.GenWithStackByArgs(msg)\n\t\t\t}\n\t\t}\n\tcase mysql.TypeNewDecimal:\n\t\t// The root cause is modifying decimal precision needs to rewrite binary representation of that decimal.\n\t\tif to.Flen != origin.Flen || to.Decimal != origin.Decimal {\n\t\t\treturn errUnsupportedModifyColumn.GenWithStackByArgs(\"can't change decimal column precision\")\n\t\t}\n\tdefault:\n\t\tif origin.Tp != to.Tp {\n\t\t\treturn errUnsupportedModifyColumn.GenWithStackByArgs(unsupportedMsg)\n\t\t}\n\t}\n\n\tif to.Flen > 0 && to.Flen < origin.Flen {\n\t\tmsg := fmt.Sprintf(\"length %d is less than origin %d\", to.Flen, origin.Flen)\n\t\treturn errUnsupportedModifyColumn.GenWithStackByArgs(msg)\n\t}\n\tif to.Decimal > 0 && to.Decimal < origin.Decimal {\n\t\tmsg := fmt.Sprintf(\"decimal %d is less than origin %d\", to.Decimal, origin.Decimal)\n\t\treturn errUnsupportedModifyColumn.GenWithStackByArgs(msg)\n\t}\n\n\ttoUnsigned := mysql.HasUnsignedFlag(to.Flag)\n\toriginUnsigned := mysql.HasUnsignedFlag(origin.Flag)\n\tif originUnsigned != toUnsigned {\n\t\tmsg := fmt.Sprintf(\"can't change unsigned integer to signed or vice versa\")\n\t\treturn errUnsupportedModifyColumn.GenWithStackByArgs(msg)\n\t}\n\n\terr := modifiableCharsetAndCollation(to.Charset, to.Collate, origin.Charset, origin.Collate)\n\treturn errors.Trace(err)\n}", "title": "" }, { "docid": "f0104a599f5b2d1cff8fb2eeab8745c5", "score": "0.4583855", "text": "func toInputType(t, attrs string) string {\n\n\tswitch t {\n\tcase \"string\", \"[]string\":\n\t\tswitch structTag(attrs, \"subtype\") { // various possibilities - distinguish by subtype\n\t\tcase \"separator\":\n\t\t\treturn \"separator\"\n\t\tcase \"fieldset\":\n\t\t\treturn \"fieldset\"\n\t\tcase \"date\":\n\t\t\treturn \"date\"\n\t\tcase \"time\":\n\t\t\treturn \"time\"\n\t\tcase \"textarea\":\n\t\t\treturn \"textarea\"\n\t\tcase \"select\":\n\t\t\treturn \"select\"\n\t\tcase \"radiogroup\":\n\t\t\treturn \"radiogroup\"\n\t\t}\n\t\treturn \"text\"\n\tcase \"int\", \"float64\", \"[]int\", \"[]float64\":\n\t\tswitch structTag(attrs, \"subtype\") { // might want dropdown, for instance for list of years\n\t\tcase \"select\":\n\t\t\treturn \"select\"\n\t\t}\n\t\treturn \"number\"\n\tcase \"bool\", 
\"[]bool\":\n\t\tswitch structTag(attrs, \"subtype\") { // not always checkbox, but sometimes dropdown\n\t\tcase \"select\":\n\t\t\treturn \"select\"\n\t\t}\n\t\treturn \"checkbox\"\n\tcase \"[]uint8\":\n\t\treturn \"file\"\n\t}\n\treturn \"text\"\n}", "title": "" }, { "docid": "f0a3db64cad6d1b71572e6cb3d93f8b5", "score": "0.45829776", "text": "func MarshalWithParams(val interface{}, params string) ([]byte, error) {}", "title": "" }, { "docid": "76d28171f8716838fb898034412e024f", "score": "0.45751545", "text": "func (o *GetFacilitiesParams) bindType(rawData []string, hasKey bool, formats strfmt.Registry) error {\n\tvar raw string\n\tif len(rawData) > 0 {\n\t\traw = rawData[len(rawData)-1]\n\t}\n\n\t// Required: false\n\t// AllowEmptyValue: false\n\tif raw == \"\" { // empty values pass all other validations\n\t\treturn nil\n\t}\n\n\to.Type = &raw\n\n\treturn nil\n}", "title": "" }, { "docid": "2ae75040ebc617bc2ca12cd6283582da", "score": "0.45602208", "text": "func (d *wrappedProtoDescriptor) ConvertParams(method string, params []interface{}) ([]interface{}, error) {\n\tm := d.MethodDescriptor(method)\n\tif m == nil {\n\t\treturn nil, fmt.Errorf(\"can't find method %s in proto\", method)\n\t}\n\tim := m.GetInputType()\n\treturn d.convertParams(im, params)\n}", "title": "" }, { "docid": "a20d5ecbbfd04b66eed2fa54ae41e5ea", "score": "0.45566306", "text": "func ParamTypeTable() params.TypeTable {\n\treturn params.NewTypeTable().RegisterParamSet(&Params{})\n}", "title": "" }, { "docid": "a20d5ecbbfd04b66eed2fa54ae41e5ea", "score": "0.45566306", "text": "func ParamTypeTable() params.TypeTable {\n\treturn params.NewTypeTable().RegisterParamSet(&Params{})\n}", "title": "" }, { "docid": "8ec530cd690bcd30a5286614f3ecc452", "score": "0.45488152", "text": "func (g *generator) formattedTypeParams(it *model.Interface, pkgOverride string) (string, string) {\n\tif len(it.TypeParams) == 0 {\n\t\treturn \"\", \"\"\n\t}\n\tvar long, short strings.Builder\n\tlong.WriteString(\"[\")\n\tshort.WriteString(\"[\")\n\tfor i, v := range it.TypeParams {\n\t\tif i != 0 {\n\t\t\tlong.WriteString(\", \")\n\t\t\tshort.WriteString(\", \")\n\t\t}\n\t\tlong.WriteString(v.Name)\n\t\tshort.WriteString(v.Name)\n\t\tlong.WriteString(fmt.Sprintf(\" %s\", v.Type.String(g.packageMap, pkgOverride)))\n\t}\n\tlong.WriteString(\"]\")\n\tshort.WriteString(\"]\")\n\treturn long.String(), short.String()\n}", "title": "" }, { "docid": "abb8f7aea4aa427c7263e1090f250008", "score": "0.45476356", "text": "func (me *Tgles2NewparamType) Walk() (err error) {\r\n\tif fn := WalkHandlers.Tgles2NewparamType; me != nil {\r\n\t\tif fn != nil {\r\n\t\t\tif err = fn(me, true); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\t\treturn\r\n\t\t\t}\r\n\t\t}\r\n\t\tif err = me.XsdGoPkgHasGroup_Gles2ValueGroup.Walk(); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\treturn\r\n\t\t}\r\n\t\tif err = me.XsdGoPkgHasElem_semanticsequenceglsl_newparam_typeschema_Semantic_XsdtNCName_.Walk(); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\treturn\r\n\t\t}\r\n\t\tif err = me.XsdGoPkgHasElems_annotatesequenceglsl_newparam_typeschema_Annotate_TfxAnnotateType_.Walk(); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\treturn\r\n\t\t}\r\n\t\tif err = me.XsdGoPkgHasElem_modifiersequenceglsl_newparam_typeschema_Modifier_TfxModifierEnum_.Walk(); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\treturn\r\n\t\t}\r\n\t\tif 
fn != nil {\r\n\t\t\tif err = fn(me, false); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\r\n\t\t\t\treturn\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\treturn\r\n}", "title": "" }, { "docid": "39e5e7169f9df20b8935a9f4254e0b79", "score": "0.45467335", "text": "func (g *generator) argumentTypeName(expr model.Expression, destType model.Type) string {\n\tsuffix := \"Args\"\n\tif g.insideFunctionInvoke {\n\t\tsuffix = \"InputArgs\"\n\t}\n\treturn g.argumentTypeNameWithSuffix(expr, destType, suffix)\n}", "title": "" } ]
6223cb93b2e4714191218eeb57b0b8c4
prepare a cookie which has no content and has already expired
[ { "docid": "266771389b0cc51900aab8c842fa28d4", "score": "0.5347544", "text": "func makeDeleteFlash() *http.Cookie {\n\tcookie := new(http.Cookie)\n\tcookie.Name = flashName\n\tcookie.Value = base64.URLEncoding.EncodeToString([]byte(\"\"))\n\tcookie.Expires = time.Unix(0, 0)\n\tcookie.Path = \"/\"\n\treturn cookie\n}", "title": "" } ]
[ { "docid": "e091fb77cbe7b984bbd8c7f0661cd6df", "score": "0.7004918", "text": "func genCookie(cookiename, value string) *http.Cookie {\n return &http.Cookie{\n Name: cookiename,\n Value: value,\n Expires: time.Now().Add(24 * time.Hour),\n }\n}", "title": "" }, { "docid": "3959ef5dce76c38793b4089989459e53", "score": "0.6622748", "text": "func invalidateCookie(res http.ResponseWriter) {\n\t// set the experiation date to last year\n\texpire := time.Now().AddDate(-1, 0, 0)\n\tcookie := http.Cookie{Name: \"uuid\", Path: \"/\", Expires: expire}\n\thttp.SetCookie(res, &cookie)\n}", "title": "" }, { "docid": "4960a384b4dfcc710a717957b032c6b7", "score": "0.63774216", "text": "func PrepareCookie(ingredients []Ingredient) Cookie {\n\tif len(ingredients) < 1 {\n\t\tpanic((\"No ingredients, how to make a cookie?\"))\n\t}\n\treturn Cookie{Ingredients: ingredients}\n}", "title": "" }, { "docid": "38ed613fa75988bc65c5a86156e74e04", "score": "0.617808", "text": "func newCookie() (cookie *http.Cookie) {\n\tcookie = &http.Cookie{\n\t\tName: cookieSessionName,\n\t\tValue: cookieInformationEncoding(),\n\t\tHttpOnly: true,\n\t\t//Secure: false,\n\t}\n\treturn\n}", "title": "" }, { "docid": "588a615665eea4d357e47f0be309d130", "score": "0.61566323", "text": "func createDefaultCookie() {\r\n\tsettingMutex.Lock()\r\n\tdefer settingMutex.Unlock()\r\n\tdefaultCookieJar, _ = cookiejar.New(nil)\r\n}", "title": "" }, { "docid": "1fee043e7ec0cb1770c633227da92b4c", "score": "0.61465174", "text": "func Cookie(value string) *SimpleElement { return newSEString(\"cookie\", value) }", "title": "" }, { "docid": "2d84a74c2fc92be6c0790cbcb48df785", "score": "0.6128482", "text": "func readCreateCookie(req *http.Request) (cookie *http.Cookie) {\n\tcookie, err := req.Cookie(cookieSessionName) // get if a cookie already exists (had not expired)\n\tif err == http.ErrNoCookie {\n\t\tcookie = newCookie() // need a new cookie.\n\t}\n\treturn\n}", "title": "" }, { "docid": "ca7e7951af11fe9070d3f1f516ad62d9", "score": "0.6082405", "text": "func NewCookie(name string, value string, age int64) *http.Cookie {\n\tvar utctime time.Time\n\tif age == 0 {\n\t\t// 2^31 - 1 seconds (roughly 2038)\n\t\tutctime = time.Unix(2147483647, 0)\n\t} else {\n\t\tutctime = time.Unix(time.Now().Unix()+age, 0)\n\t}\n\treturn &http.Cookie{Name: name, Value: value, Expires: utctime}\n}", "title": "" }, { "docid": "7463a96563c12e47df705c080eb2b70c", "score": "0.6033965", "text": "func NewCookie() *Cookie {\n\treturn &Cookie{\n\t\tuids: make(map[string]uidWithExpiry),\n\t}\n}", "title": "" }, { "docid": "2cab7012e6e35286891f17716517cb96", "score": "0.6011771", "text": "func (cookie *Cookie) SetCookieOnResponse(w http.ResponseWriter, setSiteCookie bool, cfg *config.HostCookie, ttl time.Duration) {\n\thttpCookie := cookie.ToHTTPCookie(ttl)\n\tvar domain string = cfg.Domain\n\thttpCookie.Secure = true\n\n\tif domain != \"\" {\n\t\thttpCookie.Domain = domain\n\t}\n\n\tvar currSize int = len([]byte(httpCookie.String()))\n\tfor cfg.MaxCookieSizeBytes > 0 && currSize > cfg.MaxCookieSizeBytes && len(cookie.uids) > 0 {\n\t\tvar oldestElem string = \"\"\n\t\tvar oldestDate int64 = math.MaxInt64\n\t\tfor key, value := range cookie.uids {\n\t\t\ttimeUntilExpiration := time.Until(value.Expires)\n\t\t\tif timeUntilExpiration < time.Duration(oldestDate) {\n\t\t\t\toldestElem = key\n\t\t\t\toldestDate = int64(timeUntilExpiration)\n\t\t\t}\n\t\t}\n\t\tdelete(cookie.uids, oldestElem)\n\t\thttpCookie = cookie.ToHTTPCookie(ttl)\n\t\tif domain != \"\" {\n\t\t\thttpCookie.Domain = 
domain\n\t\t}\n\t\tcurrSize = len([]byte(httpCookie.String()))\n\t}\n\n\tif setSiteCookie {\n\t\t// httpCookie.Secure = true\n\t\thttpCookie.SameSite = http.SameSiteNoneMode\n\t}\n\tw.Header().Add(\"Set-Cookie\", httpCookie.String())\n}", "title": "" }, { "docid": "62b7f21579710c4494352fe67bb7f335", "score": "0.6004418", "text": "func createSessionCookie(token string, maxAge int) *http.Cookie {\n cookie := http.Cookie{}\n cookie.Name = SessionCookieName\n cookie.Value = token\n cookie.Path = \"/\"\n cookie.MaxAge = maxAge\n cookie.Secure = true\n cookie.HttpOnly = true\n return &cookie\n}", "title": "" }, { "docid": "eed3524f90b79fb74371f33e712fd569", "score": "0.5922332", "text": "func (c *Cookie) Delete() *Cookie { return c.MaxAgeDays(-30).HTTPOnly(false) }", "title": "" }, { "docid": "465059cc95da7cfcab2d0e7b8780acf8", "score": "0.5846672", "text": "func setHydraCookie(w http.ResponseWriter, name, value string) {\n\thttp.SetCookie(w, &http.Cookie{\n\t\tName: name,\n\t\tValue: value,\n\t\tExpires: time.Now().Add(7 * 24 * time.Hour),\n\t})\n}", "title": "" }, { "docid": "307dc57cee03bf7dfe1798eb6cc82038", "score": "0.5837882", "text": "func unexpiredCookies(resp *http.Response) (cookies []*http.Cookie) {\n\tfor _, cookie := range resp.Cookies() {\n\t\tif cookie.RawExpires == \"\" || cookie.Expires.After(time.Now()) {\n\t\t\tcookies = append(cookies, cookie)\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "d8cc92f8599369a8d6d9bb0e0d942dea", "score": "0.57905513", "text": "func makeFlashCookie(note string) *http.Cookie {\n\tcookie := new(http.Cookie)\n\tcookie.Name = flashName\n\tcookie.Value = base64.URLEncoding.EncodeToString([]byte(note))\n\tcookie.Expires = time.Now().Add(flashDuration)\n\tcookie.Path = \"/\"\n\treturn cookie\n}", "title": "" }, { "docid": "2f7800babc7a5108568838b197a84d35", "score": "0.57850325", "text": "func removeHydraCookie(w http.ResponseWriter, name string) {\n\thttp.SetCookie(w, &http.Cookie{\n\t\tName: name,\n\t\tValue: \"\",\n\t\tExpires: time.Unix(0, 0),\n\t})\n}", "title": "" }, { "docid": "a580630e559b683adaa713918f3bd943", "score": "0.57798815", "text": "func (cookie *Cookie) Expires(expires time.Time) *Cookie {\n\tcookie.expires = &expires\n\treturn cookie\n}", "title": "" }, { "docid": "e8d3e35e47c31c5ed77cc8f9c7d3cd76", "score": "0.5776426", "text": "func setCookie(res http.ResponseWriter, req *http.Request, id string) error {\r\n\t//name of cookies = \"cookie\" for 1hr & \"CRA\" for 2yrs\r\n\tco, _ := req.Cookie(\"CRA\")\r\n\tco = &http.Cookie{\r\n\t\tName: \"CRA\",\r\n\t\tValue: id,\r\n\t\tHttpOnly: false,\r\n\t\tExpires: time.Now().AddDate(2, 0, 0),\r\n\t}\r\n\thttp.SetCookie(res, co)\r\n\t// fmt.Println(\"Htmlmain.setCookie - done with set id = \", id)\r\n\treturn nil\r\n}", "title": "" }, { "docid": "02f93d61b5eb148f868fb91f69ea90d7", "score": "0.5755394", "text": "func createJwtCookie(token string) *http.Cookie {\n\tcookie := &http.Cookie{}\n\tcookie.Name = \"JWTCookie\"\n\tcookie.Value = token\n\tcookie.Expires = time.Now().Add(time.Hour * 72)\n\tcookie.Path = \"/\"\n\tcookie.HttpOnly = false // change this later\n\tcookie.Secure = false // change this later\n\t// cookie.SameSite = http.SameSiteLaxMode\n\t// cookie.Domain = \"api.saferwall.com\"\n\treturn cookie\n}", "title": "" }, { "docid": "5a7040abfb4902e5dd372344543a8058", "score": "0.5747644", "text": "func CreateCookie(w http.ResponseWriter, r *http.Request, n string, v string) bool {\n\tcookie, err := r.Cookie(n)\n\tif err != nil {\n\t\tcookie = &http.Cookie{\n\t\t\tName: 
n,\n\t\t\tValue: v,\n\t\t}\n\t\thttp.SetCookie(w, cookie)\n\t\treturn true;\n\t}\n\treturn false;\n}", "title": "" }, { "docid": "cf679e6e63094590b8b7e4443a2adaef", "score": "0.5743244", "text": "func FromHTTPCookie(httpCookie *http.Cookie) *Cookie {\n\treturn NewCookie(httpCookie.Name).\n\t\tValue(httpCookie.Value).\n\t\tPath(httpCookie.Path).\n\t\tDomain(httpCookie.Domain).\n\t\tExpires(httpCookie.Expires).\n\t\tMaxAge(httpCookie.MaxAge).\n\t\tSecure(httpCookie.Secure).\n\t\tHttpOnly(httpCookie.HttpOnly)\n}", "title": "" }, { "docid": "e3f44470d5f2b71d2cdd2924f565171b", "score": "0.57281655", "text": "func (c *Cookie) Secure(secure bool) *Cookie { c.secure = secure; return c }", "title": "" }, { "docid": "fa5ecce9a4f365497a6d21aae3fda016", "score": "0.57248545", "text": "func CreateRefreshCookie(value string, dur time.Duration) *http.Cookie {\n\treturn &http.Cookie{\n\t\tName: \"refresh_token\",\n\t\tValue: value,\n\t\tExpires: time.Now().Local().Add(dur),\n\t\tSameSite: http.SameSiteNoneMode,\n\t\tSecure: true,\n\t\tHttpOnly: true,\n\t}\n}", "title": "" }, { "docid": "23875ad9bd7df55925d0f8a62bb8ced9", "score": "0.57175434", "text": "func GenCookie(username string) http.Cookie {\n // generate random 50 byte string to use as a session cookie\n randomValue := make([]byte, COOKIE_LENGTH)\n rand.Read(randomValue)\n cookieValue := strings.ToLower(username) + \":\" + fmt.Sprintf(\"%X\", randomValue)\n expire := time.Now().AddDate(0, 0, 1)\n return http.Cookie{Name: \"SessionID\", Value: cookieValue, Expires: expire, HttpOnly: true}\n}", "title": "" }, { "docid": "3622086b9fb9681bc0a6943194d54f6f", "score": "0.57132953", "text": "func Test_Ctx_Cookie(t *testing.T) {\n\tt.Parallel()\n\tapp := New()\n\tctx := app.AcquireCtx(&fasthttp.RequestCtx{})\n\tdefer app.ReleaseCtx(ctx)\n\texpire := time.Now().Add(24 * time.Hour)\n\tvar dst []byte\n\tdst = expire.In(time.UTC).AppendFormat(dst, time.RFC1123)\n\thttpdate := strings.Replace(string(dst), \"UTC\", \"GMT\", -1)\n\tctx.Cookie(&Cookie{\n\t\tName: \"username\",\n\t\tValue: \"john\",\n\t\tExpires: expire,\n\t})\n\texpect := \"username=john; expires=\" + httpdate + \"; path=/; SameSite=Lax\"\n\tutils.AssertEqual(t, expect, string(ctx.Fasthttp.Response.Header.Peek(HeaderSetCookie)))\n\n\tctx.Cookie(&Cookie{SameSite: \"strict\"})\n\tctx.Cookie(&Cookie{SameSite: \"none\"})\n}", "title": "" }, { "docid": "72ca114f21e59feb2a4452ee7a37274e", "score": "0.5708279", "text": "func SetCookieToken(w http.ResponseWriter, token string) http.ResponseWriter {\n expiration := time.Now().Add(time.Minute * time.Duration(tokenValidity))\n cookie := http.Cookie{Name: \"Token\", Value: token, Expires: expiration}\n http.SetCookie(w, &cookie)\n return w\n}", "title": "" }, { "docid": "72a6c18c5712ea7b037c6d496b16f7b5", "score": "0.56858224", "text": "func (s *CookieStore) makeCSRFCookie(req *http.Request, value string, expiration time.Duration, now time.Time) *http.Cookie {\n\treturn s.makeCookie(req, s.CSRFCookieName, value, expiration, now)\n}", "title": "" }, { "docid": "c48182d54dce679923314454a7d8487e", "score": "0.56628364", "text": "func (c *Cookies) CreateCookie() revel.Result {\n\tvar jsonData map[string]interface{}\n\tc.Params.BindJSON(&jsonData)\n\n\tname, okname := jsonData[\"name\"].(string)\n\tdescription, okdesc := jsonData[\"description\"].(string)\n\tprice, okprice := jsonData[\"price\"].(float64)\n\tquantity, okquantity := jsonData[\"quantity\"].(float64)\n\n\tif !okname || !okdesc || !okprice || !okquantity {\n\t\treturn 
c.RenderJSON(map[string]string{\"status\": \"Invalid Parameters\"})\n\t}\n\n\tcookie := models.Cookie{\n\t\tName: name,\n\t\tDescription: description,\n\t\tPrice: uint(price),\n\t\tQuantity: uint(quantity),\n\t}\n\n\tcookie.Validate(c.Validation)\n\tif c.Validation.HasErrors() {\n\t\treturn c.RenderJSON(map[string]string{\"status\": \"Invalid Parameters\"})\n\t}\n\n\t_, err := services.InsertCookie(cookie)\n\tif err != nil {\n\t\tc.Response.Status = http.StatusBadRequest\n\t\treturn c.RenderJSON(map[string]string{\"status\": \"Invalid Request\"})\n\t}\n\n\treturn c.RenderJSON(jsonData)\n}", "title": "" }, { "docid": "88eb998439647f541d067c3e376616bf", "score": "0.56609505", "text": "func CreateMiscreantCookieCipher(cookieSecret []byte) func(s *CookieStore) error {\n\treturn func(s *CookieStore) error {\n\t\tcipher, err := aead.NewMiscreantCipher(cookieSecret)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"miscreant cookie-secret error: %s\", err.Error())\n\t\t}\n\t\ts.CookieCipher = cipher\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "0e81464d131f5a924ac846ca284b8d7e", "score": "0.56298476", "text": "func CreateCookie(name, value, domain, path string, maxage int, httpOnly bool) (*http.Cookie, error) {\n\t// Validate input data\n\tif stringutils.IsBlank(name) {\n\t\treturn nil, errors.New(\"cookie name not provided\")\n\t}\n\tif stringutils.IsBlank(value) {\n\t\treturn nil, errors.New(\"cookie value not provided\")\n\t}\n\t// Override path in case of not provided\n\tif stringutils.IsBlank(path) {\n\t\tpath = \"/\"\n\t}\n\t// Set session cookie in case of lesser than 0\n\tif maxage < 0 {\n\t\tmaxage = 0\n\t}\n\n\tcookie := http.Cookie{\n\t\tName: name,\n\t\tValue: value,\n\t\tDomain: domain,\n\t\tPath: path,\n\t\tMaxAge: maxage,\n\t\tHttpOnly: httpOnly,\n\t}\n\n\treturn &cookie, nil\n}", "title": "" }, { "docid": "602d192ffc68fa9d6d84711b38cb1386", "score": "0.5621644", "text": "func cookieExpires(maxAge int) (time.Time, bool) {\n\tif maxAge > 0 {\n\t\td := time.Duration(maxAge) * time.Second\n\t\treturn time.Now().Add(d), true\n\t} else if maxAge < 0 {\n\t\treturn time.Unix(1, 0), true // first second of the epoch\n\t}\n\treturn time.Time{}, false\n}", "title": "" }, { "docid": "a1c45298b23776f323f9b8fe8907b6e5", "score": "0.5617794", "text": "func clearCookie(rw http.ResponseWriter) {\n\tlog.Println(\"Cookie初期化\")\n\tcookie := &http.Cookie{\n\t\tName: sessionIDName,\n\t\tValue: \"\",\n\t}\n\thttp.SetCookie(rw, cookie)\n}", "title": "" }, { "docid": "aa61b2557796ba466f2e6d3f6d143bf7", "score": "0.56106377", "text": "func NewCookie(f *pkg.Fortune) Cookie {\n\treturn Cookie{\n\t\tFortune: f,\n\t}\n}", "title": "" }, { "docid": "eb232b1236223cb34c6dc3cb142dbe34", "score": "0.5600931", "text": "func getCookie(r *http.Request, cookiename string) (bool, *http.Cookie) {\n // Ignoring error value because it is likely that the cookie might not exist here\n cookie, _ := r.Cookie(cookiename)\n if cookie == nil {\n return false, nil\n }\n return true, cookie\n}", "title": "" }, { "docid": "e256e50ba10c8da671cff728c7972df1", "score": "0.55936986", "text": "func (d *Cookie) Remove(w http.ResponseWriter, r *http.Request) {\n\tif !d.Exists(r) {\n\t\treturn\n\t}\n\n\tc := d.new(r)\n\tc.Expires = time.Unix(1, 0)\n\tc.MaxAge = 0\n\tc.Value = tombstone\n\thttp.SetCookie(w, &c)\n}", "title": "" }, { "docid": "8148d3f77ce111a21c776c14ed7ed6db", "score": "0.5573843", "text": "func (c Cookie) Expires(expires time.Time) Cookie {\n\tc.c.Expires = expires\n\treturn c\n}", "title": "" }, { "docid": 
"b14085a0bc10b58921c2ee7ab4dc6c50", "score": "0.5542856", "text": "func ResetCookie() *http.Cookie {\n\treturn &http.Cookie{\n\t\tName: \"mccsToken\",\n\t\tValue: \"\",\n\t\tPath: \"/\",\n\t\tMaxAge: -1,\n\t\tHttpOnly: true,\n\t}\n}", "title": "" }, { "docid": "f13fd4e09275e2e8ff4bd33308b7dfd9", "score": "0.551788", "text": "func CreateUidCookie(c echo.Context) *http.Cookie {\n\tcookie := &http.Cookie{}\n\tcookie.Name = userCookieName\n\tcookie.Value = CreateUuid()\n\tcookie.Expires = time.Now().Add(2 * time.Hour)\n\tc.SetCookie(cookie)\n\n\treturn cookie\n}", "title": "" }, { "docid": "81799772fb0c6a743c86dd97aa757a03", "score": "0.55165124", "text": "func (ctx *RequestContext) ObtainOrCreateMemberCookie(listMemberId ListMemberId, httpCookie *http.Cookie) *MemberCookie {\n\tif httpCookie == nil {\n\t\t// If there's no httpCookie, then randomly generate an id and return a new one. We're taking a small chance of a collision, but that's ok.\n\t\trandomCookieId := MemberCookieId(rand.Uint64())\n\t\tmemberCookie := &MemberCookie{Id: randomCookieId, ListMemberId: listMemberId}\n\n\t\t// Only save the cookie in the db if a listMemberId is present\n\t\tif (listMemberId != 0) {\n\t\t\tctx.db.Insert(memberCookie)\n\t\t}\n\t\treturn memberCookie\n\t} else {\n\t\t// Since we have an httpCookie, try to retrieve the dbCookie if it exists\n\t\tencodedCookieId := httpCookie.Value\n\t\tmemberCookieId, err := DecodeMemberCookieId(encodedCookieId)\n\t\tmemberCookie := &MemberCookie{Id: memberCookieId}\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Problem parsing MemberCookieId from httpCookie: %s, error message: %s\", encodedCookieId, err)\n\t\t} else {\n\t\t\terr := ctx.db.Select(memberCookie)\n\t\t\tif err != nil {\n\t\t\t\tif strings.Contains(err.Error(), \"no rows in result set\") {\n\t\t\t\t\t// Since we didn't find a cookie in the db, save it if a listMemberId is present\n\t\t\t\t\tif (listMemberId != 0) {\n\t\t\t\t\t\tmemberCookie.ListMemberId = listMemberId\n\t\t\t\t\t\terr = ctx.db.Insert(memberCookie)\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\tfmt.Printf(\"Problem inserting MemberCookie record with id: : %d, DB message: %s\\n\", memberCookieId, err)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tfmt.Printf(\"Problem retrieving MemberCookie record with id: : %d, DB message: %s\\n\", memberCookieId, err)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// If we got here, it means we have an httpCookie and we retrieved the corresponding dbCookie\n\t\t\t\tif listMemberId == 0 {\n\t\t\t\t\t// If we didn't find a ListMemberId in the url and we have a httpCookie, then initialize the id from the dbCookie\n\t\t\t\t\tlistMemberId = memberCookie.ListMemberId\n\n\t\t\t\t} else if memberCookie.ListMemberId == 0 || memberCookie.ListMemberId != listMemberId {\n\t\t\t\t\t// If we already had a httpCookie that didn't have a list member id or the member id has changed, then update the httpCookie\n\t\t\t\t\tmemberCookie.ListMemberId = listMemberId\n\t\t\t\t\tmemberCookie.Updated = time.Now()\n\t\t\t\t\terr := ctx.db.Update(memberCookie)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tfmt.Printf(\"Problem updating MemberCookie record with id: %d, ListMemberId: %d, DB message: %s\\n\", memberCookie.Id, listMemberId, err)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn memberCookie\n\t}\n}", "title": "" }, { "docid": "a7e9c876893c9a51792c150a66be4810", "score": "0.5515661", "text": "func (r *Response) CookiePresent(cookieName string) *Response {\n\tr.cookiesPresent = append(r.cookiesPresent, cookieName)\n\treturn r\n}", "title": 
"" }, { "docid": "a7e9c876893c9a51792c150a66be4810", "score": "0.5515661", "text": "func (r *Response) CookiePresent(cookieName string) *Response {\n\tr.cookiesPresent = append(r.cookiesPresent, cookieName)\n\treturn r\n}", "title": "" }, { "docid": "8e4ef18618fbcb4f7cb9182339238f5f", "score": "0.5513256", "text": "func (l *HTTPLib) NewCookie(w http.ResponseWriter, name, value, path, domain string, expires time.Time, secure, httpOnly bool, sameSite http.SameSite) {\n\thttp.SetCookie(w, &http.Cookie{\n\t\tName: name,\n\t\tValue: value,\n\t\tPath: path,\n\t\tDomain: domain,\n\t\tExpires: expires,\n\t\tSecure: secure,\n\t\tHttpOnly: httpOnly,\n\t\tSameSite: sameSite,\n\t})\n}", "title": "" }, { "docid": "777d7fd1eb077383527edb63621a9dca", "score": "0.54860735", "text": "func CreateCookie(value string) *http.Cookie {\n\treturn &http.Cookie{\n\t\tName: \"mccsToken\",\n\t\tValue: value,\n\t\tPath: \"/\",\n\t\tMaxAge: 86400,\n\t\tHttpOnly: true,\n\t}\n}", "title": "" }, { "docid": "6057b7b26d261aa432f89462d45eee7e", "score": "0.5481366", "text": "func (sc *Cryptor) Clear(w http.ResponseWriter, r *http.Request) {\n\tc := sc.MakeCookieFunc(w, r)\n\tc.MaxAge = -1\n\thttp.SetCookie(w, c)\n}", "title": "" }, { "docid": "8bf45e9a73b81c9f5a16384aae183d94", "score": "0.5480389", "text": "func NewCookieOverseer(opts CookieOptions, secretKey []byte) *CookieOverseer {\n\tif len(opts.Name) == 0 {\n\t\tpanic(\"cookie name must be provided\")\n\t}\n\n\tblock, err := aes.NewCipher(secretKey)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tgcm, err := cipher.NewGCM(block)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\to := &CookieOverseer{\n\t\toptions: opts,\n\t\tsecretKey: secretKey,\n\t\tgcmBlockMode: gcm,\n\t}\n\n\to.resetExpiryMiddleware.resetter = o\n\n\treturn o\n}", "title": "" }, { "docid": "4f1af96109e64af27fac50b3babcca49", "score": "0.5464115", "text": "func setCookie(w http.ResponseWriter, nonce string, secureMode bool, maxAge int) error {\n\n\tencoded, err := secureCookie.Encode(csrfCookieName, nonce)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"the encode cookie failed, err = %v\", err)\n\t}\n\tcookie := &http.Cookie{\n\t\tName: csrfCookieName,\n\t\tValue: encoded,\n\t\tMaxAge: maxAge,\n\t\tHttpOnly: true,\n\t\tSecure: secureMode,\n\t\tSameSite: http.SameSiteLaxMode,\n\t}\n\n\thttp.SetCookie(w, cookie)\n\treturn nil\n}", "title": "" }, { "docid": "4f5b3748c5ef07cc26c67ef3bb42cb8a", "score": "0.5451011", "text": "func addCookie(context echo.Context, authToken string) {\n\texpire := time.Now().AddDate(0, 1, 0) // 1 month\n\tcookie := &http.Cookie{\n\t\tName: \"token\",\n\t\tExpires: expire,\n\t\tValue: auth.Bearer + \" \" + authToken,\n\t\tPath: \"/\",\n\t\t// Domain must not be set for auth to work with chrome without domain name\n\t\t// http://stackoverflow.com/questions/5849013/setcookie-does-not-set-cookie-in-google-chrome\n\t}\n\tcontext.Response().Header().Set(\"Set-Cookie\", cookie.String())\n}", "title": "" }, { "docid": "c87427e85bf60057842be38bf26fabf8", "score": "0.5449331", "text": "func (s *BasecookieListener) EnterCookie(ctx *CookieContext) {}", "title": "" }, { "docid": "d85ca4a20b5fb2220e4e99c9070756da", "score": "0.5431995", "text": "func (p *para) checkCookie(rawCookies string) {\n\theader := http.Header{}\n\theader.Add(\"Cookie\", rawCookies)\n\trequest := http.Request{Header: header}\n\tfor _, e := range request.Cookies() {\n\t\tif strings.Contains(e.Name, \"download_warning_\") {\n\t\t\tcookie, _ := request.Cookie(e.Name)\n\t\t\tp.Code = cookie.Value\n\t\t\tbreak\n\t\t}\n\t}\n}", 
"title": "" }, { "docid": "474dc27e088dca8ae2df78b170ef98b7", "score": "0.5421809", "text": "func MakeCookieFromOptions(req *http.Request, name string, value string, cookieOpts *options.Cookie, expiration time.Duration, now time.Time) *http.Cookie {\n\tdomain := GetCookieDomain(req, cookieOpts.Domains)\n\n\tif domain != \"\" {\n\t\treturn MakeCookie(req, name, value, cookieOpts.Path, domain, cookieOpts.HTTPOnly, cookieOpts.Secure, expiration, now, ParseSameSite(cookieOpts.SameSite))\n\t}\n\t// If nothing matches, create the cookie with the shortest domain\n\tdefaultDomain := \"\"\n\tif len(cookieOpts.Domains) > 0 {\n\t\tlogger.Errorf(\"Warning: request host %q did not match any of the specific cookie domains of %q\", requestutil.GetRequestHost(req), strings.Join(cookieOpts.Domains, \",\"))\n\t\tdefaultDomain = cookieOpts.Domains[len(cookieOpts.Domains)-1]\n\t}\n\treturn MakeCookie(req, name, value, cookieOpts.Path, defaultDomain, cookieOpts.HTTPOnly, cookieOpts.Secure, expiration, now, ParseSameSite(cookieOpts.SameSite))\n}", "title": "" }, { "docid": "84bde58009665b673ad00373ad756237", "score": "0.5403458", "text": "func NewCookie(name, value string) *http.Cookie {\n\toptions := DefaultSessionOptions\n\tcookie := &http.Cookie{\n\t\tName: name,\n\t\tValue: value,\n\t\tPath: options.Path,\n\t\tDomain: options.Domain,\n\t\tMaxAge: options.MaxAge,\n\t\tSecure: options.Secure,\n\t\tHttpOnly: options.HttpOnly,\n\t}\n\tif options.MaxAge > 0 {\n\t\td := time.Duration(options.MaxAge) * time.Second\n\t\tcookie.Expires = time.Now().Add(d)\n\t} else if options.MaxAge < 0 {\n\t\t// Set it to the past to expire now.\n\t\tcookie.Expires = time.Unix(1, 0)\n\t}\n\treturn cookie\n}", "title": "" }, { "docid": "2385fd6712ca6cb32612ec9c0364415e", "score": "0.53879577", "text": "func (a *Manager) Create(ctx *ws.Context, age int, value interface{}) error {\n\tt := a.pool.Get().(*token)\n\tdefer a.pool.Put(t)\n\tif err := t.reset(age, value); err != nil {\n\t\treturn err\n\t}\n\thttp.SetCookie(ctx.ResponseWriter, &http.Cookie{\n\t\tName: a.name,\n\t\tValue: t.String(),\n\t\tMaxAge: age,\n\t\tPath: a.path,\n\t\tSecure: a.secure,\n\t\tHttpOnly: true,\n\t\tSameSite: http.SameSiteStrictMode,\n\t})\n\treturn nil\n}", "title": "" }, { "docid": "bc1528abee99fb0a17d00fc34b31b3a5", "score": "0.5351003", "text": "func (c *Cookie) MaxAge(seconds int) *Cookie { c.maxAge = seconds; return c }", "title": "" }, { "docid": "5a85bd1c08be39ebc50e24d4780a300a", "score": "0.53301567", "text": "func (s *server) createUserCookie(w http.ResponseWriter, isRegistered bool, UserID string) {\n\tvar cookiedays = 365 // 356 days\n\tif isRegistered == true {\n\t\tcookiedays = 30 // 30 days\n\t}\n\n\tencoded, err := s.cookie.Encode(s.config.SecureCookieName, UserID)\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\treturn\n\n\t}\n\n\tcookie := &http.Cookie{\n\t\tName: s.config.SecureCookieName,\n\t\tValue: encoded,\n\t\tPath: s.config.PathPrefix + \"/\",\n\t\tHttpOnly: true,\n\t\tDomain: s.config.AppDomain,\n\t\tMaxAge: 86400 * cookiedays,\n\t\tSecure: s.config.SecureCookieFlag,\n\t\tSameSite: http.SameSiteStrictMode,\n\t}\n\thttp.SetCookie(w, cookie)\n}", "title": "" }, { "docid": "748bcee6e9ee9b2f7aaadc9b1e1c2ed8", "score": "0.53183085", "text": "func (c *CookieOverseer) ResetExpiry(w http.ResponseWriter, r *http.Request) error {\n\tif c.options.MaxAge == 0 {\n\t\treturn nil\n\t}\n\n\tval, err := c.options.getCookieValue(w, r)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"unable to get session value from 
cookie\")\n\t}\n\n\tw.(cookieWriter).SetCookie(c.options.makeCookie(val))\n\n\treturn nil\n}", "title": "" }, { "docid": "33584cb3f771a31e0b1ba667dff5f3e0", "score": "0.53181857", "text": "func (rc *Ctx) ExtendCookie(name string, path string, years, months, days int) {\n\tc := rc.GetCookie(name)\n\tif c == nil {\n\t\treturn\n\t}\n\tc.Path = path\n\tc.Domain = rc.getCookieDomain()\n\tif c.Expires.IsZero() {\n\t\tc.Expires = time.Now().UTC().AddDate(years, months, days)\n\t} else {\n\t\tc.Expires = c.Expires.AddDate(years, months, days)\n\t}\n\trc.WriteCookie(c)\n}", "title": "" }, { "docid": "c32098d37a0803d205f96ee983c24741", "score": "0.53179026", "text": "func (req *request) Cookie() Cookie {\n return req.cookies\n}", "title": "" }, { "docid": "3e235b7941144c98ac0cf841862fbf0a", "score": "0.53119755", "text": "func NewFzCookieRef(ref unsafe.Pointer) *FzCookie {\n\treturn (*FzCookie)(ref)\n}", "title": "" }, { "docid": "d7e61eafa462c0bf2420d9ccd4615c7f", "score": "0.5299337", "text": "func (am AuthManager) injectCookie(ctx *Ctx, name, value string, expire time.Time) {\n\tctx.WriteNewCookie(&http.Cookie{\n\t\tName: name,\n\t\tValue: value,\n\t\tExpires: expire,\n\t\tPath: am.CookiePathOrDefault(),\n\t\tHttpOnly: am.CookieHTTPOnly,\n\t\tSecure: am.CookieSecure,\n\t\tSameSite: webutil.MustParseSameSite(am.CookieSameSite),\n\t})\n}", "title": "" }, { "docid": "30db5ff977555f05f55bba3c45775d21", "score": "0.5268361", "text": "func (c *Cookie) Domain(domain string) *Cookie { c.domain = domain; return c }", "title": "" }, { "docid": "d6ad56ff45c41be2e7129fe8f7a8def3", "score": "0.5258983", "text": "func getCookie(w http.ResponseWriter, req *http.Request) *http.Cookie {\n\tc, err := req.Cookie(\"session\")\n\tif err != nil {\n\t\tsID, _ := uuid.NewV4()\n\t\tc = &http.Cookie{\n\t\t\tName: \"session\",\n\t\t\tValue: sID.String(),\n\t\t}\n\t\thttp.SetCookie(w, c)\n\t}\n\treturn c\n}", "title": "" }, { "docid": "7f8cb4868a5f7421b735851e9724471c", "score": "0.52580035", "text": "func MakeCookie(req *http.Request, name string, value string, path string, domain string, httpOnly bool, secure bool, expiration time.Duration, now time.Time, sameSite http.SameSite) *http.Cookie {\n\tif domain != \"\" {\n\t\thost := requestutil.GetRequestHost(req)\n\t\tif h, _, err := net.SplitHostPort(host); err == nil {\n\t\t\thost = h\n\t\t}\n\t\tif !strings.HasSuffix(host, domain) {\n\t\t\tlogger.Errorf(\"Warning: request host is %q but using configured cookie domain of %q\", host, domain)\n\t\t}\n\t}\n\n\treturn &http.Cookie{\n\t\tName: name,\n\t\tValue: value,\n\t\tPath: path,\n\t\tDomain: domain,\n\t\tHttpOnly: httpOnly,\n\t\tSecure: secure,\n\t\tExpires: now.Add(expiration),\n\t\tSameSite: sameSite,\n\t}\n}", "title": "" }, { "docid": "9e0206cd8b6c7e79fbc136eb3a61f45a", "score": "0.5241197", "text": "func deleteAccount(w http.ResponseWriter, r *http.Request) {\n clearCache(w)\n cookie, _ := r.Cookie(LOGIN_COOKIE)\n sendCommand(CommandRequest{CommandDeleteAccount, cookie.Value})\n cookie.MaxAge = -1\n cookie.Expires = time.Now().Add(-1 * time.Hour)\n http.SetCookie(w, cookie)\n http.Redirect(w, r, \"/welcome\", http.StatusSeeOther)\n}", "title": "" }, { "docid": "262708b25d1b8519d856c318f2ac2496", "score": "0.52072793", "text": "func (s *Session) Cookie() *http.Cookie {\n\tma := 0\n\tif s.Valid() {\n\t\tma = int(DefaultDuration.Seconds())\n\t}\n\treturn &http.Cookie{\n\t\tName: SessionKey,\n\t\tValue: s.Id,\n\t\tExpires: s.Expires,\n\t\tMaxAge: ma,\n\t}\n}", "title": "" }, { "docid": "75ea947e6f8bf1edc3e68b1f0adea988", 
"score": "0.5206201", "text": "func createSessionCookie(w http.ResponseWriter, r *http.Request, value string) {\n\tcookie, err := r.Cookie(\"session\")\n\tif err != nil {\n\t\tcookie = &http.Cookie{\n\t\t\tName: \"session\",\n\t\t\tMaxAge: SessionTime,\n\t\t\tValue: value,\n\t\t\tHttpOnly: true,\n\t\t}\n\t\thttp.SetCookie(w, cookie)\n\t}\n}", "title": "" }, { "docid": "e3f3884d1a385dbab37e769134308add", "score": "0.52000403", "text": "func SetCookie(w http.ResponseWriter, r *http.Request) {\r\n\tcookieName := envvar.CookieName()\r\n\tcookie, err := r.Cookie(cookieName)\r\n\tif err != nil {\r\n\t\tcookie := &http.Cookie{\r\n\t\t\tName: cookieName,\r\n\t\t\tValue: (uuid.NewV4()).String(),\r\n\t\t\tHttpOnly: true,\r\n\t\t\tPath: \"/\",\r\n\t\t\tDomain: envvar.HostAddress(),\r\n\t\t\tSecure: true,\r\n\t\t}\r\n\t\thttp.SetCookie(w, cookie)\r\n\t\tlogger.Info.Println(\"set cookie : \" + cookie.Value + \"-\" + cookieName)\r\n\t\treturn\r\n\t}\r\n\t_, found := Get(r)\r\n\tif found {\r\n\t\tRefresh(r)\r\n\t\tlogger.Info.Println(\"session refresh: \" + cookie.Value)\r\n\t\treturn\r\n\t}\r\n\tlogger.Info.Println(cookie.Value + \" already set\")\r\n\r\n\treturn\r\n}", "title": "" }, { "docid": "db12c2830c7205d6450bfd23e71caef9", "score": "0.5197693", "text": "func makeCookie(session string) *http.Cookie {\n\tcookie := new(http.Cookie)\n\tcookie.Name = \"JSESSIONID\"\n\tcookie.Value = session\n\treturn cookie\n}", "title": "" }, { "docid": "02fca1bbff9d706b1fde5d1cd6c098ae", "score": "0.5192265", "text": "func (sc *SafeCookie) Seal(data []byte, c *http.Cookie) error {\n\tnonce := make([]byte, sc.AEAD.NonceSize())\n\tif _, err := rand.Read(nonce); err != nil {\n\t\treturn err\n\t}\n\n\tciphertext := sc.AEAD.Seal(nonce, nonce, data, []byte(c.Name))\n\tc.Value = base64.URLEncoding.EncodeToString(ciphertext)\n\n\treturn nil\n}", "title": "" }, { "docid": "fdd4e265fff8df6a2fdd2ccd41c8c2a3", "score": "0.51823014", "text": "func (cookieAuth *CookieAuth) Read(req *http.Request) (userId int64, err error) {\n\tvar cookie *http.Cookie\n\tif cookie, err = req.Cookie(cookieAuth.cookieName); err != nil {\n\t\tif err.Error() == NotPresentErrorString {\n\t\t\terr = nil // Ignore this \"error\" and instead just return nil userId result.\n\t\t} else {\n\t\t\terr = fmt.Errorf(\"Cookieauth.Read error reading cookie: %s\", err)\n\t\t}\n\t\treturn\n\t}\n\tif len(cookie.Value) == 0 {\n\t\treturn\n\t}\n\tvalue := make(map[string]string)\n\tif err = cookieAuth.secureCookie.Decode(cookieAuth.cookieName, cookie.Value, &value); err != nil {\n\t\terr = fmt.Errorf(\"Cookieauth.Read error decrypting cookie: %s\", err)\n\t\treturn\n\t}\n\t// Extract userId.\n\tuserIdString, ok := value[\"userId\"]\n\tif !ok {\n\t\t// No userId found in cookie value, return userId=0.\n\t\tlog.WithField(\"action\", \"cookieauth.read\").Infof(\"no userId found in value=%v\", value)\n\t\treturn\n\t}\n\tif userId, err = strconv.ParseInt(userIdString, 10, 64); err != nil {\n\t\terr = fmt.Errorf(\"Cookieauth.Read failed to parse userId=%v: %s\", userIdString, err)\n\t\treturn\n\t}\n\t// Extract and validate expiration timestamp.\n\tcreatedAtString, ok := value[\"createdAt\"]\n\tif !ok {\n\t\t// No createdAt found in cookie value, return userId=0.\n\t\tlog.WithField(\"action\", \"cookieauth.read\").Infof(\"no createdAt found in value=%v\", value)\n\t\treturn\n\t}\n\tcreatedAt, err := time.Parse(createdAtLayout, createdAtString)\n\tlog.WithField(\"action\", \"cookieauth.read\").Debugf(\"createdAt=%v since=%v\", createdAt, time.Since(createdAt))\n\tif 
time.Since(createdAt) > cookieAuth.expireAfter {\n\t\t// Expired.\n\t\tlog.WithField(\"action\", \"cookieauth.read\").Infof(\"discovered expired auth cookie createdAt=%s which more than %s ago\", createdAt, cookieAuth.expireAfter)\n\t\tuserId = 0 // Zero out userId.\n\t\terr = Expired\n\t\treturn\n\t}\n\tlog.WithField(\"action\", \"cookieauth.read\").Infof(\"found userId=%v\", value)\n\treturn\n}", "title": "" }, { "docid": "ba953ff1dbc947b387136265d00855d5", "score": "0.518004", "text": "func (cook CreateFenceCookie) Check() error {\n\treturn cook.Cookie.Check()\n}", "title": "" }, { "docid": "7b01492d7417f8c8517f6c3a424a7e9d", "score": "0.5173248", "text": "func (r *Reply) Cookie(cookie *http.Cookie) *Reply {\n\tif r.cookies == nil {\n\t\tr.cookies = make([]*http.Cookie, 0)\n\t}\n\n\tr.cookies = append(r.cookies, cookie)\n\treturn r\n}", "title": "" }, { "docid": "be03dbfb313796ee1863513a9c25cae0", "score": "0.5167642", "text": "func DeleteFCookie(c *fiber.Ctx) {\n\tcookie.Name = \"frontends1\"\n\tcookie.Expires = time.Now().Add(-time.Minute)\n\tc.ClearCookie()\n\tc.Cookie(&cookie)\n\tfmt.Println(\"Cookie: \", cookie)\n}", "title": "" }, { "docid": "b8720db5ad4bc53c5f60c7005a1938cc", "score": "0.51589805", "text": "func (client *HTTPClient) EnsureCookie(fromURL string, force bool) error {\n\tcURL, err := url.Parse(fromURL)\n\tif err != nil {\n\t\treturn WrapErr(err, \"failed to parse cookie URL\").With(\"url\", fromURL)\n\t}\n\tcookies := client.PersistentJar.Cookies(cURL)\n\tif force || len(cookies) == 0 {\n\t\tresp, err := client.Get(fromURL, nil)\n\t\tif err != nil {\n\t\t\treturn WrapErr(err, \"failed to fetch cookie URL\").With(\"url\", fromURL)\n\t\t}\n\t\tresp.Body.Close()\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "a20e9d29b3b40622ff10f37d75c7683b", "score": "0.51558584", "text": "func CheckTheValidityOfTheTokenFromHTTPHeader(w http.ResponseWriter, r *http.Request) (writer http.ResponseWriter, newToken string, err error) {\n err = createError(011)\n for _, cookie := range r.Cookies() {\n if cookie.Name == \"Token\" {\n var token string\n token, err = CheckTheValidityOfTheToken(cookie.Value)\n //fmt.Println(\"T\", token, err)\n writer = SetCookieToken(w, token)\n newToken = token\n }\n }\n //fmt.Println(err)\n return\n}", "title": "" }, { "docid": "47d0ffeb2247400302169dfa4667a9f3", "score": "0.5149213", "text": "func createSession() (*http.Cookie, time.Time) {\n\n\tconst sessionLength int = 60\n\tvar timeCreated time.Time\n\n\t// Create Cookie used in web session\n\tsID, _ := uuid.NewV4()\n\tc := &http.Cookie{\n\t\tName: \"session\",\n\t\tValue: sID.String(),\n\t}\n\tc.MaxAge = sessionLength\n\n\t// Store creation time of Cookie into Postgres Database\n\t// Note that time.Now() uses current local time configured to your computer\n\ttimeCreated = time.Now()\n\tfmt.Println(timeCreated)\n\n\treturn c, timeCreated\n}", "title": "" }, { "docid": "faa54ccfadf6a6d7992939ada19fca91", "score": "0.51469886", "text": "func (c Cookie) SaveReq() Cookie {\n\tc.w.Req.AddCookie(c.c)\n\treturn c\n}", "title": "" }, { "docid": "e8e6ca91e1f2bf223807b9fa9ea99a4b", "score": "0.5146988", "text": "func (d *Cookie) Get(w http.ResponseWriter, r *http.Request, v any) (bool, error) {\n\ts, err := webmiddleware.GetSecureCookie(r.Context())\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tcookie, err := r.Cookie(d.Name)\n\tif err != nil || cookie.Value == tombstone {\n\t\treturn false, nil\n\t}\n\n\terr = s.Decode(d.Name, cookie.Value, v)\n\tif err != nil {\n\t\td.Remove(w, r)\n\t\treturn false, 
nil\n\t}\n\n\treturn true, nil\n}", "title": "" }, { "docid": "145808f600f52d4dd0eecd946e315589", "score": "0.51468", "text": "func GenerateCookie(email string, token string) (cookie string, err error) {\n\tmacString := email + \"#\" + token\n\tmacBytes, err := computeMAC([]byte(macString))\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn macString + \"#\" + hex.EncodeToString(macBytes), nil\n}", "title": "" }, { "docid": "45f6b1fefcf1018968ea3545acf0d202", "score": "0.5137174", "text": "func (r *Response) CookieNotPresent(cookieName string) *Response {\n\tr.cookiesNotPresent = append(r.cookiesNotPresent, cookieName)\n\treturn r\n}", "title": "" }, { "docid": "45f6b1fefcf1018968ea3545acf0d202", "score": "0.5137174", "text": "func (r *Response) CookieNotPresent(cookieName string) *Response {\n\tr.cookiesNotPresent = append(r.cookiesNotPresent, cookieName)\n\treturn r\n}", "title": "" }, { "docid": "989e1709e3163867a91dbaf4fa80070a", "score": "0.5131856", "text": "func (cookie *Cookie) ToHTTPCookie(ttl time.Duration) *http.Cookie {\n\tj, _ := json.Marshal(cookie)\n\tb64 := base64.URLEncoding.EncodeToString(j)\n\n\treturn &http.Cookie{\n\t\tName: uidCookieName,\n\t\tValue: b64,\n\t\tExpires: time.Now().Add(ttl),\n\t\tPath: \"/\",\n\t}\n}", "title": "" }, { "docid": "87dee13209e57a6ac6e140e141483230", "score": "0.5121043", "text": "func readCookie(res http.ResponseWriter, req *http.Request) {\n\tcookie := readCreateCookie(req)\n\thttp.SetCookie(res, cookie) // set cookie into browser.\n\tuserInformation = cookieInformationDecoding(cookie.Value) // decode and set user state into page variable.\n}", "title": "" }, { "docid": "8cb3ef293693371f07d9357e55a0ab9e", "score": "0.5120952", "text": "func DeleteCookieValue(w http.ResponseWriter){\n\thttp.SetCookie(w, &http.Cookie{\n\t\tName: \"SID\",\n\t\tValue: \"\",\n\t\tPath: \"/\",\n\t\tHttpOnly: true,\n\t\tExpires: time.Unix(0, 0),\n\t})\n}", "title": "" }, { "docid": "caa8e2ca47bf66f3469907f34a3a71a0", "score": "0.5109182", "text": "func Expires(value time.Time) *SimpleElement { return newSETime(\"expires\", value) }", "title": "" }, { "docid": "f85b78cad7ed554563f0de7cdf919e76", "score": "0.5098667", "text": "func NewCookie(name, value string) *Cookie {\n\treturn &Cookie{name: name, value: value, path: \"/\", httpOnly: true}\n}", "title": "" }, { "docid": "b2f2b50647405a01d1a089c626e88a67", "score": "0.5093006", "text": "func ClearCookie(w http.ResponseWriter, req *http.Request, name string) {\n\tSetCookieAge(w, req, name, \"\", -time.Second)\n}", "title": "" }, { "docid": "c35d59a53151007b8ab15d0b020bd3a5", "score": "0.50870043", "text": "func logout(w http.ResponseWriter, r *http.Request) {\n LOG[INFO].Println(\"Executing Logout\")\n clearCache(w)\n cookie, _ := r.Cookie(LOGIN_COOKIE)\n cookie.MaxAge = -1\n cookie.Expires = time.Now().Add(-1 * time.Hour)\n http.SetCookie(w, cookie)\n LOG[INFO].Println(\"Successfully Logged Out\")\n http.Redirect(w, r, \"/welcome\", http.StatusSeeOther)\n}", "title": "" }, { "docid": "3ecac01c0dd87b3082ba4485f70e455d", "score": "0.507716", "text": "func RefreshFCookie(token string) {\n\tcookie.Name = \"frontends1\"\n\tcookie.Value = token\n\tcookie.Expires = time.Now().Add(15 * time.Minute)\n\tcookie.Domain = Cdomain\n}", "title": "" }, { "docid": "13e5958306df8321e30247bcf8021880", "score": "0.5076842", "text": "func (c Cookie) ThreeMonths() Cookie {\n\treturn c.Expires(time.Now().AddDate(0, 3, 0))\n}", "title": "" }, { "docid": "2e9c35ca5ee04a44f7c5c6dc19b282f1", "score": "0.507645", "text": "func (c 
Cookie) Day() Cookie {\n\treturn c.Expires(time.Now().AddDate(0, 0, 1))\n}", "title": "" }, { "docid": "2f70e73768fee9c98a56890a5ac5128b", "score": "0.50727916", "text": "func TestCookieWithOptions(t *testing.T) {\n\tRunCookieSetup(t, func(privKey *rsa.PrivateKey) {\n\n\t\tsigner := NewCookieSigner(\"keyID\", privKey)\n\t\tsigner.Path = \"/\"\n\t\tsigner.Domain = testDomain\n\t\tsigner.Secure = false\n\n\t\tif signer.keyID != \"keyID\" || signer.privKey != privKey {\n\t\t\tt.Fatalf(\"NewCookieSigner does not properly assign values %+v\", signer)\n\t\t}\n\n\t\tp := &Policy{\n\t\t\tStatements: []Statement{\n\t\t\t\t{\n\t\t\t\t\tResource: \"*\",\n\t\t\t\t\tCondition: Condition{\n\t\t\t\t\t\tDateLessThan: &AWSEpochTime{time.Now().Add(1 * time.Hour)},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\n\t\tcookies, err := signer.SignWithPolicy(p)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Error signing cookies %#v\", err)\n\t\t}\n\t\tvalidateCookies(t, cookies, signer)\n\n\t})\n}", "title": "" }, { "docid": "6b9107b25f7f1a313f4f062ea0743b1b", "score": "0.507098", "text": "func (zr *ZRequest) SetCookie(ck *http.Cookie) *ZRequest {\n\tif zr.ended {\n\t\treturn zr\n\t}\n\tzr.cookies = append(zr.cookies, ck)\n\treturn zr\n}", "title": "" }, { "docid": "579c0ebcbfa97c21aec9ad3123035070", "score": "0.5070577", "text": "func (d *Cookie) Set(w http.ResponseWriter, r *http.Request, v any) error {\n\ts, err := webmiddleware.GetSecureCookie(r.Context())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tstr, err := s.Encode(d.Name, v)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tc := d.new(r)\n\tc.Value = str\n\thttp.SetCookie(w, &c)\n\n\treturn nil\n}", "title": "" }, { "docid": "91b1dfabc232697302423b2b2fb5bf27", "score": "0.5056292", "text": "func (c Cookie) ThreeYears() Cookie {\n\treturn c.Expires(time.Now().AddDate(3, 0, 0))\n}", "title": "" }, { "docid": "0778e8074268daccf2af2a30795d6e24", "score": "0.50537044", "text": "func (c Cookie) Get() (*http.Cookie, error) {\n\tif c.c.Value != \"\" {\n\t\treturn c.c, nil\n\t}\n\treturn c.w.Req.Cookie(c.c.Name)\n}", "title": "" }, { "docid": "127ab03a1c1b7a2b2434eda44b6364bb", "score": "0.50511473", "text": "func createSession(w http.ResponseWriter, r *http.Request, user db.User) {\n\tnewSession, err := helpers.Store(r).CreateSession(db.Session{\n\t\tUserID: user.ID,\n\t\tCreated: time.Now(),\n\t\tLastActive: time.Now(),\n\t\tIP: r.Header.Get(\"X-Real-IP\"),\n\t\tUserAgent: r.Header.Get(\"user-agent\"),\n\t\tExpired: false,\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tencoded, err := util.Cookie.Encode(\"semaphore\", map[string]interface{}{\n\t\t\"user\": user.ID,\n\t\t\"session\": newSession.ID,\n\t})\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thttp.SetCookie(w, &http.Cookie{\n\t\tName: \"semaphore\",\n\t\tValue: encoded,\n\t\tPath: \"/\",\n\t})\n}", "title": "" }, { "docid": "f4d989145a5a539bb883261ee66cde4a", "score": "0.50385416", "text": "func (cook SetCrtcTransformCookie) Check() error {\n\treturn cook.Cookie.Check()\n}", "title": "" }, { "docid": "777ce3658ef5132bc233b9d8a6bb4966", "score": "0.5037629", "text": "func (r *Response) Cookie(name, value string) *Response {\n\tr.cookies = append(r.cookies, NewCookie(name).Value(value))\n\treturn r\n}", "title": "" }, { "docid": "23dd57c374f420064c8d6b99271fdcab", "score": "0.50345933", "text": "func (cook ResetFenceCookie) Check() error {\n\treturn cook.Cookie.Check()\n}", "title": "" }, { "docid": "f7efc6de7faaba1d391ea103b8a7f2a4", "score": "0.50259876", "text": "func WriteNewAuth(w http.ResponseWriter, r 
*http.Request, authTokenString, refreshTokenString, csrfSecret string) {\n\texpiration := time.Now().Add(models.RefreshTokenValidTime)\n\n\tcookie := http.Cookie{Name: \"authToken\", Value: authTokenString, Expires: expiration, Path: \"/\", HttpOnly: true, Secure: true}\n\thttp.SetCookie(w, &cookie)\n\n\tcookie = http.Cookie{Name: \"refreshToken\", Value: refreshTokenString, Expires: expiration, Path: \"/\", HttpOnly: true, Secure: true}\n\thttp.SetCookie(w, &cookie)\n\n\tcookie = http.Cookie{Name: \"csrfSecret\", Value: csrfSecret, Expires: expiration, Path: \"/\", HttpOnly: true, Secure: true}\n\thttp.SetCookie(w, &cookie)\n\n\treturn\n}", "title": "" } ]
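For reference alongside the record above ("prepare a cookie which has no content and has already expired"): a minimal, self-contained Go sketch of the same pattern shown in the record's positive passage — an http.Cookie with an empty value and an expiry in the past, as typically used to clear a session or flash cookie. The cookie name "session" and the handler are illustrative assumptions, not part of the dataset.

package main

import (
	"net/http"
	"time"
)

// expiredCookie returns a cookie with no content that has already expired.
// Setting it on a response tells the browser to discard any stored value
// under the same name. The name passed in is an illustrative assumption.
func expiredCookie(name string) *http.Cookie {
	return &http.Cookie{
		Name:    name,
		Value:   "",              // no content
		Path:    "/",
		Expires: time.Unix(0, 0), // already expired (the epoch)
		MaxAge:  -1,              // also ask the client to delete it immediately
	}
}

func logoutHandler(w http.ResponseWriter, r *http.Request) {
	http.SetCookie(w, expiredCookie("session"))
	w.WriteHeader(http.StatusNoContent)
}

func main() {
	http.HandleFunc("/logout", logoutHandler)
	_ = http.ListenAndServe(":8080", nil)
}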
c065c3ab82cac17fbc875d915e30535e
SetContextRequestID sets a request ID in the given context and returns a new context.
[ { "docid": "d11305e0160eaa33d258a6ee462d8c49", "score": "0.8871772", "text": "func SetContextRequestID(ctx context.Context, reqID string) context.Context {\n\treturn context.WithValue(ctx, reqIDKey, reqID)\n}", "title": "" } ]
[ { "docid": "b98f3fe34edf455e4fa2e9450f3d0938", "score": "0.8388244", "text": "func (ctx *ServerContext) SetRequestID(requestID uint64) context.Context {\n\treturn context.WithValue(ctx, contextKeyRequestID, requestID)\n}", "title": "" }, { "docid": "8b12c687f0cd38374b46c02c54c3e1cc", "score": "0.8084778", "text": "func ContextWithRequestID(ctx context.Context, requestID string) context.Context {\n\treturn context.WithValue(ctx, requestIDKey, requestID)\n}", "title": "" }, { "docid": "6b63e7520a18cc4366c55c65d9c01c21", "score": "0.8012927", "text": "func WithRequestID(ctx context.Context, reqid string) context.Context {\n\treturn context.WithValue(ctx, reqidKey, reqid)\n}", "title": "" }, { "docid": "00f49f0fbafa95260746a9ed243fc722", "score": "0.7957035", "text": "func WithRequestID(ctx context.Context, id string) context.Context {\n\tm := TemplateMapFromContext(ctx)\n\tm[\"requestID\"] = id\n\tctx = WithTemplateMap(ctx, m)\n\n\treturn context.WithValue(ctx, contextKeyRequestID, id)\n}", "title": "" }, { "docid": "bb0d0b0c6174f9982a190bd6baab2432", "score": "0.7916821", "text": "func SetRequestID(ctx context.Context) context.Context {\n\treturn context.WithValue(ctx, logContextKeyRequestID, uuid.New().String())\n}", "title": "" }, { "docid": "95521d61a8e833cd652655cfdc19010c", "score": "0.78068805", "text": "func WithRequestID(c context.Context, id *json.RawMessage) context.Context {\n\treturn context.WithValue(c, requestIDKey{}, id)\n}", "title": "" }, { "docid": "9057ad2a991a3f995ac421bfc88d13fb", "score": "0.7735115", "text": "func NewRequestContextWithRequestID(ctx context.Context, rid string) context.Context {\n\treturn context.WithValue(ctx, ContextKey(\"request_id\"), rid)\n}", "title": "" }, { "docid": "776f72419e48218821fc57cec47b95f9", "score": "0.74383307", "text": "func withRequestID(ctx context.Context, id string) context.Context {\n\treturn context.WithValue(ctx, requestIDKey, id)\n}", "title": "" }, { "docid": "03bae8cde7c80f57bdb2f9b5db62934d", "score": "0.7434864", "text": "func ContextWithRequestID(ctx context.Context) (context.Context, string) {\n\treqID := ContextRequestID(ctx)\n\tif reqID == \"\" {\n\t\treqID = shortID()\n\t\tctx = context.WithValue(ctx, reqIDKey, reqID)\n\t}\n\treturn ctx, reqID\n}", "title": "" }, { "docid": "657c8ea56f7046a53d878bee96504951", "score": "0.72273105", "text": "func newRequestIDContext(req *http.Request) context.Context {\n\tctx := req.Context()\n\treqID := req.Header.Get(tokenSvcRequestHeader)\n\tif reqID == \"\" {\n\t\treqID = uuid.NewV4().String()\n\t}\n\treturn context.WithValue(ctx, requestIDKey, reqID)\n}", "title": "" }, { "docid": "58d3a996453edd0f779e3d3278816ac8", "score": "0.720733", "text": "func ContextRequestID(ctx context.Context) string {\n\tvar reqID string\n\tid := ctx.Value(reqIDKey)\n\tif id != nil {\n\t\treqID = id.(string)\n\t}\n\treturn reqID\n}", "title": "" }, { "docid": "9b9b8765d7e7d3364c0442ce9a174597", "score": "0.6957396", "text": "func (r *Request) SetContext(ctx context.Context) {\n\tif ctx == nil {\n\t\tpanic(\"context cannot be nil\")\n\t}\n\tsetRequestContext(ctx, r)\n}", "title": "" }, { "docid": "06ed31b41377f007825b9cb850469a5d", "score": "0.69563764", "text": "func (r *Request) SetContext(ctx context.Context) { // TODO: make context take control over the whole operation\n\tr.ctx = &ctx\n}", "title": "" }, { "docid": "b3ffc80ed799590e1853c8c3aa477917", "score": "0.6870503", "text": "func (request *CancelRequest) SetRequestID(value int64) {\n\trequest.ProxyMessage.SetRequestID(value)\n}", "title": 
"" }, { "docid": "27b34ec56c801196cbbd0e5a79f8ee2c", "score": "0.6857938", "text": "func SetContext(ctx context.Context, c *Context) (newctx context.Context) {\n\treturn context.WithValue(ctx, reqctx(255), c)\n}", "title": "" }, { "docid": "e0d07aadbdbc7c0a00a8652f5105d07d", "score": "0.67757595", "text": "func (r *Request) SetContext(ctx context.Context) *Request {\n\tr.context = ctx\n\treturn r\n}", "title": "" }, { "docid": "7bff1716b49af791da7c7ed62f57bd19", "score": "0.6765919", "text": "func (o *PutRecordsIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "89c24f2641b7b438b04da954983a96b9", "score": "0.674551", "text": "func (ctx *HijackRequest) SetContext(c context.Context) *HijackRequest {\n\tctx.req = ctx.req.WithContext(c)\n\treturn ctx\n}", "title": "" }, { "docid": "1f40551ce6b68995bb7c68a8440dcd0f", "score": "0.67451465", "text": "func (o *PutRecorderIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "b4d2844cd8c010cce4f50533bb738851", "score": "0.67011315", "text": "func (o *PatchRetentionsIDExecutionsEidParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "d698080bbc5f58ca4ab72762b06172ef", "score": "0.6650515", "text": "func (cu *CommentUpdate) SetRequestID(id uuid.UUID) *CommentUpdate {\n\tcu.mutation.SetRequestID(id)\n\treturn cu\n}", "title": "" }, { "docid": "24d35929ba0634fa291d590bf6b27107", "score": "0.6645062", "text": "func (o *CancelRequestParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "e38f5f84c8103556b6718ade21f20684", "score": "0.66042656", "text": "func (o *GetCustomersByIDPaymentInstrumentsParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "f61be55bd61b816a588a025482367dad", "score": "0.65686417", "text": "func (o *GetKerberosRequestByNameAndWorkspaceIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "35e77a0128776d8f372d738dcf3007fe", "score": "0.6567014", "text": "func xRequestIDToContext(ctx context.Context, r *http.Request) context.Context {\n\tid := uuid.NewV4().String()\n\th := r.Header.Get(\"X-Request-Id\")\n\tif len(h) > 0 {\n\t\tid = h\n\t} else {\n\t\tif v, ok := ctx.Value(kithttp.ContextKeyRequestXRequestID).(string); ok {\n\t\t\tid = v\n\t\t}\n\t}\n\treturn context.WithValue(\n\t\tctx,\n\t\tkithttp.ContextKeyRequestXRequestID,\n\t\tid,\n\t)\n}", "title": "" }, { "docid": "c2fcca888cc888c837b4cf2a0039a978", "score": "0.6541867", "text": "func (ctx *HttpContext) WithContext(runCtx context.Context) {\r\n\tif runCtx == nil {\r\n\t\tpanic(\"nil context\")\r\n\t}\r\n\tctx.context = runCtx\r\n\tctx.context = context.WithValue(ctx.context, \"RequestID\", ctx.Request().RequestID())\r\n}", "title": "" }, { "docid": "fa216c9f47cb3a5c36b4ce664392dc78", "score": "0.65410787", "text": "func (cuo *CommentUpdateOne) SetRequestID(id uuid.UUID) *CommentUpdateOne {\n\tcuo.mutation.SetRequestID(id)\n\treturn cuo\n}", "title": "" }, { "docid": "8bc2d7af31a31cd578cc55ebcecf666d", "score": "0.64966375", "text": "func (o *CreateIncidentParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "4c086bcdf561a3fd1e193733ec2ed8a7", "score": "0.6493843", "text": "func (o *PutTicketsTicketFidStatusParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "7d4d6f640541ce90b5dc7fe4fd4b86e6", "score": "0.6485947", "text": "func (o *UpdateSDTByIDParams) 
SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "8b65df64bd175690edb94a13d63eaf00", "score": "0.6476919", "text": "func (o *GetV1IncidentsIncidentIDTasksTaskIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "ff35dffc25b8c93fe020a1b76273aac5", "score": "0.6474521", "text": "func (e *Error) SetRequestID(requestID string) *Error {\n\te.RequestIDField = requestID\n\treturn e\n}", "title": "" }, { "docid": "a02adacfbdde6cf999dd02667f644fb1", "score": "0.64606863", "text": "func (o *PatchCustomerListsByIDCustomersByIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "92e160d3b2f40553ec19c9830f8c2af3", "score": "0.64555633", "text": "func (o *PutMicroservicesIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "9336ed9628a05854ee602d2ed6832541", "score": "0.6455356", "text": "func (b *BaseResponse) SetRequestID(id string) Response {\n\tb.RequestID = id\n\treturn b\n}", "title": "" }, { "docid": "67431025d7aea86c612df65d41a95529", "score": "0.6432513", "text": "func (o *TokenIssueParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "70e30c35acd36c439c7ee6b42a8a19de", "score": "0.6427142", "text": "func (o *DeleteProvidersLocalByIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "26309361530b7e2f9e9d5c28783488ce", "score": "0.6415848", "text": "func SetCIDInContext(ctx context.Context, cid string) context.Context {\n\treturn context.WithValue(ctx, cidContextKeyName, cid)\n}", "title": "" }, { "docid": "71157e5873743a2fbdb2e7b809e524da", "score": "0.6400626", "text": "func (o *GetGroupFaxlineCallerIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "275a017b4f32006660537b249041d4f9", "score": "0.6399866", "text": "func (request *ProxyRequest) SetRequestID(value int64) {\n\trequest.ProxyMessage.SetRequestID(value)\n}", "title": "" }, { "docid": "227b5faf0aaa53e25974e4891d2c5cb1", "score": "0.6399403", "text": "func (c *Context) SetRequestId(requestId string) {\n\tc.Set(requestIdFieldKey, requestId)\n}", "title": "" }, { "docid": "f07d65729c41b7587c7545c309a0f147", "score": "0.6399114", "text": "func (o *PutCredentialRotationsRotationIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "80da00b1a235f68f9f91d6651302846d", "score": "0.6397053", "text": "func (o *SetRangerCloudIdentityMappingParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "6421a43b2fa7f12a75c91310b62fff50", "score": "0.63938755", "text": "func (o *PatchOperationsIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "05b4abf9e261cb91c92499de039a14cc", "score": "0.6387135", "text": "func SetRequestID(options RequestIDOptions) gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tvar requestID string\n\n\t\tif options.AllowSetting {\n\t\t\trequestID = c.Request.Header.Get(\"Set-Request-Id\")\n\t\t}\n\n\t\tif requestID == \"\" {\n\t\t\trequestID = uuid.New().String()\n\t\t}\n\n\t\tc.Writer.Header().Set(\"X-Request-Id\", requestID)\n\t\tc.Next()\n\t}\n}", "title": "" }, { "docid": "6bc5ea5558c5a21a9c3230a555e3463a", "score": "0.6387072", "text": "func (o *SMSReceiptsByMessageIDGetParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "1e14d570b52221dba1a0e604132e71c3", "score": "0.63756263", "text": 
"func WithChiRequestID(ctx context.Context) context.Context {\n\treqid := middleware.GetReqID(ctx)\n\treturn WithRequestID(ctx, reqid)\n}", "title": "" }, { "docid": "ec57d58e1e1c3d7919b118f4321b457c", "score": "0.6374897", "text": "func RequestIDFromContext(ctx context.Context) string {\n\treturn ctx.Value(requestIDKey).(string)\n}", "title": "" }, { "docid": "c73632c71493898ef8627ab54514fc62", "score": "0.6357882", "text": "func (ctx *HttpContext) SetTimeoutContext(timeout time.Duration) context.Context {\r\n\tctx.context, ctx.cancle = context.WithTimeout(context.Background(), timeout)\r\n\tctx.context = context.WithValue(ctx.context, \"RequestID\", ctx.Request().RequestID())\r\n\treturn ctx.context\r\n}", "title": "" }, { "docid": "4725ec179ba37fd674573ecec5566fec", "score": "0.63520837", "text": "func (o *JobModifyParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "d574a1ea61684498ed92ac45cefcd275", "score": "0.6351833", "text": "func (o *ChangeFaxlineCallerIDSettingsParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "ccfb7605fd6a2c5a376c289fb0a21138", "score": "0.6348924", "text": "func (o *GetIPAMDNSRecordsParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "de95ff555e79a22194356d1422ff5df7", "score": "0.634874", "text": "func (o *RechargeTransactionsByTransactionIDGetParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "554618cbf0572ae487220baba403e883", "score": "0.6348663", "text": "func (o *IssueEditIssueParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "06f1bb1e7764749c2b8c527b3ba7913d", "score": "0.6336207", "text": "func (o *GetKeysKeyIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "15234b4327ff9aeb775899f70a466464", "score": "0.6315221", "text": "func (o *GetMalQueryRequestV1Params) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "5da8e0cf8be7d4f868fcd74559c5445a", "score": "0.63100404", "text": "func (o *GetCustomersCustomerFidInvoicesInvoiceFidParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "afb87360ed6d2fc38f7abb03b440d88b", "score": "0.63007355", "text": "func NewRequestContext(c context.Context, timeout time.Duration, request *http.Request, response http.ResponseWriter, params Params) (context.Context, context.CancelFunc) {\n\tctx, cancel := context.WithTimeout(c, timeout)\n\n\tdata := requestData{\n\t\trequest: request,\n\t\tresponse: response,\n\t\tparams: params,\n\t}\n\n\tctx = context.WithValue(ctx, requestKey, data)\n\n\treturn ctx, cancel\n}", "title": "" }, { "docid": "b843ef2a231b7a62a83505cbf04743b0", "score": "0.62997776", "text": "func (o *ValidateIntegrationIDUsingGETParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "4ad8fd3a44e4faf6c3ac8cacf956a8fc", "score": "0.6299051", "text": "func (o *DeleteV1IncidentRolesIncidentRoleIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "193ef4f49905371fb927d190e318144e", "score": "0.62928665", "text": "func (o *UpdateCollectorGroupByIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "669cd6b5118f6de24bedd9bae5c74860", "score": "0.62732625", "text": "func PopulateRequestID(idGen IDGenerator) kithttp.RequestFunc {\n\treturn func(ctx context.Context, r *http.Request) 
context.Context {\n\t\tctx = context.WithValue(ctx, econst.RequestID, idGen())\n\t\treturn ctx\n\t}\n}", "title": "" }, { "docid": "e51821f9016002a189e0a25e4f4bbb66", "score": "0.62694806", "text": "func (o *JobsRerunParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "5abbaa1730c0d0c120160f8d28a51c28", "score": "0.6267519", "text": "func (o *GetIPAMIpsParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "27091c006d23c3e07a540062fb8adda0", "score": "0.626284", "text": "func (o *PutClusterClusterIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "7873df49417be49afe4a6576da381f3d", "score": "0.62626135", "text": "func (o *ReadNetworkingV1NamespacedIngressParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "e95763340496fe69af9a4f5d874daf67", "score": "0.62605536", "text": "func (o *GetClusterClusterIDTaskTaskTypeTaskIDHistoryParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "ea1b4b74538aae465894c47ace22fca5", "score": "0.6259287", "text": "func (o *GetClustersUUIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "62c07a1d3dabdb455f2b5dc4a2fbeb6f", "score": "0.62578475", "text": "func (o *PatchSystemObjectDefinitionsByIDAttributeDefinitionsByIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "21b577f4ff4c5cd61becf6c90f515d4d", "score": "0.6249434", "text": "func (o *PutV1ServicesServiceIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "249bcc14da31b7bc20b4be64d35eb0e6", "score": "0.62392026", "text": "func (o *PutDocumentsIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "ee9b9d1e43c9140282deecd585946943", "score": "0.62381047", "text": "func RequestID(contextID string) gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\trequestID := c.Request.Header.Get(RequestIDHeaderTag)\n\n\t\tif requestID == \"\" {\n\t\t\trequestID = uuid.New().String()\n\t\t}\n\n\t\tc.Set(contextID, requestID)\n\n\t\tc.Writer.Header().Set(RequestIDHeaderTag, requestID)\n\t\tc.Next()\n\t}\n}", "title": "" }, { "docid": "3e865c96a32fffa67656cd2ccef186ae", "score": "0.6236713", "text": "func (o *GETID4NParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "0e4afa80c5e15117147730a228d18765", "score": "0.62341523", "text": "func (o *PutLedgerVoucherIDSendToInboxParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "1bc800fed66b888829e298e973cbdfbf", "score": "0.6230056", "text": "func (o *GetV1IncidentsIncidentIDStatusPagesParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "7f9be2809db7c6dd6a2f9714cf3e5da3", "score": "0.62235", "text": "func (o *MergeIncidentsParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "fc4d6c22fc89b3a3dde2339b9f0d96ca", "score": "0.6216836", "text": "func (o *RemoveIncomingEntryParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "01c6fd0cb2deee8902e61d31387fbd2c", "score": "0.62125665", "text": "func RequestIDFromContext(ctx context.Context) string {\n\tv := ctx.Value(contextKeyRequestID)\n\tif v == nil {\n\t\treturn \"\"\n\t}\n\n\tt, ok := v.(string)\n\tif !ok {\n\t\treturn \"\"\n\t}\n\treturn t\n}", "title": "" }, { "docid": 
"b5bdf5f1ba538e02bd97260b7feadc61", "score": "0.62053096", "text": "func (o *GetImageFromDefaultByIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "0cd466fe5515e9b0a68b256fb8409e06", "score": "0.6202117", "text": "func (o *DeleteV1IncidentsIncidentIDStatusPagesStatusPageIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "3b679b8bba64fd92879c1a808fd15c4d", "score": "0.61999", "text": "func (o *PutIPLoadbalancingServiceNameHTTPRouteRouteIDRuleRuleIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "f1972c1df07d6ff032d5d56f3fd40ee9", "score": "0.61981755", "text": "func (o *PatchV1ChecklistTemplatesIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "76d5890ab490b7546199a199d02830d3", "score": "0.619672", "text": "func (o *ClaimTasksParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "beac263f3223f7786565f2cf2e2a9480", "score": "0.61964434", "text": "func (o *RolePrivilegeModifyParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "000e023beefbafab2c18b40c5daf2925", "score": "0.6193549", "text": "func (o *PutDevicesDeviceidLocationParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "e1bbc8b79bdf53b457d36e7d55fb1721", "score": "0.6190284", "text": "func (o *PutCwfNetworkIDNameParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "4b0e887f124fabc58e363fe02ac6582b", "score": "0.61874366", "text": "func (o *GetIPPoolsCredentialIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "1897f27c598008bdcd5c9cf5dfc9981d", "score": "0.6184216", "text": "func (o *GetV1TaskListsTaskListIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "9cf2ab107d01fdaef84a6b4ef5aefe23", "score": "0.61741376", "text": "func (o *GetTimeTrackingEntityParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "0b3ad0c75d07150e135c366ca26fe9f6", "score": "0.616837", "text": "func GetRequestIDFromContext(c *gin.Context) string {\n\tif v, ok := c.Get(XRequestIDKey); ok {\n\t\tif requestID, ok := v.(string); ok {\n\t\t\treturn requestID\n\t\t}\n\t}\n\n\treturn \"\"\n}", "title": "" }, { "docid": "db9cfc8e00182690e0e5299dc331991c", "score": "0.6164181", "text": "func (o *PutUsedForControllerFlexSubscriptionByIDParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "6abb287b514e78eac80b63d896281b82", "score": "0.61618656", "text": "func (o *ResumeSnapshotPlanParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "10037810ba5fefae4f94399cc8544e63", "score": "0.6156766", "text": "func GetCtxRequestID(c *gin.Context) (uuid.UUID, error) {\n\treturn getCtxID(c, RequestIDKey)\n}", "title": "" }, { "docid": "4d1f2970d892fe56e5ce2955c6ec674d", "score": "0.615506", "text": "func (o *NewIvrRecordingParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "24740405967c5a93dfd3d9ee344a58e8", "score": "0.61550415", "text": "func (o *CreateCertificateReplaceSpacesParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "21c89eb048b72cd9256bfe6bbfe7bca5", "score": "0.61520284", "text": "func (o *EmployeesByIDResetPasswordPutParams) SetContext(ctx context.Context) 
{\n\to.Context = ctx\n}", "title": "" }, { "docid": "0af0dbe5e7b5a2441e2f8250e6c9d8b1", "score": "0.61496943", "text": "func (o *PostVisitorsVisitorIDActionsActionKeyParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "66cc4b357453c4614286637ac6bc4bbf", "score": "0.61479896", "text": "func (o *GetStacksStackIDIntrusionDetectionReportsParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "f433d3ba3e8889aa3fde002dc94e87cf", "score": "0.6143421", "text": "func (o *WeaviateKeysRenewTokenParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "e59c70caa69fedd1f7d75a9d837d6299", "score": "0.6143202", "text": "func (o *GetBatchJobParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" } ]
103ceadf4422f3a089c903f5255e6e7d
UnmarshalJSON implements the json.Unmarshaller interface for type VolumeGroupProperties.
[ { "docid": "45b69298c04d41c7e6512feefb5efe22", "score": "0.8693563", "text": "func (v *VolumeGroupProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"encryption\":\n\t\t\terr = unpopulate(val, \"Encryption\", &v.Encryption)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"networkAcls\":\n\t\t\terr = unpopulate(val, \"NetworkACLs\", &v.NetworkACLs)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateEndpointConnections\":\n\t\t\terr = unpopulate(val, \"PrivateEndpointConnections\", &v.PrivateEndpointConnections)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"protocolType\":\n\t\t\terr = unpopulate(val, \"ProtocolType\", &v.ProtocolType)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &v.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" } ]
[ { "docid": "5372041f1ad2d36f413aa910379428d0", "score": "0.7961605", "text": "func (v *VolumeGroup) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"id\":\n\t\t\terr = unpopulate(val, \"ID\", &v.ID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &v.Name)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, \"Properties\", &v.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"systemData\":\n\t\t\terr = unpopulate(val, \"SystemData\", &v.SystemData)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"type\":\n\t\t\terr = unpopulate(val, \"Type\", &v.Type)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "cc61ef68dd7a8b2bde68b23ff5e561a3", "score": "0.7744811", "text": "func (v *VolumeGroupUpdateProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"encryption\":\n\t\t\terr = unpopulate(val, \"Encryption\", &v.Encryption)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"networkAcls\":\n\t\t\terr = unpopulate(val, \"NetworkACLs\", &v.NetworkACLs)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"protocolType\":\n\t\t\terr = unpopulate(val, \"ProtocolType\", &v.ProtocolType)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6f7cd3d981e86a12b9119356407b5ff6", "score": "0.7551996", "text": "func (v *VolumeGroupUpdate) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, \"Properties\", &v.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "fdb5c508b5ff15baa3f19d0d8428678c", "score": "0.7259809", "text": "func (v *VolumeProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"creationData\":\n\t\t\terr = unpopulate(val, \"CreationData\", &v.CreationData)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"sizeGiB\":\n\t\t\terr = unpopulate(val, \"SizeGiB\", &v.SizeGiB)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"storageTarget\":\n\t\t\terr = unpopulate(val, \"StorageTarget\", &v.StorageTarget)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"volumeId\":\n\t\t\terr = unpopulate(val, \"VolumeID\", &v.VolumeID)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6263eb9001b3662c889353cb5d99942d", "score": "0.7004013", 
"text": "func (v *VolumeUpdateProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"sizeGiB\":\n\t\t\terr = unpopulate(val, \"SizeGiB\", &v.SizeGiB)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "03a8e1f913371ae35e8bc2e4ce2c732b", "score": "0.6860393", "text": "func (v *VolumeGroupList) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &v.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &v.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "ac2ca880c7ee63ec762c1dce5fa6e780", "score": "0.6717019", "text": "func (s *ServerGroupProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"administratorLogin\":\n\t\t\terr = unpopulate(val, \"AdministratorLogin\", &s.AdministratorLogin)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"administratorLoginPassword\":\n\t\t\terr = unpopulate(val, \"AdministratorLoginPassword\", &s.AdministratorLoginPassword)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"availabilityZone\":\n\t\t\terr = unpopulate(val, \"AvailabilityZone\", &s.AvailabilityZone)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"backupRetentionDays\":\n\t\t\terr = unpopulate(val, \"BackupRetentionDays\", &s.BackupRetentionDays)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"citusVersion\":\n\t\t\terr = unpopulate(val, \"CitusVersion\", &s.CitusVersion)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"createMode\":\n\t\t\terr = unpopulate(val, \"CreateMode\", &s.CreateMode)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"delegatedSubnetArguments\":\n\t\t\terr = unpopulate(val, \"DelegatedSubnetArguments\", &s.DelegatedSubnetArguments)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"earliestRestoreTime\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"EarliestRestoreTime\", &s.EarliestRestoreTime)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"enableMx\":\n\t\t\terr = unpopulate(val, \"EnableMx\", &s.EnableMx)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"enableShardsOnCoordinator\":\n\t\t\terr = unpopulate(val, \"EnableShardsOnCoordinator\", &s.EnableShardsOnCoordinator)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"enableZfs\":\n\t\t\terr = unpopulate(val, \"EnableZfs\", &s.EnableZfs)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"maintenanceWindow\":\n\t\t\terr = unpopulate(val, \"MaintenanceWindow\", &s.MaintenanceWindow)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"pointInTimeUTC\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"PointInTimeUTC\", &s.PointInTimeUTC)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"postgresqlVersion\":\n\t\t\terr = unpopulate(val, \"PostgresqlVersion\", &s.PostgresqlVersion)\n\t\t\tdelete(rawMsg, key)\n\t\tcase 
\"privateDnsZoneArguments\":\n\t\t\terr = unpopulate(val, \"PrivateDNSZoneArguments\", &s.PrivateDNSZoneArguments)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"readReplicas\":\n\t\t\terr = unpopulate(val, \"ReadReplicas\", &s.ReadReplicas)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"resourceProviderType\":\n\t\t\terr = unpopulate(val, \"ResourceProviderType\", &s.ResourceProviderType)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"serverRoleGroups\":\n\t\t\terr = unpopulate(val, \"ServerRoleGroups\", &s.ServerRoleGroups)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"sourceLocation\":\n\t\t\terr = unpopulate(val, \"SourceLocation\", &s.SourceLocation)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"sourceResourceGroupName\":\n\t\t\terr = unpopulate(val, \"SourceResourceGroupName\", &s.SourceResourceGroupName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"sourceServerGroup\":\n\t\t\terr = unpopulate(val, \"SourceServerGroup\", &s.SourceServerGroup)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"sourceServerGroupName\":\n\t\t\terr = unpopulate(val, \"SourceServerGroupName\", &s.SourceServerGroupName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"sourceSubscriptionId\":\n\t\t\terr = unpopulate(val, \"SourceSubscriptionID\", &s.SourceSubscriptionID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"standbyAvailabilityZone\":\n\t\t\terr = unpopulate(val, \"StandbyAvailabilityZone\", &s.StandbyAvailabilityZone)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"state\":\n\t\t\terr = unpopulate(val, \"State\", &s.State)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "0e397068f3358d64954c8af92b321027", "score": "0.6660944", "text": "func (p *Properties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"availabilityZones\":\n\t\t\terr = unpopulate(val, \"AvailabilityZones\", &p.AvailabilityZones)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"baseSizeTiB\":\n\t\t\terr = unpopulate(val, \"BaseSizeTiB\", &p.BaseSizeTiB)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"extendedCapacitySizeTiB\":\n\t\t\terr = unpopulate(val, \"ExtendedCapacitySizeTiB\", &p.ExtendedCapacitySizeTiB)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateEndpointConnections\":\n\t\t\terr = unpopulate(val, \"PrivateEndpointConnections\", &p.PrivateEndpointConnections)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &p.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"sku\":\n\t\t\terr = unpopulate(val, \"SKU\", &p.SKU)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"totalIops\":\n\t\t\terr = unpopulate(val, \"TotalIops\", &p.TotalIops)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"totalMBps\":\n\t\t\terr = unpopulate(val, \"TotalMBps\", &p.TotalMBps)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"totalSizeTiB\":\n\t\t\terr = unpopulate(val, \"TotalSizeTiB\", &p.TotalSizeTiB)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"totalVolumeSizeGiB\":\n\t\t\terr = unpopulate(val, \"TotalVolumeSizeGiB\", &p.TotalVolumeSizeGiB)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"volumeGroupCount\":\n\t\t\terr = unpopulate(val, \"VolumeGroupCount\", &p.VolumeGroupCount)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": 
"a1a0dea404d236c254a205b25eac56d7", "score": "0.6615057", "text": "func (s *SmartGroupProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"alertSeverities\":\n\t\t\terr = unpopulate(val, \"AlertSeverities\", &s.AlertSeverities)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"alertStates\":\n\t\t\terr = unpopulate(val, \"AlertStates\", &s.AlertStates)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"alertsCount\":\n\t\t\terr = unpopulate(val, \"AlertsCount\", &s.AlertsCount)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"lastModifiedDateTime\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"LastModifiedDateTime\", &s.LastModifiedDateTime)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"lastModifiedUserName\":\n\t\t\terr = unpopulate(val, \"LastModifiedUserName\", &s.LastModifiedUserName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"monitorConditions\":\n\t\t\terr = unpopulate(val, \"MonitorConditions\", &s.MonitorConditions)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"monitorServices\":\n\t\t\terr = unpopulate(val, \"MonitorServices\", &s.MonitorServices)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &s.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"resourceGroups\":\n\t\t\terr = unpopulate(val, \"ResourceGroups\", &s.ResourceGroups)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"resourceTypes\":\n\t\t\terr = unpopulate(val, \"ResourceTypes\", &s.ResourceTypes)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"resources\":\n\t\t\terr = unpopulate(val, \"Resources\", &s.Resources)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"severity\":\n\t\t\terr = unpopulate(val, \"Severity\", &s.Severity)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"smartGroupState\":\n\t\t\terr = unpopulate(val, \"SmartGroupState\", &s.SmartGroupState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"startDateTime\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"StartDateTime\", &s.StartDateTime)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "52933c0fcd03203b2399b9db0ffcb276", "score": "0.6543401", "text": "func (v *VolumeUpdate) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, \"Properties\", &v.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8ddc4e3e35374228f0686fb01187eb96", "score": "0.65321636", "text": "func (v VolumeGroupProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"encryption\", v.Encryption)\n\tpopulate(objectMap, \"networkAcls\", v.NetworkACLs)\n\tpopulate(objectMap, \"privateEndpointConnections\", v.PrivateEndpointConnections)\n\tpopulate(objectMap, \"protocolType\", v.ProtocolType)\n\tpopulate(objectMap, \"provisioningState\", v.ProvisioningState)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "488156ef93777efffeaf24515d985ce6", "score": "0.64301634", "text": "func (v *Volume) UnmarshalJSON(data []byte) 
error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"id\":\n\t\t\terr = unpopulate(val, \"ID\", &v.ID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &v.Name)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, \"Properties\", &v.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"systemData\":\n\t\t\terr = unpopulate(val, \"SystemData\", &v.SystemData)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"type\":\n\t\t\terr = unpopulate(val, \"Type\", &v.Type)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "639d78de2ab49493a7eee8e99af1ec1f", "score": "0.6342277", "text": "func (s *ServerGroupConfigurationProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"allowedValues\":\n\t\t\terr = unpopulate(val, \"AllowedValues\", &s.AllowedValues)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"dataType\":\n\t\t\terr = unpopulate(val, \"DataType\", &s.DataType)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"description\":\n\t\t\terr = unpopulate(val, \"Description\", &s.Description)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"serverRoleGroupConfigurations\":\n\t\t\terr = unpopulate(val, \"ServerRoleGroupConfigurations\", &s.ServerRoleGroupConfigurations)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "3caa5fcceb94f037a29d0bc05d38e1da", "score": "0.6332304", "text": "func (f *FleetMemberUpdateProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", f, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"group\":\n\t\t\terr = unpopulate(val, \"Group\", &f.Group)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", f, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "22fb2e01652299900e0600e6dbcb5859", "score": "0.6293958", "text": "func (cg *ClientGroupProperties) UnmarshalJSON(body []byte) error {\n\tvar m map[string]*json.RawMessage\n\terr := json.Unmarshal(body, &m)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor k, v := range m {\n\t\tswitch k {\n\t\tcase \"clientsOf\":\n\t\t\tif v != nil {\n\t\t\t\tclientsOf, err := unmarshalBasicResourceReference(*v)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcg.ClientsOf = clientsOf\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "68013989c24b02741c953c243499a994", "score": "0.62443495", "text": "func (v *VirtualNetworkLinkPatchProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"metadata\":\n\t\t\terr = 
unpopulate(val, \"Metadata\", &v.Metadata)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "0b2a849a093334ae31597cf384264ef7", "score": "0.62001604", "text": "func (s *ServerGroupPropertiesForUpdate) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"administratorLoginPassword\":\n\t\t\terr = unpopulate(val, \"AdministratorLoginPassword\", &s.AdministratorLoginPassword)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"availabilityZone\":\n\t\t\terr = unpopulate(val, \"AvailabilityZone\", &s.AvailabilityZone)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"backupRetentionDays\":\n\t\t\terr = unpopulate(val, \"BackupRetentionDays\", &s.BackupRetentionDays)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"citusVersion\":\n\t\t\terr = unpopulate(val, \"CitusVersion\", &s.CitusVersion)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"enableShardsOnCoordinator\":\n\t\t\terr = unpopulate(val, \"EnableShardsOnCoordinator\", &s.EnableShardsOnCoordinator)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"maintenanceWindow\":\n\t\t\terr = unpopulate(val, \"MaintenanceWindow\", &s.MaintenanceWindow)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"postgresqlVersion\":\n\t\t\terr = unpopulate(val, \"PostgresqlVersion\", &s.PostgresqlVersion)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"serverRoleGroups\":\n\t\t\terr = unpopulate(val, \"ServerRoleGroups\", &s.ServerRoleGroups)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"standbyAvailabilityZone\":\n\t\t\terr = unpopulate(val, \"StandbyAvailabilityZone\", &s.StandbyAvailabilityZone)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "a689db748ae4e272a7a437be2958a380", "score": "0.61711764", "text": "func (s *ServerGroupServerProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"administratorLogin\":\n\t\t\terr = unpopulate(val, \"AdministratorLogin\", &s.AdministratorLogin)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"availabilityZone\":\n\t\t\terr = unpopulate(val, \"AvailabilityZone\", &s.AvailabilityZone)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"citusVersion\":\n\t\t\terr = unpopulate(val, \"CitusVersion\", &s.CitusVersion)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"enableHa\":\n\t\t\terr = unpopulate(val, \"EnableHa\", &s.EnableHa)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"enablePublicIp\":\n\t\t\terr = unpopulate(val, \"EnablePublicIP\", &s.EnablePublicIP)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"fullyQualifiedDomainName\":\n\t\t\terr = unpopulate(val, \"FullyQualifiedDomainName\", &s.FullyQualifiedDomainName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"haState\":\n\t\t\terr = unpopulate(val, \"HaState\", &s.HaState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"postgresqlVersion\":\n\t\t\terr = unpopulate(val, \"PostgresqlVersion\", &s.PostgresqlVersion)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"role\":\n\t\t\terr = unpopulate(val, \"Role\", &s.Role)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"serverEdition\":\n\t\t\terr = unpopulate(val, 
\"ServerEdition\", &s.ServerEdition)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"standbyAvailabilityZone\":\n\t\t\terr = unpopulate(val, \"StandbyAvailabilityZone\", &s.StandbyAvailabilityZone)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"state\":\n\t\t\terr = unpopulate(val, \"State\", &s.State)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"storageQuotaInMb\":\n\t\t\terr = unpopulate(val, \"StorageQuotaInMb\", &s.StorageQuotaInMb)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"vCores\":\n\t\t\terr = unpopulate(val, \"VCores\", &s.VCores)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d4e9fca1d32a259d9e180ff750aef036", "score": "0.6168287", "text": "func (v *ReleaseObjectGroupParams) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjsonC5a4559bDecodeGithubComChromedpCdprotoRuntime12(&r, v)\n\treturn r.Error()\n}", "title": "" }, { "docid": "4f1d276f93f91bfc80324943336c0b86", "score": "0.6158289", "text": "func (p *PrivateLinkResourceProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"groupId\":\n\t\t\terr = unpopulate(val, \"GroupID\", &p.GroupID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"requiredMembers\":\n\t\t\terr = unpopulate(val, \"RequiredMembers\", &p.RequiredMembers)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"requiredZoneNames\":\n\t\t\terr = unpopulate(val, \"RequiredZoneNames\", &p.RequiredZoneNames)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "4f1d276f93f91bfc80324943336c0b86", "score": "0.6158289", "text": "func (p *PrivateLinkResourceProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"groupId\":\n\t\t\terr = unpopulate(val, \"GroupID\", &p.GroupID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"requiredMembers\":\n\t\t\terr = unpopulate(val, \"RequiredMembers\", &p.RequiredMembers)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"requiredZoneNames\":\n\t\t\terr = unpopulate(val, \"RequiredZoneNames\", &p.RequiredZoneNames)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "4f1d276f93f91bfc80324943336c0b86", "score": "0.6158289", "text": "func (p *PrivateLinkResourceProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"groupId\":\n\t\t\terr = unpopulate(val, \"GroupID\", &p.GroupID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"requiredMembers\":\n\t\t\terr = unpopulate(val, \"RequiredMembers\", &p.RequiredMembers)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"requiredZoneNames\":\n\t\t\terr = unpopulate(val, \"RequiredZoneNames\", &p.RequiredZoneNames)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn 
fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "459a6eeeee5a8a9fca04f364b39fa9e5", "score": "0.6157313", "text": "func (mg *MachineGroup) UnmarshalJSON(body []byte) error {\n\tvar m map[string]*json.RawMessage\n\terr := json.Unmarshal(body, &m)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor k, v := range m {\n\t\tswitch k {\n\t\tcase \"properties\":\n\t\t\tif v != nil {\n\t\t\t\tvar machineGroupProperties MachineGroupProperties\n\t\t\t\terr = json.Unmarshal(*v, &machineGroupProperties)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tmg.MachineGroupProperties = &machineGroupProperties\n\t\t\t}\n\t\tcase \"etag\":\n\t\t\tif v != nil {\n\t\t\t\tvar etag string\n\t\t\t\terr = json.Unmarshal(*v, &etag)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tmg.Etag = &etag\n\t\t\t}\n\t\tcase \"kind\":\n\t\t\tif v != nil {\n\t\t\t\tvar kind KindBasicCoreResource\n\t\t\t\terr = json.Unmarshal(*v, &kind)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tmg.Kind = kind\n\t\t\t}\n\t\tcase \"id\":\n\t\t\tif v != nil {\n\t\t\t\tvar ID string\n\t\t\t\terr = json.Unmarshal(*v, &ID)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tmg.ID = &ID\n\t\t\t}\n\t\tcase \"type\":\n\t\t\tif v != nil {\n\t\t\t\tvar typeVar string\n\t\t\t\terr = json.Unmarshal(*v, &typeVar)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tmg.Type = &typeVar\n\t\t\t}\n\t\tcase \"name\":\n\t\t\tif v != nil {\n\t\t\t\tvar name string\n\t\t\t\terr = json.Unmarshal(*v, &name)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tmg.Name = &name\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "e9441ef948c3b6f1ee92ae9d07169f60", "score": "0.61308146", "text": "func (u *UpdateGroup) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &u.Name)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "bf232fa74c66b7dc19e62dd5fe091d91", "score": "0.60629874", "text": "func (p *PrivateCloudProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"availabilityZoneId\":\n\t\t\terr = unpopulate(val, \"AvailabilityZoneID\", &p.AvailabilityZoneID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"availabilityZoneName\":\n\t\t\terr = unpopulate(val, \"AvailabilityZoneName\", &p.AvailabilityZoneName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"clustersNumber\":\n\t\t\terr = unpopulate(val, \"ClustersNumber\", &p.ClustersNumber)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"createdBy\":\n\t\t\terr = unpopulate(val, \"CreatedBy\", &p.CreatedBy)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"createdOn\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"CreatedOn\", &p.CreatedOn)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"dnsServers\":\n\t\t\terr = unpopulate(val, \"DNSServers\", &p.DNSServers)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"expires\":\n\t\t\terr = unpopulate(val, \"Expires\", 
&p.Expires)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"nsxType\":\n\t\t\terr = unpopulate(val, \"NsxType\", &p.NsxType)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"placementGroupId\":\n\t\t\terr = unpopulate(val, \"PlacementGroupID\", &p.PlacementGroupID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"placementGroupName\":\n\t\t\terr = unpopulate(val, \"PlacementGroupName\", &p.PlacementGroupName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateCloudId\":\n\t\t\terr = unpopulate(val, \"PrivateCloudID\", &p.PrivateCloudID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"resourcePools\":\n\t\t\terr = unpopulate(val, \"ResourcePools\", &p.ResourcePools)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"state\":\n\t\t\terr = unpopulate(val, \"State\", &p.State)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"totalCpuCores\":\n\t\t\terr = unpopulate(val, \"TotalCPUCores\", &p.TotalCPUCores)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"totalNodes\":\n\t\t\terr = unpopulate(val, \"TotalNodes\", &p.TotalNodes)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"totalRam\":\n\t\t\terr = unpopulate(val, \"TotalRAM\", &p.TotalRAM)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"totalStorage\":\n\t\t\terr = unpopulate(val, \"TotalStorage\", &p.TotalStorage)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"type\":\n\t\t\terr = unpopulate(val, \"Type\", &p.Type)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"vSphereVersion\":\n\t\t\terr = unpopulate(val, \"VSphereVersion\", &p.VSphereVersion)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"vcenterFqdn\":\n\t\t\terr = unpopulate(val, \"VcenterFqdn\", &p.VcenterFqdn)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"vcenterRefid\":\n\t\t\terr = unpopulate(val, \"VcenterRefid\", &p.VcenterRefid)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"virtualMachineTemplates\":\n\t\t\terr = unpopulate(val, \"VirtualMachineTemplates\", &p.VirtualMachineTemplates)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"virtualNetworks\":\n\t\t\terr = unpopulate(val, \"VirtualNetworks\", &p.VirtualNetworks)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"vrOpsEnabled\":\n\t\t\terr = unpopulate(val, \"VrOpsEnabled\", &p.VrOpsEnabled)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "119b4c9b210cd6593ce46e82d7e981e8", "score": "0.6037709", "text": "func (k *KeyVaultProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", k, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"identity\":\n\t\t\terr = unpopulate(val, \"Identity\", &k.Identity)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"keyIdentifier\":\n\t\t\terr = unpopulate(val, \"KeyIdentifier\", &k.KeyIdentifier)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"keyRotationEnabled\":\n\t\t\terr = unpopulate(val, \"KeyRotationEnabled\", &k.KeyRotationEnabled)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"lastKeyRotationTimestamp\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"LastKeyRotationTimestamp\", &k.LastKeyRotationTimestamp)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"versionedKeyIdentifier\":\n\t\t\terr = unpopulate(val, \"VersionedKeyIdentifier\", &k.VersionedKeyIdentifier)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", k, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d6dda07e6524ef2f931789476d2ddb80", "score": "0.6036207", "text": "func (p *PrivateEndpointConnectionProperties) UnmarshalJSON(data 
[]byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"groupIds\":\n\t\t\terr = unpopulate(val, \"GroupIDs\", &p.GroupIDs)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateEndpoint\":\n\t\t\terr = unpopulate(val, \"PrivateEndpoint\", &p.PrivateEndpoint)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateLinkServiceConnectionState\":\n\t\t\terr = unpopulate(val, \"PrivateLinkServiceConnectionState\", &p.PrivateLinkServiceConnectionState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &p.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d6dda07e6524ef2f931789476d2ddb80", "score": "0.6036207", "text": "func (p *PrivateEndpointConnectionProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"groupIds\":\n\t\t\terr = unpopulate(val, \"GroupIDs\", &p.GroupIDs)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateEndpoint\":\n\t\t\terr = unpopulate(val, \"PrivateEndpoint\", &p.PrivateEndpoint)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateLinkServiceConnectionState\":\n\t\t\terr = unpopulate(val, \"PrivateLinkServiceConnectionState\", &p.PrivateLinkServiceConnectionState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &p.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "107d8ab3da63daa009db7e76671825fd", "score": "0.60343397", "text": "func (p *PrivateEndpointProperty) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"id\":\n\t\t\terr = unpopulate(val, \"ID\", &p.ID)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d9f3eddc52cc140ebb39f8e21f217234", "score": "0.6024543", "text": "func (t *TagSettingsProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", t, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"filterOperator\":\n\t\t\terr = unpopulate(val, \"FilterOperator\", &t.FilterOperator)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"tags\":\n\t\t\terr = unpopulate(val, \"Tags\", &t.Tags)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", t, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "a3ec561318a2b7ee5756d53a397e589d", "score": "0.6022195", "text": "func (u *UpdateProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg 
map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"baseSizeTiB\":\n\t\t\terr = unpopulate(val, \"BaseSizeTiB\", &u.BaseSizeTiB)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"extendedCapacitySizeTiB\":\n\t\t\terr = unpopulate(val, \"ExtendedCapacitySizeTiB\", &u.ExtendedCapacitySizeTiB)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d7e6b610a2fbd16ec7b0e92245ac016a", "score": "0.601754", "text": "func (cg *ClientGroup) UnmarshalJSON(body []byte) error {\n\tvar m map[string]*json.RawMessage\n\terr := json.Unmarshal(body, &m)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor k, v := range m {\n\t\tswitch k {\n\t\tcase \"properties\":\n\t\t\tif v != nil {\n\t\t\t\tvar clientGroupProperties ClientGroupProperties\n\t\t\t\terr = json.Unmarshal(*v, &clientGroupProperties)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcg.ClientGroupProperties = &clientGroupProperties\n\t\t\t}\n\t\tcase \"etag\":\n\t\t\tif v != nil {\n\t\t\t\tvar etag string\n\t\t\t\terr = json.Unmarshal(*v, &etag)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcg.Etag = &etag\n\t\t\t}\n\t\tcase \"kind\":\n\t\t\tif v != nil {\n\t\t\t\tvar kind KindBasicCoreResource\n\t\t\t\terr = json.Unmarshal(*v, &kind)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcg.Kind = kind\n\t\t\t}\n\t\tcase \"id\":\n\t\t\tif v != nil {\n\t\t\t\tvar ID string\n\t\t\t\terr = json.Unmarshal(*v, &ID)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcg.ID = &ID\n\t\t\t}\n\t\tcase \"type\":\n\t\t\tif v != nil {\n\t\t\t\tvar typeVar string\n\t\t\t\terr = json.Unmarshal(*v, &typeVar)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcg.Type = &typeVar\n\t\t\t}\n\t\tcase \"name\":\n\t\t\tif v != nil {\n\t\t\t\tvar name string\n\t\t\t\terr = json.Unmarshal(*v, &name)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcg.Name = &name\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "714bc37cc4adbdef8c1c5251867796dd", "score": "0.6006581", "text": "func (v VolumeGroupUpdateProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"encryption\", v.Encryption)\n\tpopulate(objectMap, \"networkAcls\", v.NetworkACLs)\n\tpopulate(objectMap, \"protocolType\", v.ProtocolType)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "6c0c41892704f26d369867e3a49d1269", "score": "0.5984348", "text": "func (v *VirtualNetworkLinkProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"metadata\":\n\t\t\terr = unpopulate(val, \"Metadata\", &v.Metadata)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &v.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"virtualNetwork\":\n\t\t\terr = unpopulate(val, \"VirtualNetwork\", &v.VirtualNetwork)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, 
{ "docid": "cb10e2567f06ce28be8cbb2378a776d3", "score": "0.5983273", "text": "func (m *Group) UnmarshalJSON(b []byte) error {\n\treturn GroupJSONUnmarshaler.Unmarshal(bytes.NewReader(b), m)\n}", "title": "" }, { "docid": "076da1e46b8298736cd2b67a324a90ea", "score": "0.59457934", "text": "func (m *StoragePureProtectionGroup) UnmarshalJSON(raw []byte) error {\n\t// AO0\n\tvar aO0 StorageProtectionGroup\n\tif err := swag.ReadJSON(raw, &aO0); err != nil {\n\t\treturn err\n\t}\n\tm.StorageProtectionGroup = aO0\n\n\t// AO1\n\tvar dataAO1 struct {\n\t\tHostGroups []*StoragePureHostRef `json:\"HostGroups\"`\n\n\t\tHosts []*StoragePureHostRef `json:\"Hosts\"`\n\n\t\tRegisteredDevice *AssetDeviceRegistrationRef `json:\"RegisteredDevice,omitempty\"`\n\n\t\tSize int64 `json:\"Size,omitempty\"`\n\n\t\tSource string `json:\"Source,omitempty\"`\n\n\t\tTargets []string `json:\"Targets\"`\n\n\t\tVolumes []*StoragePureVolumeRef `json:\"Volumes\"`\n\t}\n\tif err := swag.ReadJSON(raw, &dataAO1); err != nil {\n\t\treturn err\n\t}\n\n\tm.HostGroups = dataAO1.HostGroups\n\n\tm.Hosts = dataAO1.Hosts\n\n\tm.RegisteredDevice = dataAO1.RegisteredDevice\n\n\tm.Size = dataAO1.Size\n\n\tm.Source = dataAO1.Source\n\n\tm.Targets = dataAO1.Targets\n\n\tm.Volumes = dataAO1.Volumes\n\n\treturn nil\n}", "title": "" }, { "docid": "ac94db4b20f42835357dbdf698708e39", "score": "0.5924361", "text": "func (p *Properties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"dnsResolverState\":\n\t\t\terr = unpopulate(val, \"DNSResolverState\", &p.DNSResolverState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &p.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"resourceGuid\":\n\t\t\terr = unpopulate(val, \"ResourceGUID\", &p.ResourceGUID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"virtualNetwork\":\n\t\t\terr = unpopulate(val, \"VirtualNetwork\", &p.VirtualNetwork)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "5a5a9a9227399aef0a894711e11bde14", "score": "0.5906274", "text": "func (s *ServerGroupConfiguration) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"id\":\n\t\t\terr = unpopulate(val, \"ID\", &s.ID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &s.Name)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, \"Properties\", &s.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"systemData\":\n\t\t\terr = unpopulate(val, \"SystemData\", &s.SystemData)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"type\":\n\t\t\terr = unpopulate(val, \"Type\", &s.Type)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "aefb9b2c682efed0aa06b9eacf96496d", "score": "0.5900116", "text": "func (s *ServerGroupForUpdate) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := 
json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"location\":\n\t\t\terr = unpopulate(val, \"Location\", &s.Location)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, \"Properties\", &s.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"tags\":\n\t\t\terr = unpopulate(val, \"Tags\", &s.Tags)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8727beaa6a0b398ca8aa6f4ad2e7c2db", "score": "0.5896666", "text": "func (s *ServerGroup) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"id\":\n\t\t\terr = unpopulate(val, \"ID\", &s.ID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"location\":\n\t\t\terr = unpopulate(val, \"Location\", &s.Location)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &s.Name)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, \"Properties\", &s.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"systemData\":\n\t\t\terr = unpopulate(val, \"SystemData\", &s.SystemData)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"tags\":\n\t\t\terr = unpopulate(val, \"Tags\", &s.Tags)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"type\":\n\t\t\terr = unpopulate(val, \"Type\", &s.Type)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "2f6c95322707230059cc234e0ded53d3", "score": "0.5883077", "text": "func (a *AntiAffinityGroup_Tags) UnmarshalJSON(b []byte) error {\n\tobject := make(map[string]json.RawMessage)\n\terr := json.Unmarshal(b, &object)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif len(object) != 0 {\n\t\ta.AdditionalProperties = make(map[string]string)\n\t\tfor fieldName, fieldBuf := range object {\n\t\t\tvar fieldVal string\n\t\t\terr := json.Unmarshal(fieldBuf, &fieldVal)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.Wrap(err, fmt.Sprintf(\"error unmarshaling field %s\", fieldName))\n\t\t\t}\n\t\t\ta.AdditionalProperties[fieldName] = fieldVal\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "995b92826ab8b07258ce43a43b7f6c7b", "score": "0.5869016", "text": "func (r *Volume) UnmarshalJSON(b []byte) error {\n\ttype tmp Volume\n\tvar s struct {\n\t\ttmp\n\t\tCreatedAt gophercloud.JSONRFC3339MilliNoZ `json:\"created_at\"`\n\t\tUpdatedAt gophercloud.JSONRFC3339MilliNoZ `json:\"updated_at\"`\n\t}\n\terr := json.Unmarshal(b, &s)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*r = Volume(s.tmp)\n\n\tr.CreatedAt = time.Time(s.CreatedAt)\n\tr.UpdatedAt = time.Time(s.UpdatedAt)\n\n\treturn err\n}", "title": "" }, { "docid": "9f0d003a0e64b4611332352300dd7293", "score": "0.58655477", "text": "func (r *RegistryProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", r, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"adminUserEnabled\":\n\t\t\terr = unpopulate(val, \"AdminUserEnabled\", 
&r.AdminUserEnabled)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"anonymousPullEnabled\":\n\t\t\terr = unpopulate(val, \"AnonymousPullEnabled\", &r.AnonymousPullEnabled)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"creationDate\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"CreationDate\", &r.CreationDate)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"dataEndpointEnabled\":\n\t\t\terr = unpopulate(val, \"DataEndpointEnabled\", &r.DataEndpointEnabled)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"dataEndpointHostNames\":\n\t\t\terr = unpopulate(val, \"DataEndpointHostNames\", &r.DataEndpointHostNames)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"encryption\":\n\t\t\terr = unpopulate(val, \"Encryption\", &r.Encryption)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"loginServer\":\n\t\t\terr = unpopulate(val, \"LoginServer\", &r.LoginServer)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"networkRuleBypassOptions\":\n\t\t\terr = unpopulate(val, \"NetworkRuleBypassOptions\", &r.NetworkRuleBypassOptions)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"networkRuleSet\":\n\t\t\terr = unpopulate(val, \"NetworkRuleSet\", &r.NetworkRuleSet)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"policies\":\n\t\t\terr = unpopulate(val, \"Policies\", &r.Policies)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateEndpointConnections\":\n\t\t\terr = unpopulate(val, \"PrivateEndpointConnections\", &r.PrivateEndpointConnections)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &r.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"publicNetworkAccess\":\n\t\t\terr = unpopulate(val, \"PublicNetworkAccess\", &r.PublicNetworkAccess)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"status\":\n\t\t\terr = unpopulate(val, \"Status\", &r.Status)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"zoneRedundancy\":\n\t\t\terr = unpopulate(val, \"ZoneRedundancy\", &r.ZoneRedundancy)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", r, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c68f76ff1570dcca8d43629ab6870a41", "score": "0.5819926", "text": "func (obj *ProductVariantImport) UnmarshalJSON(data []byte) error {\n\ttype Alias ProductVariantImport\n\tif err := json.Unmarshal(data, (*Alias)(obj)); err != nil {\n\t\treturn err\n\t}\n\tfor i := range obj.Attributes {\n\t\tvar err error\n\t\tobj.Attributes[i], err = mapDiscriminatorAttribute(obj.Attributes[i])\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "3a81c25671c8568ddcc1af0028a4a278", "score": "0.5813013", "text": "func (d *DimensionProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"displayName\":\n\t\t\terr = unpopulate(val, \"DisplayName\", &d.DisplayName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &d.Name)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"toBeExportedForCustomer\":\n\t\t\terr = unpopulate(val, \"ToBeExportedForCustomer\", &d.ToBeExportedForCustomer)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "71322d0b6d29e6821807a73525f4c2a0", "score": "0.5811974", "text": "func (s *SecretProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg 
map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"contentType\":\n\t\t\terr = unpopulate(val, \"ContentType\", &s.ContentType)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "7851ca504e8c1242b252b21557b496e8", "score": "0.58105505", "text": "func (o *MonitorOptionsAggregation) UnmarshalJSON(bytes []byte) (err error) {\n\traw := map[string]interface{}{}\n\tall := struct {\n\t\tGroupBy *string `json:\"group_by,omitempty\"`\n\t\tMetric *string `json:\"metric,omitempty\"`\n\t\tType *string `json:\"type,omitempty\"`\n\t}{}\n\terr = json.Unmarshal(bytes, &all)\n\tif err != nil {\n\t\terr = json.Unmarshal(bytes, &raw)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\to.UnparsedObject = raw\n\t\treturn nil\n\t}\n\to.GroupBy = all.GroupBy\n\to.Metric = all.Metric\n\to.Type = all.Type\n\treturn nil\n}", "title": "" }, { "docid": "362043a5e7177ad0a5bf05c7c5f5c9dd", "score": "0.57859725", "text": "func (p *Properties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"TableName\":\n\t\t\terr = unpopulate(val, \"TableName\", &p.TableName)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "1395abbd295c0ecc15980e3184cf42b8", "score": "0.57675827", "text": "func (f *FleetMemberProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", f, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"clusterResourceId\":\n\t\t\terr = unpopulate(val, \"ClusterResourceID\", &f.ClusterResourceID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"group\":\n\t\t\terr = unpopulate(val, \"Group\", &f.Group)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &f.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", f, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e2c9e0ffa86846d0cc58f6e7513eaf1d", "score": "0.5761517", "text": "func (s *SubscriptionProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"locationPlacementId\":\n\t\t\terr = unpopulate(val, \"LocationPlacementID\", &s.LocationPlacementID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"quotaId\":\n\t\t\terr = unpopulate(val, \"QuotaID\", &s.QuotaID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"registeredFeatures\":\n\t\t\terr = unpopulate(val, \"RegisteredFeatures\", &s.RegisteredFeatures)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"serializedDetails\":\n\t\t\terr = unpopulate(val, \"SerializedDetails\", &s.SerializedDetails)\n\t\t\tdelete(rawMsg, key)\n\t\tcase 
\"tenantId\":\n\t\t\terr = unpopulate(val, \"TenantID\", &s.TenantID)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8eb3bfa53ea75e08a3b61c2d05c79ed6", "score": "0.5754916", "text": "func (p *PublicCertificateProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"blob\":\n\t\t\terr = runtime.DecodeByteArray(string(val), &p.Blob, runtime.Base64StdFormat)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"publicCertificateLocation\":\n\t\t\terr = unpopulate(val, \"PublicCertificateLocation\", &p.PublicCertificateLocation)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"thumbprint\":\n\t\t\terr = unpopulate(val, \"Thumbprint\", &p.Thumbprint)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "2b23a348f0de6a9d5cdc42e88ef65256", "score": "0.5742587", "text": "func (d *DeviceGroupModel) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"id\":\n\t\t\terr = unpopulate(val, \"ID\", &d.ID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &d.Name)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, \"Properties\", &d.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"systemData\":\n\t\t\terr = unpopulate(val, \"SystemData\", &d.SystemData)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"type\":\n\t\t\terr = unpopulate(val, \"Type\", &d.Type)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "3205c8e08a9a519d702b7e8721e0b850", "score": "0.57387", "text": "func (e *EventGroupingSettings) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", e, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"aggregationKind\":\n\t\t\terr = unpopulate(val, \"AggregationKind\", &e.AggregationKind)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", e, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "9d04be99567b021ce2a5eb4af718baca", "score": "0.5730414", "text": "func (p *PrivateEndpointConnectionSimpleProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"groupIds\":\n\t\t\terr = unpopulate(val, \"GroupIDs\", &p.GroupIDs)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateEndpoint\":\n\t\t\terr = unpopulate(val, \"PrivateEndpoint\", &p.PrivateEndpoint)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateLinkServiceConnectionState\":\n\t\t\terr = unpopulate(val, 
\"PrivateLinkServiceConnectionState\", &p.PrivateLinkServiceConnectionState)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "20c81ee4a8f67eb09209ecd6cef667b5", "score": "0.57295305", "text": "func (u *UserProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"encryptedPassword\":\n\t\t\terr = unpopulate(val, \"EncryptedPassword\", &u.EncryptedPassword)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"shareAccessRights\":\n\t\t\terr = unpopulate(val, \"ShareAccessRights\", &u.ShareAccessRights)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"userType\":\n\t\t\terr = unpopulate(val, \"UserType\", &u.UserType)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "fcf42e8b192fe072a3024d053aab59ef", "score": "0.5729371", "text": "func (v *VirtualNetworkLinkPatch) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, \"Properties\", &v.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "3c8dc668c0f3627a7e027b84f70a4926", "score": "0.57083684", "text": "func (v *VolumeList) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &v.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &v.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "f88bfd63c5084d0cec1cf4e9e361d4bd", "score": "0.5707513", "text": "func (s *ServerGroupPropertiesPrivateDNSZoneArguments) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"privateDnsZoneArmResourceId\":\n\t\t\terr = unpopulate(val, \"PrivateDNSZoneArmResourceID\", &s.PrivateDNSZoneArmResourceID)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "b0216196bc389438147494f0954ca052", "score": "0.57068646", "text": "func (cgm *ClientGroupMember) UnmarshalJSON(body []byte) error {\n\tvar m map[string]*json.RawMessage\n\terr := json.Unmarshal(body, &m)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor k, v := range m {\n\t\tswitch k {\n\t\tcase \"properties\":\n\t\t\tif v != nil {\n\t\t\t\tvar 
clientGroupMemberProperties ClientGroupMemberProperties\n\t\t\t\terr = json.Unmarshal(*v, &clientGroupMemberProperties)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcgm.ClientGroupMemberProperties = &clientGroupMemberProperties\n\t\t\t}\n\t\tcase \"id\":\n\t\t\tif v != nil {\n\t\t\t\tvar ID string\n\t\t\t\terr = json.Unmarshal(*v, &ID)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcgm.ID = &ID\n\t\t\t}\n\t\tcase \"type\":\n\t\t\tif v != nil {\n\t\t\t\tvar typeVar string\n\t\t\t\terr = json.Unmarshal(*v, &typeVar)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcgm.Type = &typeVar\n\t\t\t}\n\t\tcase \"name\":\n\t\t\tif v != nil {\n\t\t\t\tvar name string\n\t\t\t\terr = json.Unmarshal(*v, &name)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tcgm.Name = &name\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "37c19363d5c8885eac09b57102afbd6d", "score": "0.5693891", "text": "func (p *PrivateEndpointConnectionProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"privateEndpoint\":\n\t\t\terr = unpopulate(val, \"PrivateEndpoint\", &p.PrivateEndpoint)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateLinkServiceConnectionState\":\n\t\t\terr = unpopulate(val, \"PrivateLinkServiceConnectionState\", &p.PrivateLinkServiceConnectionState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &p.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6b54d759cbab06687b23da71eb4462ee", "score": "0.5688679", "text": "func (s *SnapshotProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"complianceResults\":\n\t\t\terr = unpopulate(val, \"ComplianceResults\", &s.ComplianceResults)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"createdAt\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"CreatedAt\", &s.CreatedAt)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"id\":\n\t\t\terr = unpopulate(val, \"ID\", &s.ID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &s.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"reportProperties\":\n\t\t\terr = unpopulate(val, \"ReportProperties\", &s.ReportProperties)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"reportSystemData\":\n\t\t\terr = unpopulate(val, \"ReportSystemData\", &s.ReportSystemData)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"snapshotName\":\n\t\t\terr = unpopulate(val, \"SnapshotName\", &s.SnapshotName)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8853ee679343f34c96a09a1b6d20ce95", "score": "0.56851655", "text": "func (u *UpdateGroupStatus) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, 
err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"members\":\n\t\t\terr = unpopulate(val, \"Members\", &u.Members)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &u.Name)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"status\":\n\t\t\terr = unpopulate(val, \"Status\", &u.Status)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "17b12505c916ade087d0c911c0e5afa9", "score": "0.56677294", "text": "func (p *ProviderProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"aad\":\n\t\t\terr = unpopulate(val, \"AAD\", &p.AAD)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"company\":\n\t\t\terr = unpopulate(val, \"Company\", &p.Company)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"defaultEndpoint\":\n\t\t\terr = unpopulate(val, \"DefaultEndpoint\", &p.DefaultEndpoint)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"description\":\n\t\t\terr = unpopulate(val, \"Description\", &p.Description)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"managedApplication\":\n\t\t\terr = unpopulate(val, \"ManagedApplication\", &p.ManagedApplication)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"pricingDimensions\":\n\t\t\terr = unpopulate(val, \"PricingDimensions\", &p.PricingDimensions)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"providerType\":\n\t\t\terr = unpopulate(val, \"ProviderType\", &p.ProviderType)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"quotaDimensions\":\n\t\t\terr = unpopulate(val, \"QuotaDimensions\", &p.QuotaDimensions)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"skus\":\n\t\t\terr = unpopulate(val, \"SKUs\", &p.SKUs)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"targets\":\n\t\t\terr = unpopulate(val, \"Targets\", &p.Targets)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d33566d3fed7e74def2d462826d75c3f", "score": "0.56544036", "text": "func (a *AvailableBalanceProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", a, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"amount\":\n\t\t\terr = unpopulate(val, \"Amount\", &a.Amount)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", a, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "3d6d63bcc77a39e8747b82fc0cd1cfc8", "score": "0.56511545", "text": "func (c *ContainerProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"containerStatus\":\n\t\t\terr = unpopulate(val, \"ContainerStatus\", &c.ContainerStatus)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"createdDateTime\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"CreatedDateTime\", &c.CreatedDateTime)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"dataFormat\":\n\t\t\terr = unpopulate(val, \"DataFormat\", 
&c.DataFormat)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"refreshDetails\":\n\t\t\terr = unpopulate(val, \"RefreshDetails\", &c.RefreshDetails)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c9ed07cc865131dce57d76bece984dfb", "score": "0.56475496", "text": "func (v *InterestGroupDetails) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjsonC5a4559bDecodeGithubComChromedpCdprotoStorage26(&r, v)\n\treturn r.Error()\n}", "title": "" }, { "docid": "f5f63028c7a7c2e18f35a0f936bbe6fe", "score": "0.56401163", "text": "func (d *DevicePropertiesPatch) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"edgeProfile\":\n\t\t\terr = unpopulate(val, \"EdgeProfile\", &d.EdgeProfile)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "4f6bbcc868524741a6d8a867e2e1dc8a", "score": "0.56299716", "text": "func (v VolumeGroup) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"id\", v.ID)\n\tpopulate(objectMap, \"name\", v.Name)\n\tpopulate(objectMap, \"properties\", v.Properties)\n\tpopulate(objectMap, \"systemData\", v.SystemData)\n\tpopulate(objectMap, \"type\", v.Type)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "b4d36275253436ac7a90baf703224eeb", "score": "0.5629954", "text": "func (u *UpdateProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"resourceId\":\n\t\t\terr = unpopulate(val, \"ResourceID\", &u.ResourceID)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d38b24e0ec6b25f2c306a61684fd8005", "score": "0.56061894", "text": "func (b *BackupItemProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", b, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"id\":\n\t\t\terr = unpopulate(val, \"BackupID\", &b.BackupID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"blobName\":\n\t\t\terr = unpopulate(val, \"BlobName\", &b.BlobName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"correlationId\":\n\t\t\terr = unpopulate(val, \"CorrelationID\", &b.CorrelationID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"created\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"Created\", &b.Created)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"databases\":\n\t\t\terr = unpopulate(val, \"Databases\", &b.Databases)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"finishedTimeStamp\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"FinishedTimeStamp\", &b.FinishedTimeStamp)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"lastRestoreTimeStamp\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"LastRestoreTimeStamp\", &b.LastRestoreTimeStamp)\n\t\t\tdelete(rawMsg, key)\n\t\tcase 
\"log\":\n\t\t\terr = unpopulate(val, \"Log\", &b.Log)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &b.Name)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"scheduled\":\n\t\t\terr = unpopulate(val, \"Scheduled\", &b.Scheduled)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"sizeInBytes\":\n\t\t\terr = unpopulate(val, \"SizeInBytes\", &b.SizeInBytes)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"status\":\n\t\t\terr = unpopulate(val, \"Status\", &b.Status)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"storageAccountUrl\":\n\t\t\terr = unpopulate(val, \"StorageAccountURL\", &b.StorageAccountURL)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"websiteSizeInBytes\":\n\t\t\terr = unpopulate(val, \"WebsiteSizeInBytes\", &b.WebsiteSizeInBytes)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", b, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "83c4a8852a5e0b3c19226ce3c3b16f7f", "score": "0.56034875", "text": "func (s *SecurityGroupEntityProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"additionalData\":\n\t\t\terr = unpopulate(val, \"AdditionalData\", &s.AdditionalData)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"distinguishedName\":\n\t\t\terr = unpopulate(val, \"DistinguishedName\", &s.DistinguishedName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"friendlyName\":\n\t\t\terr = unpopulate(val, \"FriendlyName\", &s.FriendlyName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"objectGuid\":\n\t\t\terr = unpopulate(val, \"ObjectGUID\", &s.ObjectGUID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"sid\":\n\t\t\terr = unpopulate(val, \"Sid\", &s.Sid)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e34760434315836dd4052988302e8f84", "score": "0.56001914", "text": "func (u *PublishedGroupLayers) UnmarshalJSON(data []byte) error {\n\tvar raw interface{}\n\terr := json.Unmarshal(data, &raw)\n\tif err != nil {\n\t\treturn err\n\t}\n\tswitch raw := raw.(type) {\n\tcase map[string]interface{}:\n\t\tvar layers PublishedGroupLayers\n\t\t*u = append(layers, &GroupPublishableItem{Name: raw[\"name\"].(string), Href: raw[\"href\"].(string), Type: raw[\"@type\"].(string)})\n\tcase []interface{}:\n\t\tvar publishedGroupLayers []*GroupPublishableItem\n\t\tjson.Unmarshal(data, &publishedGroupLayers)\n\t\t*u = publishedGroupLayers\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6684fefcbbd675723874b3a45b57a264", "score": "0.5598118", "text": "func (r *RegistryKeyEntityProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", r, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"additionalData\":\n\t\t\terr = unpopulate(val, \"AdditionalData\", &r.AdditionalData)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"friendlyName\":\n\t\t\terr = unpopulate(val, \"FriendlyName\", &r.FriendlyName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"hive\":\n\t\t\terr = unpopulate(val, \"Hive\", &r.Hive)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"key\":\n\t\t\terr = unpopulate(val, \"Key\", &r.Key)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil 
{\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", r, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "468cf705475866cfcdd251b7b37fa505", "score": "0.5593004", "text": "func (p *ProductProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"autoRenew\":\n\t\t\terr = unpopulate(val, \"AutoRenew\", &p.AutoRenew)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"availabilityId\":\n\t\t\terr = unpopulate(val, \"AvailabilityID\", &p.AvailabilityID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"billingFrequency\":\n\t\t\terr = unpopulate(val, \"BillingFrequency\", &p.BillingFrequency)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"billingProfileDisplayName\":\n\t\t\terr = unpopulate(val, \"BillingProfileDisplayName\", &p.BillingProfileDisplayName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"billingProfileId\":\n\t\t\terr = unpopulate(val, \"BillingProfileID\", &p.BillingProfileID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"customerDisplayName\":\n\t\t\terr = unpopulate(val, \"CustomerDisplayName\", &p.CustomerDisplayName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"customerId\":\n\t\t\terr = unpopulate(val, \"CustomerID\", &p.CustomerID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"displayName\":\n\t\t\terr = unpopulate(val, \"DisplayName\", &p.DisplayName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"endDate\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"EndDate\", &p.EndDate)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"invoiceSectionDisplayName\":\n\t\t\terr = unpopulate(val, \"InvoiceSectionDisplayName\", &p.InvoiceSectionDisplayName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"invoiceSectionId\":\n\t\t\terr = unpopulate(val, \"InvoiceSectionID\", &p.InvoiceSectionID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"lastCharge\":\n\t\t\terr = unpopulate(val, \"LastCharge\", &p.LastCharge)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"lastChargeDate\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"LastChargeDate\", &p.LastChargeDate)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"productType\":\n\t\t\terr = unpopulate(val, \"ProductType\", &p.ProductType)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"productTypeId\":\n\t\t\terr = unpopulate(val, \"ProductTypeID\", &p.ProductTypeID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"purchaseDate\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"PurchaseDate\", &p.PurchaseDate)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"quantity\":\n\t\t\terr = unpopulate(val, \"Quantity\", &p.Quantity)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"reseller\":\n\t\t\terr = unpopulate(val, \"Reseller\", &p.Reseller)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"skuDescription\":\n\t\t\terr = unpopulate(val, \"SKUDescription\", &p.SKUDescription)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"skuId\":\n\t\t\terr = unpopulate(val, \"SKUID\", &p.SKUID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"status\":\n\t\t\terr = unpopulate(val, \"Status\", &p.Status)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"tenantId\":\n\t\t\terr = unpopulate(val, \"TenantID\", &p.TenantID)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c6ab34d7983eb16e9ead05d77f6bacf0", "score": "0.5591023", "text": "func (r *AWSEC2VolumeAttachment) UnmarshalJSON(b []byte) error {\n\ttype Properties AWSEC2VolumeAttachment\n\tres := &struct {\n\t\tType string\n\t\tProperties 
*Properties\n\t}{}\n\tif err := json.Unmarshal(b, &res); err != nil {\n\t\tfmt.Printf(\"ERROR: %s\\n\", err)\n\t\treturn err\n\t}\n\n\t// If the resource has no Properties set, it could be nil\n\tif res.Properties != nil {\n\t\t*r = AWSEC2VolumeAttachment(*res.Properties)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "213933443137fb4147b50d3ecf976fab", "score": "0.5584494", "text": "func (x *JSONProperties) UnmarshalJSON(b []byte) (xerr error) {\n\tdefer fail.OnPanic(&xerr)()\n\n\t// Decode JSON data\n\tunjsoned := map[string]string{}\n\terr := FromJSON(b, &unjsoned)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Now do the real work\n\tfor key, value := range unjsoned {\n\t\tzeroValue := PropertyTypeRegistry.ZeroValue(x.module, key)\n\t\terr := FromJSON([]byte(value), zeroValue)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\titem := &jsonProperty{\n\t\t\tData: zeroValue,\n\t\t\tmodule: x.module,\n\t\t\tkey: key,\n\t\t}\n\t\tx.Properties[key] = item\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e54bf23b4b84de008bcf2df4275bf235", "score": "0.55835164", "text": "func (w *WorkspaceProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", w, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &w.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", w, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "cd522372d9eb08179da5b684a2eb8bf6", "score": "0.55788827", "text": "func (eag EndpointAddrGroup) UnmarshalJSON(b []byte) error {\n\tvar raw interface{}\n\tif err := json.Unmarshal(b, &raw); err != nil {\n\t\treturn err\n\t}\n\treturn eag.decode(raw)\n}", "title": "" }, { "docid": "837cf19549db8037e488e42362e27b1e", "score": "0.5578586", "text": "func (a *SshKey_Tags) UnmarshalJSON(b []byte) error {\n\tobject := make(map[string]json.RawMessage)\n\terr := json.Unmarshal(b, &object)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif len(object) != 0 {\n\t\ta.AdditionalProperties = make(map[string]string)\n\t\tfor fieldName, fieldBuf := range object {\n\t\t\tvar fieldVal string\n\t\t\terr := json.Unmarshal(fieldBuf, &fieldVal)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.Wrap(err, fmt.Sprintf(\"error unmarshaling field %s\", fieldName))\n\t\t\t}\n\t\t\ta.AdditionalProperties[fieldName] = fieldVal\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "05bc212d9262e3bf0ef3cfc13c882b8b", "score": "0.55721724", "text": "func (p *ProactiveLogCollectionSettingsProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"userConsent\":\n\t\t\terr = unpopulate(val, \"UserConsent\", &p.UserConsent)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "b4f26010d5e30a4d24b5bbdc27e61c05", "score": "0.5560083", "text": "func (g *GroupingConfiguration) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn 
fmt.Errorf(\"unmarshalling type %T: %v\", g, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"enabled\":\n\t\t\terr = unpopulate(val, \"Enabled\", &g.Enabled)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"groupByAlertDetails\":\n\t\t\terr = unpopulate(val, \"GroupByAlertDetails\", &g.GroupByAlertDetails)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"groupByCustomDetails\":\n\t\t\terr = unpopulate(val, \"GroupByCustomDetails\", &g.GroupByCustomDetails)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"groupByEntities\":\n\t\t\terr = unpopulate(val, \"GroupByEntities\", &g.GroupByEntities)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"lookbackDuration\":\n\t\t\terr = unpopulate(val, \"LookbackDuration\", &g.LookbackDuration)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"matchingMethod\":\n\t\t\terr = unpopulate(val, \"MatchingMethod\", &g.MatchingMethod)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"reopenClosedIncident\":\n\t\t\terr = unpopulate(val, \"ReopenClosedIncident\", &g.ReopenClosedIncident)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", g, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "918d96c3719e6c76d3b3b842aabb5432", "score": "0.55548847", "text": "func (a *Snapshot_Tags) UnmarshalJSON(b []byte) error {\n\tobject := make(map[string]json.RawMessage)\n\terr := json.Unmarshal(b, &object)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif len(object) != 0 {\n\t\ta.AdditionalProperties = make(map[string]string)\n\t\tfor fieldName, fieldBuf := range object {\n\t\t\tvar fieldVal string\n\t\t\terr := json.Unmarshal(fieldBuf, &fieldVal)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.Wrap(err, fmt.Sprintf(\"error unmarshaling field %s\", fieldName))\n\t\t\t}\n\t\t\ta.AdditionalProperties[fieldName] = fieldVal\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "b94beecf378da131edf6e97614b62b5d", "score": "0.55507416", "text": "func (a *SecurityGroup_Tags) UnmarshalJSON(b []byte) error {\n\tobject := make(map[string]json.RawMessage)\n\terr := json.Unmarshal(b, &object)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif len(object) != 0 {\n\t\ta.AdditionalProperties = make(map[string]string)\n\t\tfor fieldName, fieldBuf := range object {\n\t\t\tvar fieldVal string\n\t\t\terr := json.Unmarshal(fieldBuf, &fieldVal)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.Wrap(err, fmt.Sprintf(\"error unmarshaling field %s\", fieldName))\n\t\t\t}\n\t\t\ta.AdditionalProperties[fieldName] = fieldVal\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "b077a40afd13b2ff35dcf33f2851d32f", "score": "0.5545547", "text": "func (k *KikChannel) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn err\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, &k.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif err := k.Channel.unmarshalInternal(rawMsg); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "030e89d9b9637e569aaed8cfc3ed9917", "score": "0.55416363", "text": "func (v *VolumeSnapshotData) UnmarshalJSON(data []byte) error {\n\ttmp := VolumeSnapshotDataCopy{}\n\terr := json.Unmarshal(data, &tmp)\n\tif err != nil {\n\t\treturn err\n\t}\n\ttmp2 := VolumeSnapshotData(tmp)\n\t*v = tmp2\n\treturn nil\n}", "title": "" }, { "docid": "6f7f1bf6245cdbe78f45ceb32daaf34a", "score": "0.5537748", 
"text": "func (r *ReservationSKUProperty) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", r, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &r.Name)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", r, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "aa669099c21d0e5c0888690fcfb776d0", "score": "0.55322915", "text": "func (r *RoleProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", r, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"password\":\n\t\t\terr = unpopulate(val, \"Password\", &r.Password)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", r, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6dfa69fbd98a395139418490c12949c8", "score": "0.5528342", "text": "func (v *VirtualNetworkLinkSubResourceProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"virtualNetworkLink\":\n\t\t\terr = unpopulate(val, \"VirtualNetworkLink\", &v.VirtualNetworkLink)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "835e5a5057b6c47fb7c2fae9890a0db6", "score": "0.55281866", "text": "func (d *DefenderSettingsPropertiesMdeIntegration) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"status\":\n\t\t\terr = unpopulate(val, \"Status\", &d.Status)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "a2aa2f971a94ab31f352cbfc52d57f22", "score": "0.5523015", "text": "func (v VolumeGroupUpdate) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"properties\", v.Properties)\n\treturn json.Marshal(objectMap)\n}", "title": "" }, { "docid": "5fd81cb5438e87cdd8f07f5799fd37ff", "score": "0.5522848", "text": "func (p *PostgresInstanceProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"admin\":\n\t\t\terr = unpopulate(val, \"Admin\", &p.Admin)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"basicLoginInformation\":\n\t\t\terr = unpopulate(val, \"BasicLoginInformation\", &p.BasicLoginInformation)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"dataControllerId\":\n\t\t\terr = unpopulate(val, \"DataControllerID\", &p.DataControllerID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"k8sRaw\":\n\t\t\terr 
= unpopulate(val, \"K8SRaw\", &p.K8SRaw)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"lastUploadedDate\":\n\t\t\terr = unpopulateTimeRFC3339(val, \"LastUploadedDate\", &p.LastUploadedDate)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &p.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6c0cfb842683369d1f68ae78909c61a4", "score": "0.55162984", "text": "func (c *CertificateProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"certificateVirtualPath\":\n\t\t\terr = unpopulate(val, \"CertificateVirtualPath\", &c.CertificateVirtualPath)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"keyVaultSecretId\":\n\t\t\terr = unpopulate(val, \"KeyVaultSecretID\", &c.KeyVaultSecretID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"keyVirtualPath\":\n\t\t\terr = unpopulate(val, \"KeyVirtualPath\", &c.KeyVirtualPath)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &c.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "4212c32a8b28188ed8ee4a0c0f68fef9", "score": "0.55134594", "text": "func (v *VolumeStats) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjson6601e8cdDecodeK8sIoKubernetesPkgKubeletApisStatsV1alpha1(&r, v)\n\treturn r.Error()\n}", "title": "" }, { "docid": "b0f423a1af82632fa4d2808d71d56fc1", "score": "0.5513226", "text": "func (p *Properties) UnmarshalJSON(raw []byte) error {\n\tif p == nil {\n\t\treturn fmt.Errorf(\"unmarshal event.Properties to nil\")\n\t}\n\n\tdata := map[string]json.RawMessage{}\n\tif err := json.Unmarshal(raw, &data); err != nil {\n\t\treturn err\n\t}\n\n\tunmarshal := func(key string, target interface{}) error {\n\t\tif v, ok := data[key]; ok {\n\t\t\tif err := json.Unmarshal(v, target); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tdelete(data, key)\n\t\t}\n\n\t\treturn nil\n\t}\n\n\tif err := unmarshal(\"$insert_id\", &p.InsertID); err != nil {\n\t\treturn err\n\t}\n\n\tif err := unmarshal(\"distinct_id\", &p.DistinctID); err != nil {\n\t\treturn err\n\t}\n\n\tif err := unmarshal(\"ip\", &p.IP); err != nil {\n\t\treturn err\n\t}\n\n\tvar unix int64\n\tif err := unmarshal(\"time\", &unix); err != nil {\n\t\treturn err\n\t}\n\n\tif unix != 0 {\n\t\tp.Time = time.Unix(unix, 0).UTC()\n\t}\n\n\tif err := unmarshal(\"token\", &p.Token); err != nil {\n\t\treturn err\n\t}\n\n\tif len(data) > 0 {\n\t\tif p.CustomProperties == nil {\n\t\t\tp.CustomProperties = CustomProperties{}\n\t\t}\n\n\t\tfor k, v := range data {\n\t\t\tvar c interface{}\n\t\t\tif err := json.Unmarshal(v, &c); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tp.CustomProperties[k] = c\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "eaa45defeecdf04b1e041a7f2f95b82d", "score": "0.55108017", "text": "func (d *DicomServiceProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t}\n\tfor key, 
val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"authenticationConfiguration\":\n\t\t\terr = unpopulate(val, \"AuthenticationConfiguration\", &d.AuthenticationConfiguration)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"privateEndpointConnections\":\n\t\t\terr = unpopulate(val, \"PrivateEndpointConnections\", &d.PrivateEndpointConnections)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"provisioningState\":\n\t\t\terr = unpopulate(val, \"ProvisioningState\", &d.ProvisioningState)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"publicNetworkAccess\":\n\t\t\terr = unpopulate(val, \"PublicNetworkAccess\", &d.PublicNetworkAccess)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"serviceUrl\":\n\t\t\terr = unpopulate(val, \"ServiceURL\", &d.ServiceURL)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c54628e78e92a7d3028b9788c4eb35e6", "score": "0.5509519", "text": "func (redundantGroup *RedundantGroup) UnmarshalJSON(b []byte) error {\n\ttype temp RedundantGroup\n\ttype groupReference struct {\n\t\tRedundancyGroup common.Links\n\t\tRedundancyGroupCount int `json:\"RedundancyGroup@odata.count\"`\n\t}\n\n\tvar t struct {\n\t\ttemp\n\t\tGroup groupReference\n\t}\n\n\tif err := json.Unmarshal(b, &t); err != nil {\n\t\treturn err\n\t}\n\n\t*redundantGroup = RedundantGroup(t.temp)\n\tredundantGroup.redundancyGroup = t.Group.RedundancyGroup.ToStrings()\n\tredundantGroup.RedundancyGroupCount = t.Group.RedundancyGroupCount\n\n\treturn nil\n}", "title": "" }, { "docid": "fede7e985d972d16327149addf65cf7f", "score": "0.5506246", "text": "func (v *VolumeSnapshot) UnmarshalJSON(data []byte) error {\n\ttmp := VolumeSnapshotCopy{}\n\terr := json.Unmarshal(data, &tmp)\n\tif err != nil {\n\t\treturn err\n\t}\n\ttmp2 := VolumeSnapshot(tmp)\n\t*v = tmp2\n\treturn nil\n}", "title": "" }, { "docid": "6f5fc126bfbb4825f3b52310180f3ba1", "score": "0.55056316", "text": "func (s *StorageAccountProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"blobEndpoint\":\n\t\t\terr = unpopulate(val, \"BlobEndpoint\", &s.BlobEndpoint)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"containerCount\":\n\t\t\terr = unpopulate(val, \"ContainerCount\", &s.ContainerCount)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"dataPolicy\":\n\t\t\terr = unpopulate(val, \"DataPolicy\", &s.DataPolicy)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"description\":\n\t\t\terr = unpopulate(val, \"Description\", &s.Description)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"storageAccountCredentialId\":\n\t\t\terr = unpopulate(val, \"StorageAccountCredentialID\", &s.StorageAccountCredentialID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"storageAccountStatus\":\n\t\t\terr = unpopulate(val, \"StorageAccountStatus\", &s.StorageAccountStatus)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d0185b5dbfa11e4d0f6b108b27a7b428", "score": "0.5504181", "text": "func (v *GetPropertiesParams) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjsonC5a4559bDecodeGithubComChromedpCdprotoRuntime23(&r, v)\n\treturn r.Error()\n}", "title": "" }, { "docid": "3671257747eea54d067f3dccf3c48bd4", "score": 
"0.54999906", "text": "func (p *PeriodProperties) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"billingPeriodEndDate\":\n\t\t\terr = unpopulateDateType(val, \"BillingPeriodEndDate\", &p.BillingPeriodEndDate)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"billingPeriodStartDate\":\n\t\t\terr = unpopulateDateType(val, \"BillingPeriodStartDate\", &p.BillingPeriodStartDate)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"invoiceIds\":\n\t\t\terr = unpopulate(val, \"InvoiceIDs\", &p.InvoiceIDs)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" } ]
b8816bc2cc4c12ad2ea2b330d5023a53
NewClient creates a new Client.
[ { "docid": "9a52bc6a2727de2660337fbf244362ec", "score": "0.0", "text": "func NewClient(hc *http.Client) *Client {\n\tbaseURL, _ := url.Parse(defaultBaseURL)\n\n\tif hc == nil {\n\t\thc = &http.Client{Timeout: 5 * time.Second}\n\t}\n\n\treturn &Client{baseURL: baseURL, httpClient: hc}\n}", "title": "" } ]
[ { "docid": "21911d3a0d52a41c9c37d1b39184e210", "score": "0.7689636", "text": "func NewClient() Client {\n\treturn Client{New()}\n}", "title": "" }, { "docid": "20f7ae91e1113531ddcd87ef40645132", "score": "0.76310474", "text": "func NewClient(ctx context.Context, args []string) (ClientInterface, error) {\n\treturn newClientStruct(ctx, args)\n}", "title": "" }, { "docid": "ac6ccd3979bdade6f02e3e7f0687237d", "score": "0.76013047", "text": "func NewClient() *Client {\n d := new(Client)\n return d\n}", "title": "" }, { "docid": "81d5f82bf08338a4c7a38e552936dae6", "score": "0.751989", "text": "func newClient() (*client, error) {\n\treturn nil, errUnimplemented\n}", "title": "" }, { "docid": "2c572deec730908a6950f19c8647e49f", "score": "0.74611545", "text": "func New(client client.Client) *Client {\n\treturn &Client{client}\n}", "title": "" }, { "docid": "5321afbeb8ac6655d76be53a427b7a0f", "score": "0.7411922", "text": "func New() (*Client, error) {\n\t// TODO: код писать здесь\n}", "title": "" }, { "docid": "4a2c735e81b42d32d3329e0dd8123405", "score": "0.7397527", "text": "func (p Pipeline) NewClient(in Client) (out Client, err error) {\n\treq := p.newResquest(API_NEWCLIENT, &out, &in)\n\t_, err = p.do(req, errorHandler(map[int]string{\n\t\t400: fmt.Sprintf(\"Client with id %v may already exist\", in.Id),\n\t}))\n\treturn\n}", "title": "" }, { "docid": "7ea8c0707f0dc1c73bcb1cc14dcfc1ff", "score": "0.7389645", "text": "func NewClient(db database.Database) (*Client, error) {\n\treturn &Client{\n\t\tdb: db,\n\t}, nil\n}", "title": "" }, { "docid": "f5f903db1edbaf4fadaa2660fc730ea9", "score": "0.7386158", "text": "func newClient() (client.Client, error) {\n\tcfg, err := config.GetConfig()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tc, err := client.New(cfg, client.Options{})\n\treturn c, err\n}", "title": "" }, { "docid": "c6dcd6963af585e11c513f1e82cabf93", "score": "0.735622", "text": "func NewClient() (Client, error) {\n\tclient := Client{}\n\treturn client, nil\n}", "title": "" }, { "docid": "d7e8d0cae1b753f8b45b4292f3f00f7f", "score": "0.7350887", "text": "func NewClient() (*Client, error) {\n\treturn newClient(false)\n}", "title": "" }, { "docid": "da0a1bfb30f7ba342e7dfb44ecefe546", "score": "0.7335848", "text": "func NewClient(ctx context.Context) (*Client, error) {\n\tssn, err := GetSession(ctx)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to get session\")\n\t}\n\n\tclient := &Client{\n\t\tssn: ssn,\n\t}\n\treturn client, nil\n}", "title": "" }, { "docid": "9762b7108fc480209162e1edbde3dbcd", "score": "0.7334502", "text": "func NewClient(config *config.Config) *Client {\n\treturn &Client{config: config}\n}", "title": "" }, { "docid": "de2170fa37877e8ad0fda37a659337b8", "score": "0.7288532", "text": "func NewClient(new, add, enqueue, status, next, complete goa.Endpoint) *Client {\n\treturn &Client{\n\t\tNewEndpoint: new,\n\t\tAddEndpoint: add,\n\t\tEnqueueEndpoint: enqueue,\n\t\tStatusEndpoint: status,\n\t\tNextEndpoint: next,\n\t\tCompleteEndpoint: complete,\n\t}\n}", "title": "" }, { "docid": "30047a1a94adf785b133d9e3f2629113", "score": "0.72848016", "text": "func New() (*Client, error) {\n\tc, err := newClient()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Client{\n\t\tc: c,\n\t}, nil\n}", "title": "" }, { "docid": "50dfd0e966683e4ffdd61a36a56e808b", "score": "0.72812074", "text": "func NewClient(cfg ClientConfig) (Client, error) {\n\tvar c Client\n\tif err := c.init(cfg); err != nil {\n\t\treturn c, errors.Wrap(err, \"client new instance 
error\")\n\t}\n\treturn c, nil\n}", "title": "" }, { "docid": "aacb3f915df44ae81ba973b5ce4bd61c", "score": "0.72748786", "text": "func NewClient(cli *client.Client) *Client {\n\treturn &Client{Client: cli}\n}", "title": "" }, { "docid": "993aba55d47b4b3b04ed5679a710a070", "score": "0.72634107", "text": "func NewClient(options ...Option) Client {\n\treturn &client{\n\t\tparent: nil,\n\t\toptions: options,\n\t}\n}", "title": "" }, { "docid": "b6c89af4bd1814baa41ec902917702cc", "score": "0.7261855", "text": "func NewClient() (*Client, error) {\n\treturn NewRemoteClient(\"\")\n}", "title": "" }, { "docid": "b6c89af4bd1814baa41ec902917702cc", "score": "0.7261855", "text": "func NewClient() (*Client, error) {\n\treturn NewRemoteClient(\"\")\n}", "title": "" }, { "docid": "9b6206f6e165b5b7ada0bc0e59185e9c", "score": "0.7261715", "text": "func NewClient() *Client {\n\treturn &Client{Client: onet.NewClient(Name)}\n}", "title": "" }, { "docid": "662b88305f06bb834ee5db1a461ef1b0", "score": "0.7261193", "text": "func NewClient(key string, name string) Client {\n\treturn Client{serviceKey: key, clientName: name}\n}", "title": "" }, { "docid": "ff9a7aa0054868d6c89d9d818d683df3", "score": "0.7249452", "text": "func ExampleNewClient() {\n\t// Create the new client first (we'll create one that can send\n\t// 20 span packets in parallel):\n\tcl, err := trace.NewClient(trace.DefaultVeneurAddress, trace.Capacity(20))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Replace the old client:\n\ttrace.SetDefaultClient(cl)\n\t// Output:\n}", "title": "" }, { "docid": "93943ea922f0cb7f03ccabf5a08d3b19", "score": "0.7237112", "text": "func New() *Client {\n\treturn &Client{}\n}", "title": "" }, { "docid": "93943ea922f0cb7f03ccabf5a08d3b19", "score": "0.7237112", "text": "func New() *Client {\n\treturn &Client{}\n}", "title": "" }, { "docid": "2e6e11b2654dadb7478260753b9ce546", "score": "0.72173136", "text": "func NewClient() *Client {\n\tclient := Client{}\n\treturn &client\n}", "title": "" }, { "docid": "d6b6baf84685d207baca6dfa0b57d9a1", "score": "0.7208196", "text": "func newClient() (*client, error) { return nil, errUnimplemented }", "title": "" }, { "docid": "5b11c42d88b5b4e75e40338de0acc337", "score": "0.7206401", "text": "func NewClient(input *NewClientInput) Client {\n\treturn &client{\n\t\tuserID: input.UserID,\n\t\tcheckAllPosts: input.CheckAllPosts,\n\t\tdryRun: input.DryRun,\n\t\tapi: input.API,\n\t\tstorage: input.Storage,\n\t}\n}", "title": "" }, { "docid": "1027ade1dcb2b77d813d1b6f1f1c63bd", "score": "0.7202575", "text": "func New(clientname string) (*Client, util.Gerror) {\n\tvar found bool\n\tvar err util.Gerror\n\tif config.UsingDB() {\n\t\tvar cerr error\n\t\tfound, cerr = checkForClientSQL(datastore.Dbh, clientname)\n\t\tif cerr != nil {\n\t\t\terr = util.Errorf(cerr.Error())\n\t\t\terr.SetStatus(http.StatusInternalServerError)\n\t\t\treturn nil, err\n\t\t}\n\t} else {\n\t\tds := datastore.New()\n\t\t_, found = ds.Get(\"client\", clientname)\n\t}\n\tif found {\n\t\terr = util.Errorf(\"Client already exists\")\n\t\terr.SetStatus(http.StatusConflict)\n\t\treturn nil, err\n\t}\n\tif err := validateClientName(clientname); err != nil {\n\t\treturn nil, err\n\t}\n\tclient := &Client{\n\t\tName: clientname,\n\t\tNodeName: clientname,\n\t\tChefType: \"client\",\n\t\tJSONClass: \"Chef::ApiClient\",\n\t\tValidator: false,\n\t\tOrgname: \"\",\n\t\tpubKey: \"\",\n\t\tAdmin: false,\n\t\tCertificate: \"\",\n\t}\n\treturn client, nil\n}", "title": "" }, { "docid": "8649af4266ca3c3bd37eae9e2c1f5f5f", "score": 
"0.7194417", "text": "func NewClient(Key string) Client {\n\tc := Client{}\n\tc.Key = Key\n\treturn c\n}", "title": "" }, { "docid": "679852991125944955968d0bc4f78e21", "score": "0.719207", "text": "func NewClient(conf *configure.ClientConfigure) *Client {\n\treturn &Client{\n\t\tconf: conf,\n\t}\n}", "title": "" }, { "docid": "c946e0154266f53441d34d903418e3cf", "score": "0.719003", "text": "func NewClient(project, data, postMessage, updateMessage, deleteMessage goa.Endpoint) *Client {\n\treturn &Client{\n\t\tProjectEndpoint: project,\n\t\tDataEndpoint: data,\n\t\tPostMessageEndpoint: postMessage,\n\t\tUpdateMessageEndpoint: updateMessage,\n\t\tDeleteMessageEndpoint: deleteMessage,\n\t}\n}", "title": "" }, { "docid": "0068547dcd5201c56d45c49cca829d0e", "score": "0.7189603", "text": "func NewClient() *Client {\n\treturn &Client{\n\t\tclient: C.td_json_client_create(),\n\t}\n}", "title": "" }, { "docid": "53fcf8bbd99f90532f9f2bc7050803c1", "score": "0.71794206", "text": "func NewClient(token, domainPrefix, tz string) Client {\n\treturn Client{token, domainPrefix, tz}\n}", "title": "" }, { "docid": "90db0dca3fde81320ee2d00ddf6c8613", "score": "0.71652305", "text": "func NewClient(config *Config) Client {\n\treturn &client{\n\t\tconfig: config,\n\t}\n}", "title": "" }, { "docid": "90db0dca3fde81320ee2d00ddf6c8613", "score": "0.71652305", "text": "func NewClient(config *Config) Client {\n\treturn &client{\n\t\tconfig: config,\n\t}\n}", "title": "" }, { "docid": "27ac0cf52749b12860f223bb497e400e", "score": "0.71589226", "text": "func (f *clientFactory) NewClient() (secrethub.Client, error) {\n\tcredential, err := f.store.Get()\n\tif err != nil {\n\t\treturn nil, errio.Error(err)\n\t}\n\n\treturn secrethub.NewClient(credential, f.NewClientOptions()), nil\n}", "title": "" }, { "docid": "768376237b47b7409a7212cb94e0d937", "score": "0.7157201", "text": "func New() (*Client, error) {\n\tcs, err := newClients()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Client{\n\t\tcs: cs,\n\t}, nil\n}", "title": "" }, { "docid": "d55337b7eb917170ba43f4e186a01dd4", "score": "0.71465284", "text": "func NewClient(config Config) Client {\n\tclient := Client{}\n\tclient.init(config)\n\treturn client\n}", "title": "" }, { "docid": "503a675d1bcade7507f07e38e05fe109", "score": "0.7140603", "text": "func NewClient() *Client {\n\treturn new(Client)\n}", "title": "" }, { "docid": "d87bb9623f5ddb9c4973e609c9376c40", "score": "0.7138032", "text": "func NewClient() *Client {\n\treturn &Client{}\n}", "title": "" }, { "docid": "d87bb9623f5ddb9c4973e609c9376c40", "score": "0.7138032", "text": "func NewClient() *Client {\n\treturn &Client{}\n}", "title": "" }, { "docid": "e937996a562b5e41b81cfb9c4869291f", "score": "0.71304995", "text": "func NewClient(dbc db.Config) Client {\n\treturn Client{dbc: dbc}\n}", "title": "" }, { "docid": "163fa1303aa555707622f19d78fc7408", "score": "0.7129286", "text": "func NewClient(options ...ClientOption) Client {\n\treturn newClient(options...)\n}", "title": "" }, { "docid": "7325c6f105eba338273d79576eaba80a", "score": "0.71255904", "text": "func NewClient(list, show, add, remove, update, rename, publish goa.Endpoint) *Client {\n\treturn &Client{\n\t\tListEndpoint: list,\n\t\tShowEndpoint: show,\n\t\tAddEndpoint: add,\n\t\tRemoveEndpoint: remove,\n\t\tUpdateEndpoint: update,\n\t\tRenameEndpoint: rename,\n\t\tPublishEndpoint: publish,\n\t}\n}", "title": "" }, { "docid": "da741d6a549baf4713a35fa67e86cb69", "score": "0.7111248", "text": "func NewClient(ctx context.CLIContext, key string) 
Client {\n\treturn &client{ctx: ctx, key: key}\n}", "title": "" }, { "docid": "b1e7a6aefc902904d0634bed48ee6006", "score": "0.7105691", "text": "func NewClient() Client {\n\treturn &client{}\n}", "title": "" }, { "docid": "08028a86d1f1e7dcabbeb10a38884355", "score": "0.7091491", "text": "func StartNewClient() (*Client, error) {\n\treturn StartNewClientWithConfig(NewConfig())\n}", "title": "" }, { "docid": "7983aa12ec4fa1704b733f76ea9f52d1", "score": "0.7089465", "text": "func NewClient(submit, status, hints goa.Endpoint) *Client {\n\treturn &Client{\n\t\tSubmitEndpoint: submit,\n\t\tStatusEndpoint: status,\n\t\tHintsEndpoint: hints,\n\t}\n}", "title": "" }, { "docid": "af0444d773562e0d3119d44886439013", "score": "0.70862705", "text": "func NewClient(config *Config) (*Client, error) {\n\tclient := new(Client)\n\terr := client.Init(config)\n\treturn client, err\n}", "title": "" }, { "docid": "af0444d773562e0d3119d44886439013", "score": "0.70862705", "text": "func NewClient(config *Config) (*Client, error) {\n\tclient := new(Client)\n\terr := client.Init(config)\n\treturn client, err\n}", "title": "" }, { "docid": "af0444d773562e0d3119d44886439013", "score": "0.70862705", "text": "func NewClient(config *Config) (*Client, error) {\n\tclient := new(Client)\n\terr := client.Init(config)\n\treturn client, err\n}", "title": "" }, { "docid": "af0444d773562e0d3119d44886439013", "score": "0.70862705", "text": "func NewClient(config *Config) (*Client, error) {\n\tclient := new(Client)\n\terr := client.Init(config)\n\treturn client, err\n}", "title": "" }, { "docid": "af0444d773562e0d3119d44886439013", "score": "0.70862705", "text": "func NewClient(config *Config) (*Client, error) {\n\tclient := new(Client)\n\terr := client.Init(config)\n\treturn client, err\n}", "title": "" }, { "docid": "af0444d773562e0d3119d44886439013", "score": "0.70862705", "text": "func NewClient(config *Config) (*Client, error) {\n\tclient := new(Client)\n\terr := client.Init(config)\n\treturn client, err\n}", "title": "" }, { "docid": "af0444d773562e0d3119d44886439013", "score": "0.70862705", "text": "func NewClient(config *Config) (*Client, error) {\n\tclient := new(Client)\n\terr := client.Init(config)\n\treturn client, err\n}", "title": "" }, { "docid": "af0444d773562e0d3119d44886439013", "score": "0.70862705", "text": "func NewClient(config *Config) (*Client, error) {\n\tclient := new(Client)\n\terr := client.Init(config)\n\treturn client, err\n}", "title": "" }, { "docid": "af0444d773562e0d3119d44886439013", "score": "0.70862705", "text": "func NewClient(config *Config) (*Client, error) {\n\tclient := new(Client)\n\terr := client.Init(config)\n\treturn client, err\n}", "title": "" }, { "docid": "af0444d773562e0d3119d44886439013", "score": "0.70862705", "text": "func NewClient(config *Config) (*Client, error) {\n\tclient := new(Client)\n\terr := client.Init(config)\n\treturn client, err\n}", "title": "" }, { "docid": "af0444d773562e0d3119d44886439013", "score": "0.70862705", "text": "func NewClient(config *Config) (*Client, error) {\n\tclient := new(Client)\n\terr := client.Init(config)\n\treturn client, err\n}", "title": "" }, { "docid": "50f505946af66439a38585c04cdfc22b", "score": "0.70762676", "text": "func NewClient() Client {\n\treturn &client{sC: newBackend()}\n}", "title": "" }, { "docid": "3e830cb3c89e3a159657878e340f9686", "score": "0.7075344", "text": "func NewClient() *Client {\n\tr := &Client{}\n\n\treturn r\n}", "title": "" }, { "docid": "410106c283b4707b7fa7dd1cd3d01527", "score": "0.7072689", "text": "func 
NewClient(p *properties.Properties, workload ycsb.Workload, db ycsb.DB) *Client {\n\treturn &Client{p: p, workload: workload, db: db}\n}", "title": "" }, { "docid": "d11eaca85168a6cd7cc9da3cf9c7a816", "score": "0.7071641", "text": "func NewClient(ethclient bind.ContractBackend) *Client {\n\treturn &Client{ethclient}\n}", "title": "" }, { "docid": "fb5b3603b689c9940455272e0a49ff68", "score": "0.70716035", "text": "func NewClient(master string, addr string) *Client {\n\tc := &Client{\n\t\tmaster: master,\n\t\treplicaBuffer: newReplicaBuffer(master, gfs.ReplicaBufferTick),\n\t\tidentifier: addr,\n\t}\n\n\tgo func() {\n\t\thttp.HandleFunc(\"/create\", c.CreateHandler)\n\t\thttp.HandleFunc(\"/delete\", c.DeleteHandler)\n\t\thttp.HandleFunc(\"/rename\", c.RenameHandler)\n\t\thttp.HandleFunc(\"/mkdir\", c.MkdirHandler)\n\t\thttp.HandleFunc(\"/read\", c.ReadHandler)\n\t\thttp.HandleFunc(\"/write\", c.WriteHandler)\n\t\thttp.HandleFunc(\"/append\", c.AppendHandler)\n\n\t\terr := http.ListenAndServe(addr, nil)\n\t\tfmt.Println(\"[client]server fail:\", err)\n\t}()\n\n\treturn c\n}", "title": "" }, { "docid": "7f557dfdb8fb6b8b92a371c5d9a3ffa7", "score": "0.70692223", "text": "func NewClient(config bca.Config) Client {\n\treturn Client{\n\t\tCorporateID: config.CorporateID,\n\t\tClient: bca.NewAPI(config),\n\t}\n}", "title": "" }, { "docid": "548174740714c1fb2e6b47cf8f18086d", "score": "0.7060352", "text": "func NewClient(caller Caller, net *chaincfg.Params) *Client {\n\treturn &Client{Caller: caller, net: net}\n}", "title": "" }, { "docid": "c86b8eeee667d5b05248c7b7b784bfbd", "score": "0.7060303", "text": "func NewClient() (Client, error) {\n\tc, err := st.NewDefaultClient()\n\tif err != nil {\n\t\treturn client{}, err\n\t}\n\n\treturn client{client: c}, nil\n}", "title": "" }, { "docid": "440791606b5f382912ebae70e568cb69", "score": "0.7032708", "text": "func New(client *client.Client, properties ClientProperties) *Client {\n\treturn &Client{\n\t\tclient: client,\n\n\t\taccountSid: properties.AccountSid,\n\t}\n}", "title": "" }, { "docid": "bff7d7dc89e2e89e77a1ae0db367c2da", "score": "0.70322186", "text": "func NewClient(exec *executor.Executor) *Client {\n\treturn &Client{exec}\n}", "title": "" }, { "docid": "f51250eff8f692355d9599d81a2933c4", "score": "0.7030663", "text": "func NewClient(ClientKey string) *Client {\n\treturn &Client{ClientKey: ClientKey, URL: URL}\n}", "title": "" }, { "docid": "e80b8b378a4fa0893f238b21b952face", "score": "0.7029607", "text": "func NewClient(add, resta, multiplicacion, division goa.Endpoint) *Client {\n\treturn &Client{\n\t\tAddEndpoint: add,\n\t\tRestaEndpoint: resta,\n\t\tMultiplicacionEndpoint: multiplicacion,\n\t\tDivisionEndpoint: division,\n\t}\n}", "title": "" }, { "docid": "cf990c7a68240467db71619f9e9db658", "score": "0.702673", "text": "func NewClient(tokClient pb.TokenClient) *Client {\n\treturn &Client{authClient: tokClient}\n}", "title": "" }, { "docid": "c9128b45c72e7a95add8a6e6488c2487", "score": "0.7022765", "text": "func NewClient(bc utils.Blockchain) *Client {\n\treturn &Client{\n\t\tbc: bc,\n\t}\n}", "title": "" }, { "docid": "cf7b565d35b16faf401297b19826272a", "score": "0.70128775", "text": "func NewClient(hostname string) *Client {\r\n\treturn &Client{\r\n\t\thostname: hostname,\r\n\t}\r\n}", "title": "" }, { "docid": "2516987c6c3023fde2ce15b0591113c9", "score": "0.70088804", "text": "func NewClient(username, password, clientID, clientSecret string) *Client {\n\treturn &Client{\n\t\tcred: NewCredentials(username, password, clientID, 
clientSecret),\n\t\trandomArticleURL: randomArticleURLFormat,\n\t}\n}", "title": "" }, { "docid": "2917619d7c42bcce882aece5a119ccfc", "score": "0.7007359", "text": "func NewClient(ownerID string, clientID string, clientSecret string) *Client {\n\treturn &Client{\n\t\tOwnerID: ownerID,\n\t\tClientID: clientID,\n\t\tClientSecret: clientSecret,\n\t}\n}", "title": "" }, { "docid": "b8ff615442b4a4c3381584a1a18b6163", "score": "0.7004972", "text": "func NewClient(serverAddress string) Client {\n\tconn := NewConnection(serverAddress)\n\n\treturn Client{conn}\n}", "title": "" }, { "docid": "e322c8cb16ff297e5da9a8d14c6c8194", "score": "0.70042676", "text": "func NewClient(service, version string) *Client {\n\treturn &Client{\n\t\tServiceContext: ServiceContext{\n\t\t\tService: service,\n\t\t\tVersion: version,\n\t\t},\n\t\tTransport: DefaultTransport,\n\t}\n}", "title": "" }, { "docid": "e31b825059cf3e3ace53fb20e601d19f", "score": "0.699056", "text": "func NewClient(addr string, disableKeepAlive bool, logger log.Logger) (cli *Client, err error) {\n\n\tlogger.Debug(fmt.Sprintf(\"New connect to %s, disableKeepAlive=%t\", addr, disableKeepAlive))\n\n\tcli = &Client{\n\t\treqSent: list.New(),\n\t\taddr: addr,\n\t\tdisableKeepAlive: disableKeepAlive,\n\t\tcounter: 0,\n\t\tlogger: logger}\n\n\terr = cli.connect()\n\tif err != nil {\n\t\treturn\n\t}\n\n\tcli.isRunning = true\n\tgo cli.recvResponseRoutine()\n\n\treturn\n}", "title": "" }, { "docid": "4b03f2c7892b7ee08bca49596331ec95", "score": "0.6986627", "text": "func NewClient(ServerVars *Variables, conn *net.Conn, t time.Time) Client {\n\tvar u string // Saves the username of the client\n\tServerVars.UsernamesMu.Lock()\n\tdefer ServerVars.UsernamesMu.Unlock()\n\n\tfor { // Runs until the client enters a valid username\n\t\t_, err := (*conn).Write([]byte(\"Enter name: \"))\n\t\tCheckError(ServerVars, err)\n\n\t\tbuff := make([]byte, 100)\n\t\tn, err := (*conn).Read(buff)\n\t\tif err != nil {\n\t\t\tServerVars.ErrorLogsChannel <- err // Logging errors\n\t\t\tcontinue\n\t\t}\n\n\t\tu = string(buff[:n])\n\t\tu = strings.Trim(u, \"\\r\\n\")\n\t\t_, exists := ServerVars.Usernames[u] // Checking if the username already exists or not\n\t\tif exists {\n\t\t\t_, err = (*conn).Write([]byte(Colors[\"Red\"](\"Username already exists!!\\n\")))\n\t\t\tCheckError(ServerVars, err)\n\t\t\tcontinue\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tServerVars.ActivityLogsChannel <- fmt.Sprintf(ServerVars.LogActivityFormat, \"New User\", \"\", u, u, t.Format(\"2006-01-02 15:04:05\")) // Logging activity\n\tServerVars.Usernames[u] = conn\n\n\tc := Client{\n\t\tUsername: u,\n\t\tChannels: make([]string, 0),\n\t\tConn: conn,\n\t}\n\n\tConnWrite(ServerVars, &c, \"%s %s\\n\\n\", \"Hey\", Colors[\"Blue\"](c.Username))\n\tc.JoinChannel(ServerVars, \"all\", \"nil\", time.Now()) // Joining the channel ALL\n\n\tServerVars.BlockListMu.Lock()\n\tdefer ServerVars.BlockListMu.Unlock()\n\n\tServerVars.BlockList[c.Username] = make(map[string]bool) // Creating a blocklist for the client\n\n\treturn c\n}", "title": "" }, { "docid": "1a71f74a047e5c46dd8c2a41f2e774de", "score": "0.6984237", "text": "func NewClient(cfg api.Config) (*Client, error) {\n\tif err := cfg.Validate(); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Client{}, nil\n}", "title": "" }, { "docid": "b8e3ca5a2330d386c9180f3dc370c851", "score": "0.698244", "text": "func New(base string) *Client {\n\treturn &Client{base}\n}", "title": "" }, { "docid": "bf25bf56619288dfedaab8941c472a9d", "score": "0.69824225", "text": "func 
NewClient(config *Config) *Client {\n\tif config == nil {\n\t\treturn nil\n\t}\n\tclient := &Client{\n\t\tConfig: config,\n\t\tend: make(chan bool),\n\t}\n\treturn client\n}", "title": "" }, { "docid": "b924c1e5aa6c722a740d1bbdc3e0ea0c", "score": "0.6982289", "text": "func newClient(id int, con net.Conn) *client {\n\treturn &client{\n\t\tID: id,\n\t\tmsg: make(chan []byte),\n\t\texit: make(chan bool),\n\t\tconn: con,\n\t}\n}", "title": "" }, { "docid": "53ab12de821eeb72383101bc7e72beab", "score": "0.69814146", "text": "func NewClient(add, addresume, list goa.Endpoint) *Client {\n\treturn &Client{\n\t\tAddEndpoint: add,\n\t\tAddresumeEndpoint: addresume,\n\t\tListEndpoint: list,\n\t}\n}", "title": "" }, { "docid": "ea5c5ca956fa77deac99fde0f5596ca7", "score": "0.6979237", "text": "func New(conf *config.Config) client.Client {\n\treturn client.Client{\n\t\tConfig: conf,\n\t}\n}", "title": "" }, { "docid": "3eb496d6baf63d404e2f62d504106c7a", "score": "0.6978316", "text": "func newClient(cfg Config, dialer ContextDialer, tlsConfig *tls.Config) *Client {\n\treturn &Client{\n\t\tc: cfg,\n\t\tdialer: dialer,\n\t\ttlsConfig: ConfigureALPN(tlsConfig, cfg.ALPNSNIAuthDialClusterName),\n\t\tclosedFlag: new(int32),\n\t}\n}", "title": "" }, { "docid": "e950c76444c373a3cdf337f40c5d784c", "score": "0.6977986", "text": "func New(addr string) *Client {\n\treturn &Client{addr: addr}\n}", "title": "" }, { "docid": "cd5691baef65a5ef5b627d1dec9aa5ff", "score": "0.69751644", "text": "func NewClient(clientName string) (Client, error) {\n\tif clientName == \"jira\" {\n\t\treturn NewJiraClient(), nil\n\t}\n\n\tif clientName == \"pivotal\" {\n\t\treturn NewPivotalClient(), nil\n\t}\n\n\treturn nil, fmt.Errorf(\"Could not find client: %v\", clientName)\n}", "title": "" }, { "docid": "798023957b86e8b43ec8d5bffc9002a3", "score": "0.69727415", "text": "func NewClient(ip string, psk string) Client {\n\treturn Client{IP: ip, PSK: psk}\n}", "title": "" }, { "docid": "65a0ed81d0d2a61e53aad22e1d6d604a", "score": "0.69712645", "text": "func (client *gocloak) CreateClient(token string, realm string, newClient Client) error {\n\tresp, err := getRequestWithBearerAuth(token).\n\t\tSetBody(newClient).\n\t\tPost(client.basePath + authRealm + realm + \"/clients\")\n\n\terr = checkForError(resp, err)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "8475dddd624788461321e22e7d22a8d9", "score": "0.6965154", "text": "func NewClient(cfg Config) (*Client, error) {\n\tif err := cfg.checkAndSetDefaults(); err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\treturn &Client{\n\t\tConfig: cfg,\n\t\tcloseC: make(chan struct{}),\n\t\tpauseC: make(chan bool),\n\t\tvoterC: make(chan bool),\n\t}, nil\n}", "title": "" }, { "docid": "1b0aec1d44eba09461d92fdbd114b53e", "score": "0.6961604", "text": "func NewClient(token string) *Client {\n\treturn &Client{token: token}\n}", "title": "" }, { "docid": "380f8148bbf2048ad0772e2f25450495", "score": "0.6960541", "text": "func New(conf *Configuration) *Client {\n\treturn &Client{Conf: conf}\n}", "title": "" }, { "docid": "63b2f1bf5aa6a1d4df088254b02d81df", "score": "0.69582444", "text": "func NewClient(delphiClient delphisdk.Client, name string) *Client {\n\tproto.SysmgrRebootReqMountKey(delphiClient, name, delphi.MountMode_ReadWriteMode)\n\treturn &Client{\n\t\tdelphiClient: delphiClient,\n\t\tname: name,\n\t}\n}", "title": "" }, { "docid": "684d5c2dfbdc5a6e92486ff90698bfa5", "score": "0.6949926", "text": "func NewClient(httpClient *http.Client, key string) *Client 
{\n\treturn &Client{httpClient, key}\n}", "title": "" }, { "docid": "f1254b4329bad21b20b38a05cd4b7216", "score": "0.6947219", "text": "func NewClient(db *sqlx.DB) *Client {\n\treturn &Client{\n\t\tDB: db,\n\t}\n}", "title": "" }, { "docid": "bb5372d102264abde23ccf31c618dfea", "score": "0.6940385", "text": "func NewClient(list, add goa.Endpoint) *Client {\n\treturn &Client{\n\t\tListEndpoint: list,\n\t\tAddEndpoint: add,\n\t}\n}", "title": "" }, { "docid": "4bd09150c008743dfbea869da710a636", "score": "0.6939703", "text": "func NewClient(conf *Config) *Client {\n\tclient := &Client{conf, nil}\n\tclient.Login()\n\treturn client\n}", "title": "" }, { "docid": "5c0258c514d325df8a2b2addde13923d", "score": "0.6928993", "text": "func NewClient() *Client {\n\treturn &Client{\n\t\ttunnelOpen: false,\n\t}\n}", "title": "" } ]
e33bfa8c217ef122cb925f115e0f0bfa
ValueFromNative returns a typeasserted pointer to the Value.
[ { "docid": "710491316a46b0e458ed96c1a35fa7b3", "score": "0.778617", "text": "func ValueFromNative(l unsafe.Pointer) *Value {\n\t//TODO why it does not add finalizer to the value?\n\treturn &Value{(*C.GValue)(l)}\n}", "title": "" } ]
[ { "docid": "4fdffc3f26a098d49c2f3df8df95d668", "score": "0.74173355", "text": "func (v *Value) Native() uintptr {\n\treturn uintptr(unsafe.Pointer(v.native()))\n}", "title": "" }, { "docid": "4813d572f5b320685ddd0b1f097102f1", "score": "0.72816515", "text": "func (gen *Generator) ValueToNative(val string, t types.Type) string {\n\tk := t.Kind()\n\tswitch k {\n\tcase types.BoolKind, types.Float32Kind, types.Float64Kind, types.Int16Kind, types.Int32Kind, types.Int64Kind, types.Int8Kind, types.Uint16Kind, types.Uint32Kind, types.Uint64Kind, types.Uint8Kind:\n\t\tn := kindToString(k)\n\t\treturn fmt.Sprintf(\"%s(%s.(%s%s))\", strings.ToLower(n), val, gen.TypesPackage, n)\n\tcase types.StringKind:\n\t\treturn fmt.Sprintf(\"%s.(%sString).String()\", val, gen.TypesPackage)\n\t}\n\tpanic(\"unreachable\")\n}", "title": "" }, { "docid": "1cdb4c1c7522d0d0a0ff14fa0856cb23", "score": "0.71255773", "text": "func NativeToValue(value interface{}) ref.Val {\n\tswitch value.(type) {\n\tcase ref.Val:\n\t\treturn value.(ref.Val)\n\tcase bool:\n\t\treturn Bool(value.(bool))\n\tcase *bool:\n\t\treturn Bool(*value.(*bool))\n\tcase int:\n\t\treturn Int(value.(int))\n\tcase int32:\n\t\treturn Int(value.(int32))\n\tcase int64:\n\t\treturn Int(value.(int64))\n\tcase *int:\n\t\treturn Int(*value.(*int))\n\tcase *int32:\n\t\treturn Int(*value.(*int32))\n\tcase *int64:\n\t\treturn Int(*value.(*int64))\n\tcase uint:\n\t\treturn Uint(value.(uint))\n\tcase uint32:\n\t\treturn Uint(value.(uint32))\n\tcase uint64:\n\t\treturn Uint(value.(uint64))\n\tcase *uint:\n\t\treturn Uint(*value.(*uint))\n\tcase *uint32:\n\t\treturn Uint(*value.(*uint32))\n\tcase *uint64:\n\t\treturn Uint(*value.(*uint64))\n\tcase float32:\n\t\treturn Double(value.(float32))\n\tcase float64:\n\t\treturn Double(value.(float64))\n\tcase *float32:\n\t\treturn Double(*value.(*float32))\n\tcase *float64:\n\t\treturn Double(*value.(*float64))\n\tcase string:\n\t\treturn String(value.(string))\n\tcase *string:\n\t\treturn String(*value.(*string))\n\tcase []byte:\n\t\treturn Bytes(value.([]byte))\n\tcase []string:\n\t\treturn NewStringList(value.([]string))\n\tcase map[string]string:\n\t\treturn NewStringStringMap(value.(map[string]string))\n\tcase *dpb.Duration:\n\t\treturn Duration{value.(*dpb.Duration)}\n\tcase *structpb.ListValue:\n\t\treturn NewJSONList(value.(*structpb.ListValue))\n\tcase structpb.NullValue:\n\t\treturn NullValue\n\tcase *structpb.Struct:\n\t\treturn NewJSONStruct(value.(*structpb.Struct))\n\tcase *structpb.Value:\n\t\tv := value.(*structpb.Value)\n\t\tswitch v.Kind.(type) {\n\t\tcase *structpb.Value_BoolValue:\n\t\t\treturn NativeToValue(v.GetBoolValue())\n\t\tcase *structpb.Value_ListValue:\n\t\t\treturn NativeToValue(v.GetListValue())\n\t\tcase *structpb.Value_NullValue:\n\t\t\treturn NullValue\n\t\tcase *structpb.Value_NumberValue:\n\t\t\treturn NativeToValue(v.GetNumberValue())\n\t\tcase *structpb.Value_StringValue:\n\t\t\treturn NativeToValue(v.GetStringValue())\n\t\tcase *structpb.Value_StructValue:\n\t\t\treturn NativeToValue(v.GetStructValue())\n\t\t}\n\tcase *tpb.Timestamp:\n\t\treturn Timestamp{value.(*tpb.Timestamp)}\n\tcase *anypb.Any:\n\t\tval := value.(*anypb.Any)\n\t\tunpackedAny := ptypes.DynamicAny{}\n\t\tif ptypes.UnmarshalAny(val, &unpackedAny) != nil {\n\t\t\tNewErr(\"Fail to unmarshal any.\")\n\t\t}\n\t\treturn NativeToValue(unpackedAny.Message)\n\tcase proto.Message:\n\t\treturn NewObject(value.(proto.Message))\n\tdefault:\n\t\trefValue := reflect.ValueOf(value)\n\t\tif refValue.Kind() == reflect.Ptr 
{\n\t\t\trefValue = refValue.Elem()\n\t\t}\n\t\trefKind := refValue.Kind()\n\t\tswitch refKind {\n\t\tcase reflect.Array, reflect.Slice:\n\t\t\treturn NewDynamicList(value)\n\t\tcase reflect.Map:\n\t\t\treturn NewDynamicMap(value)\n\t\t// Enums are a type alias of int32, so they cannot be asserted as an\n\t\t// int32 value, but rather need to be downcast to int32 before being\n\t\t// converted to an Int representation.\n\t\tcase reflect.Int32:\n\t\t\tintType := reflect.TypeOf(int32(0))\n\t\t\treturn Int(refValue.Convert(intType).Interface().(int32))\n\t\t}\n\t}\n\treturn NewErr(\"unsupported type conversion for value '%v'\", value)\n}", "title": "" }, { "docid": "ba1eea55a7991e0deef967c62e0ebed0", "score": "0.7115034", "text": "func (v *Value) native() *C.GValue {\n\treturn v.GValue\n}", "title": "" }, { "docid": "c3c743729d4973a6739211ca35cf08cf", "score": "0.68117726", "text": "func Native(rv reflect.Value) dgo.Native {\n\treturn native(rv)\n}", "title": "" }, { "docid": "04720024052944806c8844d958550a51", "score": "0.66611224", "text": "func (gen *Generator) NativeToValue(val string, t types.Type) string {\n\tt = gen.R.Resolve(t)\n\tk := t.Kind()\n\tswitch k {\n\tcase types.BoolKind, types.Float32Kind, types.Float64Kind, types.Int16Kind, types.Int32Kind, types.Int64Kind, types.Int8Kind, types.Uint16Kind, types.Uint32Kind, types.Uint64Kind, types.Uint8Kind:\n\t\treturn fmt.Sprintf(\"%s%s(%s)\", gen.TypesPackage, kindToString(k), val)\n\tcase types.StringKind:\n\t\treturn fmt.Sprintf(\"%sNewString(%s)\", gen.TypesPackage, val)\n\t}\n\tpanic(\"unreachable\")\n}", "title": "" }, { "docid": "1437ec3090f11dc6c41329334ec5ff51", "score": "0.6400274", "text": "func ValuePtr(v interface{}) unsafe.Pointer {\n\treturn ((*iface)(unsafe.Pointer(&v))).data\n}", "title": "" }, { "docid": "57c22c4443bfce169ae2dc35242817eb", "score": "0.63596326", "text": "func go2GnoValue(rv reflect.Value) (tv TypedValue) {\n\tif rv.Type().PkgPath() != \"\" {\n\t\trt := rv.Type()\n\t\ttv.T = &nativeType{Type: rt}\n\t\ttv.V = &nativeValue{Value: rv}\n\t\treturn\n\t}\n\ttv.T = go2GnoType(rv.Type())\n\tswitch rk := rv.Kind(); rk {\n\tcase reflect.Bool:\n\t\ttv.SetBool(rv.Bool())\n\tcase reflect.String:\n\t\ttv.V = StringValue(rv.String())\n\tcase reflect.Int:\n\t\ttv.SetInt(int(rv.Int()))\n\tcase reflect.Int8:\n\t\ttv.SetInt8(int8(rv.Int()))\n\tcase reflect.Int16:\n\t\ttv.SetInt16(int16(rv.Int()))\n\tcase reflect.Int32:\n\t\ttv.SetInt32(int32(rv.Int()))\n\tcase reflect.Int64:\n\t\ttv.SetInt64(int64(rv.Int()))\n\tcase reflect.Uint:\n\t\ttv.SetUint(uint(rv.Uint()))\n\tcase reflect.Uint8:\n\t\ttv.SetUint8(uint8(rv.Uint()))\n\tcase reflect.Uint16:\n\t\ttv.SetUint16(uint16(rv.Uint()))\n\tcase reflect.Uint32:\n\t\ttv.SetUint32(uint32(rv.Uint()))\n\tcase reflect.Uint64:\n\t\ttv.SetUint64(uint64(rv.Uint()))\n\tcase reflect.Array:\n\t\ttv.V = &nativeValue{rv}\n\tcase reflect.Slice:\n\t\ttv.V = &nativeValue{rv}\n\tcase reflect.Chan:\n\t\ttv.V = &nativeValue{rv}\n\tcase reflect.Func:\n\t\ttv.V = &nativeValue{rv}\n\tcase reflect.Interface:\n\t\ttv.V = &nativeValue{rv}\n\tcase reflect.Map:\n\t\ttv.V = &nativeValue{rv}\n\tcase reflect.Ptr:\n\t\ttv.V = &nativeValue{rv}\n\tcase reflect.Struct:\n\t\ttv.V = &nativeValue{rv}\n\tcase reflect.UnsafePointer:\n\t\tpanic(\"not yet implemented\")\n\tdefault:\n\t\tpanic(\"not yet implemented\")\n\t}\n\treturn\n}", "title": "" }, { "docid": "5dddfb085d117de6b46a42d155d7ebe4", "score": "0.6312791", "text": "func ToNative(v sqltypes.Value) (interface{}, error) {\n\tvar out interface{}\n\tvar err 
error\n\tswitch {\n\tcase v.Type() == sqltypes.Null:\n\t\t// no-op\n\tcase v.IsSigned():\n\t\treturn ToInt64(v)\n\tcase v.IsUnsigned():\n\t\treturn ToUint64(v)\n\tcase v.IsFloat():\n\t\treturn ToFloat64(v)\n\tcase v.IsQuoted() || v.Type() == sqltypes.Bit || v.Type() == sqltypes.Decimal:\n\t\tout = v.ToBytes()\n\tcase v.Type() == sqltypes.Expression:\n\t\terr = vterrors.Errorf(vtrpcpb.Code_INVALID_ARGUMENT, \"%v cannot be converted to a go type\", v)\n\t}\n\treturn out, err\n}", "title": "" }, { "docid": "9ec40d924e9c68e4313782bde9452b81", "score": "0.6163626", "text": "func (v Value) Pointer() uintptr", "title": "" }, { "docid": "1bd1a4aa75cb4d48828e2010b9baef10", "score": "0.61584836", "text": "func Value(ptr interface{}) (ret Object) {\n\treturn fromPtr(reflect.ValueOf(ptr))\n}", "title": "" }, { "docid": "049d012d0e68247fc3c1a5c7294d8edb", "score": "0.615655", "text": "func (v *Object) Native() uintptr {\n\treturn uintptr(unsafe.Pointer(v.native()))\n}", "title": "" }, { "docid": "1a82d9229db7690d4a1aeaaa365e59b0", "score": "0.61424553", "text": "func Value(p interface{}) js.Value {\n\tvr, ok := p.(JSValuer)\n\tif ok {\n\t\treturn vr.JSValue()\n\t}\n\n\tt := reflect.TypeOf(p)\n\trv := reflect.ValueOf(p)\n\n\tswitch t.Kind() {\n\tcase reflect.Struct:\n\t\t// If the struct has an embedded js.Value then we return that.\n\t\tf, ok := t.FieldByName(valueFieldName)\n\t\tif ok && f.Anonymous && f.Type == jsValueType {\n\t\t\treturn rv.FieldByName(valueFieldName).Interface().(js.Value)\n\t\t}\n\n\t\tv := js.Global().Get(\"Object\").New()\n\t\tstructValue(v, p)\n\t\treturn v\n\tcase reflect.Ptr:\n\t\treturn Value(rv.Elem().Interface())\n\tdefault:\n\t\treturn js.ValueOf(p)\n\t}\n}", "title": "" }, { "docid": "8a90a4e6d6c4f6a6bbc15c13a5cc3899", "score": "0.6050098", "text": "func (v *Event) Native() unsafe.Pointer {\n\tif v == nil {\n\t\treturn nil\n\t}\n\treturn unsafe.Pointer(v.native())\n}", "title": "" }, { "docid": "b2a5179ebbabbdc8dbf9770da42a5dd7", "score": "0.6031881", "text": "func (v *Value) GetPointer() unsafe.Pointer {\n\treturn unsafe.Pointer(C.g_value_get_pointer(v.native()))\n}", "title": "" }, { "docid": "62e7a6cdfd937ad681b36a81a1521dae", "score": "0.6014225", "text": "func (v *Raw) Value() *Value {\n\treturn (*Value)(unsafe.Pointer(v))\n}", "title": "" }, { "docid": "db17e9effc978ec9713b13b8071dbd6d", "score": "0.59462154", "text": "func (s *Uint) GetValuePointer() interface{} {\n\treturn &s.Value\n}", "title": "" }, { "docid": "e942f2d36dd47eb6bc4e0e12a6cece8c", "score": "0.59429336", "text": "func callNativeFunction(handle *Function, args []interface{}) (retVal *Value, err error) {\n argsIn := make([]*Value, len(args))\n var typeCodes []int32\n if len(args) != 0 {\n typeCodes = make([]int32, len(args))\n } else {\n typeCodes = make([]int32, 1)\n }\n\n for ii := range args {\n argsIn[ii] = newTVMValue()\n if typeCodes[ii], err = argsIn[ii].setValue(args[ii]); err != nil {\n return\n }\n }\n\n retVal = newTVMValue()\n argsOut := []*Value{retVal}\n retTypeCode := KNull\n err = nativeTVMFuncCall(handle, argsIn, typeCodes, argsOut, &retTypeCode)\n if err != nil {\n retVal = nil\n return\n }\n retVal.isLocal = false\n retVal.dtype = retTypeCode\n return\n}", "title": "" }, { "docid": "e2a764cf6d186e2fcfc1d600a0b20668", "score": "0.59321994", "text": "func nativeFromGoSlice(argValues []*Value) (nptr (*C.void)) {\n nargValues := ((uintptr)(C.malloc(C.ulong(C.sizeof_TVMValue * len(argValues)))))\n for ii := range argValues {\n C._TVMValueNativeSet(unsafe.Pointer(nargValues),\n 
unsafe.Pointer(argValues[ii].nativeCPtr()),\n C.int(int32(ii)))\n }\n nptr = (*C.void)(unsafe.Pointer(nargValues))\n return\n}", "title": "" }, { "docid": "57ef1f0887cb609f1b61210274dab748", "score": "0.57932323", "text": "func (v *Event) Native() uintptr {\n\treturn uintptr(unsafe.Pointer(v.native()))\n}", "title": "" }, { "docid": "3e20712cf46630d67bcb04a583b3ef8a", "score": "0.5757456", "text": "func (v RemoteEndpointInfo) GetValuePtr() unsafe.Pointer { return unsafe.Pointer(&v) }", "title": "" }, { "docid": "6d0e7e77de6912788bbca6409f120428", "score": "0.5746219", "text": "func (v *EventKey) Native() uintptr {\n\treturn uintptr(unsafe.Pointer(v.native()))\n}", "title": "" }, { "docid": "d3938dc6a997506b1a06dff74e019c47", "score": "0.57387155", "text": "func (v *PixbufFormat) Native() uintptr {\n\treturn uintptr(unsafe.Pointer(v.native()))\n}", "title": "" }, { "docid": "f6aa3ad99ea7b34eb0d6e7f893915077", "score": "0.5728587", "text": "func gno2GoValue(tv *TypedValue, rv reflect.Value) reflect.Value {\n\tif tv.IsUndefined() {\n\t\tif debug {\n\t\t\tif !rv.IsValid() {\n\t\t\t\tpanic(\"unexpected undefined gno value\")\n\t\t\t}\n\t\t}\n\t\treturn rv\n\t}\n\tbt := baseOf(tv.T)\n\tvar rt reflect.Type\n\tif !rv.IsValid() {\n\t\trt = gno2GoType(bt)\n\t\trv = reflect.New(rt).Elem()\n\t}\n\tswitch ct := bt.(type) {\n\tcase PrimitiveType:\n\t\tswitch ct {\n\t\tcase BoolType, UntypedBoolType:\n\t\t\trv.SetBool(tv.GetBool())\n\t\tcase StringType, UntypedStringType:\n\t\t\trv.SetString(string(tv.GetString()))\n\t\tcase IntType:\n\t\t\trv.SetInt(int64(tv.GetInt()))\n\t\tcase Int8Type:\n\t\t\trv.SetInt(int64(tv.GetInt8()))\n\t\tcase Int16Type:\n\t\t\trv.SetInt(int64(tv.GetInt16()))\n\t\tcase Int32Type, UntypedRuneType:\n\t\t\trv.SetInt(int64(tv.GetInt32()))\n\t\tcase Int64Type:\n\t\t\trv.SetInt(int64(tv.GetInt64()))\n\t\tcase UintType:\n\t\t\trv.SetUint(uint64(tv.GetUint()))\n\t\tcase Uint8Type:\n\t\t\trv.SetUint(uint64(tv.GetUint8()))\n\t\tcase Uint16Type:\n\t\t\trv.SetUint(uint64(tv.GetUint16()))\n\t\tcase Uint32Type:\n\t\t\trv.SetUint(uint64(tv.GetUint32()))\n\t\tcase Uint64Type:\n\t\t\trv.SetUint(uint64(tv.GetUint64()))\n\t\tdefault:\n\t\t\tpanic(fmt.Sprintf(\n\t\t\t\t\"unexpected type %s\",\n\t\t\t\ttv.T.String()))\n\t\t}\n\tcase PointerType:\n\t\t// This doesn't take into account pointer relativity, or even\n\t\t// identical pointers -- every non-nil gno pointer type results in a\n\t\t// new addressable value in go.\n\t\trv2 := gno2GoValue(tv.V.(PointerValue).TypedValue, reflect.Value{})\n\t\trv.Set(rv2.Addr())\n\tcase *ArrayType:\n\t\tif debug {\n\t\t\tif tv.V == nil {\n\t\t\t\t// all arguments and recursively fetched arrays\n\t\t\t\t// should have been initialized if not already so.\n\t\t\t\tpanic(\"unexpected uninitialized array\")\n\t\t\t}\n\t\t}\n\t\t// General case.\n\t\tav := tv.V.(*ArrayValue)\n\t\tif av.Data == nil {\n\t\t\tfor i := 0; i < ct.Len; i++ {\n\t\t\t\tetv := &av.List[i]\n\t\t\t\tif etv.IsUndefined() {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tgno2GoValue(etv, rv.Index(i))\n\t\t\t}\n\t\t} else {\n\t\t\tpanic(\"not yet implemented\")\n\t\t\t/*\n\t\t\t\tel := av.GetLength()\n\t\t\t\tec := av.GetCapacity()\n\t\t\t\tdata := make([]byte, el, ec)\n\t\t\t\tcopy(data, av.Data)\n\t\t\t\trv = reflect.ValueOf(data)\n\t\t\t*/\n\t\t}\n\tcase *SliceType:\n\t\tst := gno2GoType(ct)\n\t\t// If uninitialized slice, return zero value.\n\t\tif tv.V == nil {\n\t\t\treturn rv\n\t\t}\n\t\t// General case.\n\t\tsv := tv.V.(*SliceValue)\n\t\tsvo := sv.Offset\n\t\tsvl := sv.Length\n\t\tsvc := 
sv.Maxcap\n\t\tif sv.Base.Data == nil {\n\t\t\trv.Set(reflect.MakeSlice(st, svl, svc))\n\t\t\tfor i := 0; i < svl; i++ {\n\t\t\t\tetv := &(sv.Base.List[svo+i])\n\t\t\t\tif etv.IsUndefined() {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tgno2GoValue(etv, rv.Index(i))\n\t\t\t}\n\t\t} else {\n\t\t\tdata := make([]byte, svl, svc)\n\t\t\tcopy(data[:svc], sv.Base.Data[svo:svo+svc])\n\t\t\trv.Set(reflect.ValueOf(data))\n\t\t}\n\tcase *StructType:\n\t\t// If uninitialized struct, return zero value.\n\t\tif tv.V == nil {\n\t\t\treturn rv\n\t\t}\n\t\t// General case.\n\t\tsv := tv.V.(*StructValue)\n\t\t// Use st.Mapping to translate from Go to Gno field numbers.\n\t\tfor orig, flat := range ct.Mapping {\n\t\t\tftv := &(sv.Fields[flat])\n\t\t\tif ftv.IsUndefined() {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tgno2GoValue(ftv, rv.Field(orig))\n\t\t}\n\tcase *MapType:\n\t\t// If uninitialized map, return zero value.\n\t\tif tv.V == nil {\n\t\t\treturn rv\n\t\t}\n\t\t// General case.\n\t\tmv := tv.V.(*MapValue)\n\t\thead := mv.List.Head\n\t\tfor head != nil {\n\t\t\tktv, vtv := &head.Key, &head.Value\n\t\t\tkrv := gno2GoValue(ktv, reflect.Value{})\n\t\t\tvrv := gno2GoValue(vtv, reflect.Value{})\n\t\t\trv.SetMapIndex(krv, vrv)\n\t\t\thead = head.Next\n\t\t}\n\tcase *nativeType:\n\t\t// If uninitialized native type, leave rv uninitialized.\n\t\tif tv.V == nil {\n\t\t\treturn rv\n\t\t}\n\t\t// General case.\n\t\trv.Set(tv.V.(*nativeValue).Value)\n\tcase *DeclaredType:\n\t\t// See corresponding note on gno2GoType().\n\t\tpanic(\"should not happen\") // we switch on baseOf().\n\tdefault:\n\t\tpanic(fmt.Sprintf(\n\t\t\t\"unexpected type %s\",\n\t\t\ttv.T.String()))\n\t}\n\treturn rv\n}", "title": "" }, { "docid": "663f596f6fa2922d1eabce8cba8edd58", "score": "0.5719996", "text": "func (v *Real) Value() *Value {\n\treturn (*Value)(unsafe.Pointer(v))\n}", "title": "" }, { "docid": "836e899bde7a750d95df2214ca0db7cd", "score": "0.5683189", "text": "func Value(v interface{}) values.Value {\n\tswitch v := v.(type) {\n\tcase *int64:\n\t\tif v == nil {\n\t\t\treturn values.NewNull(semantic.BasicInt)\n\t\t}\n\t\treturn values.NewInt(*v)\n\tcase *uint64:\n\t\tif v == nil {\n\t\t\treturn values.NewNull(semantic.BasicUint)\n\t\t}\n\t\treturn values.NewUInt(*v)\n\tcase *float64:\n\t\tif v == nil {\n\t\t\treturn values.NewNull(semantic.BasicFloat)\n\t\t}\n\t\treturn values.NewFloat(*v)\n\tcase *string:\n\t\tif v == nil {\n\t\t\treturn values.NewNull(semantic.BasicString)\n\t\t}\n\t\treturn values.NewString(*v)\n\tcase *bool:\n\t\tif v == nil {\n\t\t\treturn values.NewNull(semantic.BasicBool)\n\t\t}\n\t\treturn values.NewBool(*v)\n\tcase *values.Time:\n\t\tif v == nil {\n\t\t\treturn values.NewNull(semantic.BasicTime)\n\t\t}\n\t\treturn values.NewTime(*v)\n\tcase *values.Duration:\n\t\tif v == nil {\n\t\t\treturn values.NewNull(semantic.BasicDuration)\n\t\t}\n\t\treturn values.NewDuration(*v)\n\t}\n\treturn values.New(v)\n}", "title": "" }, { "docid": "363d96ef4c3648cef6af9aab0a4505c5", "score": "0.5680416", "text": "func ptrToValue(value reflect.Value) reflect.Value {\n\tif isValueStructPtr(value) {\n\t\treturn value.Elem()\n\t}\n\treturn value\n}", "title": "" }, { "docid": "ae578a3dfdbfd0a537f944748de400d7", "score": "0.5680169", "text": "func (v *Float) ValuePointer() interface{} { return v.valPtr }", "title": "" }, { "docid": "6dc7746bba98485e992963b6b52a974c", "score": "0.56399226", "text": "func pointerOf(v reflect.Value) pointer { return unsafePointerOf(v) }", "title": "" }, { "docid": "14e5d7ae71e95d68e1591ad5535e57f4", "score": 
"0.5619413", "text": "func (v *Object) goValue() (interface{}, error) {\n\tobjType := Type(C._g_type_from_instance(C.gpointer(v.native())))\n\tf, err := gValueMarshalers.lookupType(objType)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// The marshalers expect Values, not Objects\n\tval, err := ValueInit(objType)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tval.SetInstance(uintptr(unsafe.Pointer(v.GObject)))\n\trv, err := f(uintptr(unsafe.Pointer(val.native())))\n\treturn rv, err\n}", "title": "" }, { "docid": "e65d064869c6b3d84d2bc21488099b32", "score": "0.56062025", "text": "func nativeToGoSlice(nargValues (*C.void), argValues []*Value, typeCodes []int32) {\n for ii := range argValues {\n C._TVMValueNativeGet(unsafe.Pointer(argValues[ii].nativeCPtr()),\n unsafe.Pointer(nargValues),\n C.int(int32(ii)))\n argValues[ii].dtype = typeCodes[ii]\n }\n}", "title": "" }, { "docid": "086748d8ab3d9cd9c6ead0e6bff58e30", "score": "0.56054074", "text": "func nativeFromApp(app interface{}) *gobject.GValue {\n\targapp := app.(gobject.ObjectLike)\n\tgv := gobject.CreateCGValue(GtkType.APPLICATION, argapp.ToNative())\n\treturn gv\n}", "title": "" }, { "docid": "022f596ba8897c73e271e9d47e2b7f6b", "score": "0.56017095", "text": "func PtrValue(v reflect.Value) reflect.Value {\n\tif v.CanAddr() && v.Kind() != reflect.Ptr {\n\t\tv = v.Addr()\n\t}\n\treturn v\n}", "title": "" }, { "docid": "2b99a27b6311d75d3051bcbd490404ac", "score": "0.5574262", "text": "func (v *GlyphString) Native() uintptr {\n\treturn uintptr(unsafe.Pointer(v.native()))\n}", "title": "" }, { "docid": "6672bf8c2b5e38066e1db172a4e45514", "score": "0.55723864", "text": "func (v Value) UnsafeAddr() uintptr", "title": "" }, { "docid": "95893e36e0d98403af40d5cfab6c346e", "score": "0.55327976", "text": "func toValue(p interface{}) reflect.Value {\n\tv := reflect.ValueOf(p)\n\tif v.Kind() == reflect.Ptr {\n\t\tv = v.Elem()\n\t}\n\treturn v\n}", "title": "" }, { "docid": "c03b9f900abed2d4e5e06f122977cfdf", "score": "0.552212", "text": "func (c *FloatPtrConverter) Value() *float64 {\n\treturn c.value\n}", "title": "" }, { "docid": "425c3dd5c00a4322e11d2b9b5f0fac14", "score": "0.5519868", "text": "func go2GnoValue2(rv reflect.Value) (tv TypedValue) {\n\ttv.T = go2GnoType2(rv.Type())\n\tswitch rk := rv.Kind(); rk {\n\tcase reflect.Bool:\n\t\ttv.SetBool(rv.Bool())\n\tcase reflect.String:\n\t\ttv.V = StringValue(rv.String())\n\tcase reflect.Int:\n\t\ttv.SetInt(int(rv.Int()))\n\tcase reflect.Int8:\n\t\ttv.SetInt8(int8(rv.Int()))\n\tcase reflect.Int16:\n\t\ttv.SetInt16(int16(rv.Int()))\n\tcase reflect.Int32:\n\t\ttv.SetInt32(int32(rv.Int()))\n\tcase reflect.Int64:\n\t\ttv.SetInt64(int64(rv.Int()))\n\tcase reflect.Uint:\n\t\ttv.SetUint(uint(rv.Uint()))\n\tcase reflect.Uint8:\n\t\ttv.SetUint8(uint8(rv.Uint()))\n\tcase reflect.Uint16:\n\t\ttv.SetUint16(uint16(rv.Uint()))\n\tcase reflect.Uint32:\n\t\ttv.SetUint32(uint32(rv.Uint()))\n\tcase reflect.Uint64:\n\t\ttv.SetUint64(uint64(rv.Uint()))\n\tcase reflect.Array:\n\t\trvl := rv.Len()\n\t\tlist := make([]TypedValue, rvl)\n\t\tfor i := 0; i < rvl; i++ {\n\t\t\tlist[i] = go2GnoValue(rv.Index(i))\n\t\t}\n\t\ttv.V = &ArrayValue{\n\t\t\tList: list,\n\t\t}\n\tcase reflect.Slice:\n\t\trvl := rv.Len()\n\t\trvc := rv.Cap()\n\t\tlist := make([]TypedValue, rvl, rvc)\n\t\tfor i := 0; i < rvl; i++ {\n\t\t\tlist[i] = go2GnoValue(rv.Index(i))\n\t\t}\n\t\ttv.V = newSliceFromList(list)\n\tcase reflect.Chan:\n\t\tpanic(\"not yet implemented\")\n\tcase reflect.Func:\n\t\tpanic(\"not yet implemented\")\n\tcase 
reflect.Interface:\n\t\tpanic(\"not yet implemented\")\n\tcase reflect.Map:\n\t\tpanic(\"not yet implemented\")\n\tcase reflect.Ptr:\n\t\ttv.T = PointerType{Elt: go2GnoType2(rv.Type().Elem())}\n\t\tval := go2GnoValue2(rv.Elem())\n\t\ttv.V = PointerValue{TypedValue: &val} // heap alloc\n\tcase reflect.Struct:\n\t\tpanic(\"not yet implemented\")\n\tcase reflect.UnsafePointer:\n\t\tpanic(\"not yet implemented\")\n\tdefault:\n\t\tpanic(\"not yet implemented\")\n\t}\n\treturn\n}", "title": "" }, { "docid": "4c765ae75ef7e89b3ead9a54cc71bc3e", "score": "0.5505228", "text": "func (v *Integer) Value() *Value {\n\treturn (*Value)(unsafe.Pointer(v))\n}", "title": "" }, { "docid": "8b5b4d219bf5f73d609bf6c22a476fc6", "score": "0.5454031", "text": "func (v *InitiallyUnowned) Native() uintptr {\n\tif v == nil || v.Object == nil {\n\t\treturn uintptr(unsafe.Pointer(nil))\n\t}\n\treturn v.Object.Native()\n}", "title": "" }, { "docid": "57c6696ee3a51b33e081e6fcaebb7bf7", "score": "0.5432921", "text": "func CastToValue(object *gobject.Object) *Value {\n\treturn ValueNewFromC(object.ToC())\n}", "title": "" }, { "docid": "6b9155c939361a8962412389cf4d65af", "score": "0.5432343", "text": "func NativeTensorToNative(t *NativeTensor) (interface{}, error) {\n\tif int(t.Type) > len(types) {\n\t\treturn nil, fmt.Errorf(\"Unknown type: %d\", t.Type)\n\t}\n\n\tmeta := types[t.Type]\n\ttyp := meta.typ\n\tif typ.Kind() == reflect.Struct {\n\t\treturn nil, fmt.Errorf(\"Invalid type: %d\", t.Type)\n\t}\n\tshapeLen := len(t.Shape)\n\tshape := make([]int, shapeLen)\n\telems := 1\n\tfor i := 0; i < shapeLen; i++ {\n\t\ttyp = reflect.SliceOf(typ)\n\t\tshape[i] = int(t.Shape[i])\n\t\telems *= shape[i]\n\t}\n\tif shapeLen == 0 {\n\t\telems = 0\n\t}\n\n\tvar data interface{}\n\tif t.Type == graphpipefb.TypeString {\n\t\tnum := len(t.StringVals)\n\t\tif num != elems {\n\t\t\treturn nil, fmt.Errorf(\"Incorrect number of elements in (%d) != (%d)\",\n\t\t\t\tnum, elems)\n\t\t}\n\t\tstrs := make([]string, num)\n\t\tfor i := 0; i < num; i++ {\n\t\t\tstrs[i] = t.StringVals[i]\n\t\t}\n\t\tdata = strs\n\t} else {\n\t\tif len(t.Data) != elems*int(meta.size) {\n\t\t\treturn nil, fmt.Errorf(\"Incorrect number of elements in (%d) != (%d)\",\n\t\t\t\tlen(t.Data)/int(meta.size), elems)\n\t\t}\n\t\tdata = meta.conv(t.Data)\n\t}\n\n\treturn sliceData(typ, reflect.ValueOf(data), shape).Interface(), nil\n}", "title": "" }, { "docid": "a61188d09e97a0af658aa687979bcdc6", "score": "0.5409263", "text": "func (n Number) Value() interface{} { return &n }", "title": "" }, { "docid": "fab2dc13b613816098f0ea1c0ec0090a", "score": "0.53966326", "text": "func (v *String) Value() *Value {\n\treturn (*Value)(unsafe.Pointer(v))\n}", "title": "" }, { "docid": "847c3f246b4cde4e1fc0adb0ce55b603", "score": "0.53545845", "text": "func (v Value) Value() Value { return v }", "title": "" }, { "docid": "6e48c516f6b17822a3731ad2a1a24d02", "score": "0.5338292", "text": "func (v *Object) native() *C.GObject {\n\tif v == nil || v.GObject == nil {\n\t\treturn nil\n\t}\n\tp := unsafe.Pointer(v.GObject)\n\treturn C.toGObject(p)\n}", "title": "" }, { "docid": "c2d144f5d754782a38159b4664cb0575", "score": "0.53253067", "text": "func (v *Settings) native() *C.WebKitSettings {\n\tif v == nil || v.GObject == nil {\n\t\treturn nil\n\t}\n\tp := unsafe.Pointer(v.GObject)\n\treturn C.toWebKitSettings(p)\n}", "title": "" }, { "docid": "ebe4a878e362fdb53193c75dcdf9e0a3", "score": "0.5314611", "text": "func Value(v interface{}) dgo.Value {\n\t// This function is kept very small to enable 
inlining so this\n\t// if statement should not be baked in to the grand switch\n\t// in the value function\n\tif gv, ok := v.(dgo.Value); ok {\n\t\treturn gv\n\t}\n\tif dv := value(v); dv != nil {\n\t\treturn dv\n\t}\n\treturn ValueFromReflected(reflect.ValueOf(v))\n}", "title": "" }, { "docid": "5ae26cd782765408a1e458a8c08ba24c", "score": "0.53088576", "text": "func FromValue(v reflect.Value) Value {\n\tt := v.Type()\n\n\tif t.Kind() == reflect.Ptr {\n\t\tt = t.Elem()\n\n\t\tif t.Kind() == reflect.Slice {\n\t\t\tswitch t.Elem().Name() {\n\t\t\tcase \"float64\":\n\t\t\t\treturn NewFloatSliceFromPtr(v.Interface().(*[]float64))\n\t\t\tcase \"uint64\":\n\t\t\t\treturn NewUIntSliceFromPtr(v.Interface().(*[]uint64))\n\t\t\tcase \"int64\":\n\t\t\t\treturn NewIntSliceFromPtr(v.Interface().(*[]int64))\n\t\t\tcase \"bool\":\n\t\t\t\treturn NewBoolSliceFromPtr(v.Interface().(*[]bool))\n\t\t\tcase \"string\":\n\t\t\t\treturn NewStringSliceFromPtr(v.Interface().(*[]string))\n\t\t\t}\n\t\t} else {\n\t\t\tswitch t.Name() {\n\t\t\tcase \"float64\":\n\t\t\t\treturn NewFloatFromPtr(v.Interface().(*float64))\n\t\t\tcase \"uint64\":\n\t\t\t\treturn NewUIntFromPtr(v.Interface().(*uint64))\n\t\t\tcase \"int64\":\n\t\t\t\treturn NewIntFromPtr(v.Interface().(*int64))\n\t\t\tcase \"bool\":\n\t\t\t\treturn NewBoolFromPtr(v.Interface().(*bool))\n\t\t\tcase \"string\":\n\t\t\t\treturn NewStringFromPtr(v.Interface().(*string))\n\t\t\t}\n\t\t}\n\t} else if t.Kind() == reflect.Slice {\n\t\tswitch t.Elem().Name() {\n\t\tcase \"float64\":\n\t\t\treturn NewFloatSlice(v.Interface().([]float64)...)\n\t\tcase \"uint64\":\n\t\t\treturn NewUIntSlice(v.Interface().([]uint64)...)\n\t\tcase \"int64\":\n\t\t\treturn NewIntSlice(v.Interface().([]int64)...)\n\t\tcase \"bool\":\n\t\t\treturn NewBoolSlice(v.Interface().([]bool)...)\n\t\tcase \"string\":\n\t\t\treturn NewStringSlice(v.Interface().([]string)...)\n\t\t}\n\t} else {\n\t\tswitch t.Name() {\n\t\tcase \"float64\":\n\t\t\treturn NewFloat(v.Interface().(float64))\n\t\tcase \"uint64\":\n\t\t\treturn NewUInt(v.Interface().(uint64))\n\t\tcase \"int64\":\n\t\t\treturn NewInt(v.Interface().(int64))\n\t\tcase \"bool\":\n\t\t\treturn NewBool(v.Interface().(bool))\n\t\tcase \"string\":\n\t\t\treturn NewString(v.Interface().(string))\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "446a1d13dd6f466bafd9f624ab83b5f1", "score": "0.52891475", "text": "func (v *GlyphVisAttr) Native() uintptr {\n\treturn uintptr(unsafe.Pointer(v.native()))\n}", "title": "" }, { "docid": "e5b84b271f7bdfd800406425cab7e032", "score": "0.52805036", "text": "func (n NullUint32) Value() (driver.Value, error) {\n\tif !n.Valid {\n\t\treturn nil, nil\n\t}\n\treturn n.Uint32, nil\n}", "title": "" }, { "docid": "592191026d0a581076ba2983b593e5f2", "score": "0.5274203", "text": "func GValue(v interface{}) (gvalue *Value, err error) {\n\tif v == nil {\n\t\tval, err := ValueInit(TYPE_POINTER)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetPointer(uintptr(unsafe.Pointer(nil)))\n\t\treturn val, nil\n\t}\n\n\tswitch e := v.(type) {\n\tcase bool:\n\t\tval, err := ValueInit(TYPE_BOOLEAN)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetBool(e)\n\t\treturn val, nil\n\n\tcase int8:\n\t\tval, err := ValueInit(TYPE_CHAR)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetSChar(e)\n\t\treturn val, nil\n\n\tcase int64:\n\t\tval, err := ValueInit(TYPE_INT64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetInt64(e)\n\t\treturn val, nil\n\n\tcase int:\n\t\tval, err := 
ValueInit(TYPE_INT)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetInt(e)\n\t\treturn val, nil\n\n\tcase uint8:\n\t\tval, err := ValueInit(TYPE_UCHAR)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetUChar(e)\n\t\treturn val, nil\n\n\tcase uint64:\n\t\tval, err := ValueInit(TYPE_UINT64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetUInt64(e)\n\t\treturn val, nil\n\n\tcase uint:\n\t\tval, err := ValueInit(TYPE_UINT)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetUInt(e)\n\t\treturn val, nil\n\n\tcase float32:\n\t\tval, err := ValueInit(TYPE_FLOAT)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetFloat(e)\n\t\treturn val, nil\n\n\tcase float64:\n\t\tval, err := ValueInit(TYPE_DOUBLE)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetDouble(e)\n\t\treturn val, nil\n\n\tcase string:\n\t\tval, err := ValueInit(TYPE_STRING)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetString(e)\n\t\treturn val, nil\n\n\tcase *Object:\n\t\tval, err := ValueInit(TYPE_OBJECT)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tval.SetInstance(uintptr(unsafe.Pointer(e.GObject)))\n\t\treturn val, nil\n\n\tdefault:\n\t\t/* Try this since above doesn't catch constants under other types */\n\t\trval := reflect.ValueOf(v)\n\t\tswitch rval.Kind() {\n\t\tcase reflect.Int8:\n\t\t\tval, err := ValueInit(TYPE_CHAR)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tval.SetSChar(int8(rval.Int()))\n\t\t\treturn val, nil\n\n\t\tcase reflect.Int16:\n\t\t\treturn nil, errors.New(\"Type not implemented\")\n\n\t\tcase reflect.Int32:\n\t\t\treturn nil, errors.New(\"Type not implemented\")\n\n\t\tcase reflect.Int64:\n\t\t\tval, err := ValueInit(TYPE_INT64)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tval.SetInt64(rval.Int())\n\t\t\treturn val, nil\n\n\t\tcase reflect.Int:\n\t\t\tval, err := ValueInit(TYPE_INT)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tval.SetInt(int(rval.Int()))\n\t\t\treturn val, nil\n\n\t\tcase reflect.Uintptr, reflect.Ptr:\n\t\t\tval, err := ValueInit(TYPE_POINTER)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tval.SetPointer(rval.Pointer())\n\t\t\treturn val, nil\n\t\t}\n\t}\n\n\treturn nil, errors.New(\"Type not implemented\")\n}", "title": "" }, { "docid": "f247e28c8a434cb23513ffe3c9b927cb", "score": "0.526342", "text": "func (n *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_UnknownTlvAny) Value() *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_UnknownTlv_ValueAny {\n\treturn &NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_UnknownTlv_ValueAny{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"state\", \"value\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "8e01a06b8d97fb1d10ded5da19f56d60", "score": "0.52455497", "text": "func (t *Target) Native(b []byte) usm.Native {\n\treturn b\n}", "title": "" }, { "docid": "4ea193a4f5aba1e17c10281e0593df00", "score": "0.52437496", "text": "func (v *V8Context) ToValue(val interface{}) (*Value, error) {\n\tif fn, isFunction := val.(func(Loc, ...*Value) (*Value, error)); isFunction {\n\t\treturn v.CreateRawFunc(fn)\n\t}\n\tdata, err := json.Marshal(val)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Cannot marshal value as JSON: %v\\nVal: %#v\", err, val)\n\t}\n\treturn v.FromJSON(string(data))\n}", "title": "" }, { "docid": "f4c936659ed428e151497a04b4f4528e", "score": "0.5226016", "text": "func (n 
*NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_RouterInformation_Tlv_SegmentRoutingSidLabelRange_Tlv_UnknownTlvAny) Value() *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_RouterInformation_Tlv_SegmentRoutingSidLabelRange_Tlv_UnknownTlv_ValueAny {\n\treturn &NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_RouterInformation_Tlv_SegmentRoutingSidLabelRange_Tlv_UnknownTlv_ValueAny{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"state\", \"value\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "4c109a423631f3a9bee3830abd1cbbd8", "score": "0.5222835", "text": "func GetValueAsRaw(homeID uint32, valueID uint64) ([]byte, error) {\n\tcvalueid := C.valueid_create(C.uint32_t(homeID), C.uint64_t(valueID))\n\tdefer C.valueid_free(cvalueid)\n\tzwbytes := C.zwbytes_new()\n\tok := bool(C.manager_getValueAsRaw(cmanager, cvalueid, zwbytes))\n\tif ok == false {\n\t\treturn nil, fmt.Errorf(\"value is not of raw type\")\n\t}\n\tgobytes := make([]byte, zwbytes.size)\n\tfor i := 0; i < int(zwbytes.size); i++ {\n\t\tgobytes[i] = byte(C.zwbytes_at(zwbytes, C.size_t(i)))\n\t}\n\treturn gobytes, nil\n}", "title": "" }, { "docid": "8accc5fdb79de795eccc8229c99d0e32", "score": "0.5219665", "text": "func (v Value) Value() interface{} {\n\treturn v.value\n}", "title": "" }, { "docid": "3a26c0a58ab5b3bc6c451c472fc80cae", "score": "0.5214419", "text": "func (n *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_ExtendedLink_Tlv_UnknownTlvAny) Value() *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_ExtendedLink_Tlv_UnknownTlv_ValueAny {\n\treturn &NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_ExtendedLink_Tlv_UnknownTlv_ValueAny{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"state\", \"value\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "cbf86c15981fe1aeca3ad77fd218c011", "score": "0.5211266", "text": "func (Type) SafeValue() {}", "title": "" }, { "docid": "0a7667483a94df647e4a8ed2046fdb4d", "score": "0.5196288", "text": "func (n NullInterface) Value() (driver.Value, error) {\n\tif n.Err() == nil && !driver.IsValue(n.V()) {\n\t\treturn nil, ErrConvert\n\t}\n\tif n.Err() != nil || !n.Present() {\n\t\treturn nil, n.Err()\n\t}\n\treturn n.InterfaceCommon.Value()\n}", "title": "" }, { "docid": "c9e1fe56ab7528d9c17663bb722d4594", "score": "0.5191623", "text": "func (v *Float) Value() interface{} { return *v.valPtr }", "title": "" }, { "docid": "43e0bc0b41cc8328b7408ede725a1b19", "score": "0.5188408", "text": "func (n *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_GraceLsa_Tlv_UnknownTlvAny) Value() *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_GraceLsa_Tlv_UnknownTlv_ValueAny {\n\treturn &NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_GraceLsa_Tlv_UnknownTlv_ValueAny{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"state\", \"value\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "ea4e4728e9f90d60c30025952560c59a", "score": "0.5187408", "text": "func FromPcore(pv px.Value) dgo.Value {\n\tif pv.Equals(px.Undef, nil) {\n\t\treturn vf.Nil\n\t}\n\tvar v dgo.Value\n\tswitch pv := pv.(type) {\n\tcase px.StringValue:\n\t\tv = vf.String(pv.String())\n\tcase px.Integer:\n\t\tv = vf.Integer(pv.Int())\n\tcase px.Float:\n\t\tv = vf.Float(pv.Float())\n\tcase px.Boolean:\n\t\tv = vf.Boolean(pv.Bool())\n\tcase px.OrderedMap:\n\t\tv = fromMap(pv)\n\tcase 
*types.Binary:\n\t\tv = vf.Binary(pv.Bytes(), false)\n\tcase px.List:\n\t\tv = fromList(pv)\n\tcase *types.Regexp:\n\t\tv = vf.Regexp(pv.Regexp())\n\tcase *types.Sensitive:\n\t\tv = vf.Sensitive(FromPcore(pv.Unwrap()))\n\tcase *types.Timestamp:\n\t\tv = vf.Time(*(*time.Time)(pv))\n\tcase px.Type:\n\t\tv = fromType(pv)\n\tdefault:\n\t\tpanic(fmt.Errorf(`unable to create a dgo.Value from a pcore %s`, pv.PType().Name()))\n\t}\n\treturn v\n}", "title": "" }, { "docid": "661d7e4f5bce54f4930246e84132141d", "score": "0.51830024", "text": "func (v PtrVar) Get() any {\n\treturn vals.FromGo(v.GetRaw())\n}", "title": "" }, { "docid": "2980a1a142c2f08b9a6045ad43cb1a3d", "score": "0.5178662", "text": "func (n *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_ExtendedPrefix_Tlv_UnknownTlvAny) Value() *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_ExtendedPrefix_Tlv_UnknownTlv_ValueAny {\n\treturn &NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_ExtendedPrefix_Tlv_UnknownTlv_ValueAny{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"state\", \"value\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "e65cbd2479ccaa5f488c85c1435f7b76", "score": "0.5169456", "text": "func (i *CertificateInfo) Native() uintptr {\n\treturn uintptr(unsafe.Pointer(i.native()))\n}", "title": "" }, { "docid": "993c5cbdbdb24062fce287a305c86944", "score": "0.5160412", "text": "func NewValue(value any) (Value, error) {\n\tswitch v := value.(type) {\n\tcase string:\n\t\treturn NewString(v)\n\tcase int:\n\t\treturn NewInt(v), nil\n\tcase int8:\n\t\treturn NewInt8(v), nil\n\tcase int16:\n\t\treturn NewInt16(v), nil\n\tcase int32:\n\t\treturn NewInt32(v), nil\n\tcase int64:\n\t\treturn NewInt64(v), nil\n\tcase uint8:\n\t\treturn NewUInt8(v), nil\n\tcase uint16:\n\t\treturn NewUInt16(v), nil\n\tcase uint32:\n\t\treturn NewUInt32(v), nil\n\tcase uint64:\n\t\treturn NewUInt64(v), nil\n\tcase []any:\n\t\tvalues := make([]Value, len(v))\n\n\t\tfor i, v := range v {\n\t\t\tt, err := NewValue(v)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tvalues[i] = t\n\t\t}\n\n\t\treturn NewArray(values), nil\n\tcase nil:\n\t\treturn NewOptional(nil), nil\n\t}\n\n\treturn nil, fmt.Errorf(\"value type %T cannot be converted to ABI value type\", value)\n}", "title": "" }, { "docid": "96d5608e866706d378e9641ab516ee34", "score": "0.5158911", "text": "func ValueFromReflected(vr reflect.Value) dgo.Value {\n\t// Invalid shouldn't happen, but needs a check\n\tif !vr.IsValid() {\n\t\treturn Nil\n\t}\n\n\tswitch vr.Kind() {\n\tcase reflect.Slice:\n\t\treturn ArrayFromReflected(vr, true)\n\tcase reflect.Map:\n\t\treturn MapFromReflected(vr, true)\n\tcase reflect.Ptr:\n\t\tif vr.IsNil() {\n\t\t\treturn Nil\n\t\t}\n\t}\n\tvi := vr.Interface()\n\tif v := value(vi); v != nil {\n\t\treturn v\n\t}\n\tif v, ok := vi.(dgo.Value); ok {\n\t\treturn v\n\t}\n\t// Value as unsafe. 
Immutability is not guaranteed\n\treturn native(vr)\n}", "title": "" }, { "docid": "7ce12e921246f5aa86371eb263f1e673", "score": "0.51411927", "text": "func (src Int8) Value() (driver.Value, error) {\n\tif !src.Valid {\n\t\treturn nil, nil\n\t}\n\treturn int64(src.Int64), nil\n}", "title": "" }, { "docid": "9b501715a86c1c34202a6e6c5c6b81bb", "score": "0.5139627", "text": "func (v *Logical) Value() *Value {\n\treturn (*Value)(unsafe.Pointer(v))\n}", "title": "" }, { "docid": "59302c54b0bcd27ad944609a16b2cec6", "score": "0.5129634", "text": "func (v *WeakReference) Value() *Value {\n\treturn (*Value)(unsafe.Pointer(v))\n}", "title": "" }, { "docid": "57d34dc4ed6569045503bfb9945c3a32", "score": "0.51258755", "text": "func (nb NullBase) Value() interface{} {\n\tif nb.IsNil() {\n\t\treturn nil\n\t}\n\treturn nb.value\n}", "title": "" }, { "docid": "214374327e7ceeb1ff542394669ccd9a", "score": "0.5121627", "text": "func (n *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_RouterInformation_Tlv_UnknownTlvAny) Value() *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_RouterInformation_Tlv_UnknownTlv_ValueAny {\n\treturn &NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_RouterInformation_Tlv_UnknownTlv_ValueAny{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"state\", \"value\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "149edcb2d5ec74ad0adb09a943712a6a", "score": "0.51205796", "text": "func (i Int64Value) ValueInt64Pointer() *int64 {\n\tif i.IsNull() {\n\t\treturn nil\n\t}\n\n\treturn &i.value\n}", "title": "" }, { "docid": "550b4c047bdde1061e59eaba9c372867", "score": "0.5108055", "text": "func (n *Node) ValueOnDevice(dev Device, extern External) (retVal Value, allocOnExtern bool, err error) {\n\treturn n.Value(), false, nil\n}", "title": "" }, { "docid": "336ade334feecafbad7e5d8672cb9bab", "score": "0.5102779", "text": "func (n *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_TrafficEngineering_Tlv_UnknownTlvAny) Value() *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_TrafficEngineering_Tlv_UnknownTlv_ValueAny {\n\treturn &NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_TrafficEngineering_Tlv_UnknownTlv_ValueAny{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"state\", \"value\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "bef62c9ab852524f275ecb2e99b0c05f", "score": "0.5092734", "text": "func NewValue(v any) (*Value, error) {\n\tswitch x := v.(type) {\n\tcase isValue_Value:\n\t\treturn &Value{Value: x}, nil\n\tcase nil:\n\t\treturn &Value{Value: &Value_Null{}}, nil\n\tcase int8, int16, int32, int64, int:\n\t\treturn &Value{Value: &Value_Int{reflect.ValueOf(v).Int()}}, nil\n\tcase uint8, uint16, uint32, uint64, uint:\n\t\treturn &Value{Value: &Value_Uint{reflect.ValueOf(v).Uint()}}, nil\n\tcase float32, float64:\n\t\treturn &Value{Value: &Value_Float{reflect.ValueOf(v).Float()}}, nil\n\tcase string:\n\t\treturn &Value{Value: &Value_Str{x}}, nil\n\tcase []byte:\n\t\treturn &Value{Value: &Value_Bytes{x}}, nil\n\tcase bool:\n\t\treturn &Value{Value: &Value_Bool{x}}, nil\n\tcase map[string]any:\n\t\tret, err := json.Marshal(x)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn &Value{Value: &Value_Object{string(ret)}}, nil\n\tcase []any:\n\t\tret, err := json.Marshal(x)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn &Value{Value: &Value_Array{string(ret)}}, nil\n\t}\n\treturn nil, 
fmt.Errorf(\"unknown type %T\", v)\n}", "title": "" }, { "docid": "e2c973a801ed309edb43b2185b403eef", "score": "0.5088669", "text": "func (ti *datetimeType) ConvertNomsValueToValue(v types.Value) (interface{}, error) {\n\tif val, ok := v.(types.Timestamp); ok {\n\t\tif ti.Equals(DateType) {\n\t\t\treturn time.Time(val).Truncate(24 * time.Hour).UTC(), nil\n\t\t}\n\t\treturn time.Time(val).UTC(), nil\n\t}\n\tif _, ok := v.(types.Null); ok || v == nil {\n\t\treturn nil, nil\n\t}\n\treturn nil, fmt.Errorf(`\"%v\" cannot convert NomsKind \"%v\" to a value`, ti.String(), v.Kind())\n}", "title": "" }, { "docid": "036e4f85151a7beccf4d1a513287cb83", "score": "0.5086541", "text": "func ToGoValue(v Value) *GoValue {\n\tif u, ok := v.(*GoValue); ok {\n\t\treturn u\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "277ef34ba6f007ee5508ba8caec9f7c2", "score": "0.50852865", "text": "func ValueNewStringFromBytes(context *Context, bytes *glib.Bytes) *Value {\n\tc_context := (*C.JSCContext)(C.NULL)\n\tif context != nil {\n\t\tc_context = (*C.JSCContext)(context.ToC())\n\t}\n\n\tc_bytes := (*C.GBytes)(C.NULL)\n\tif bytes != nil {\n\t\tc_bytes = (*C.GBytes)(bytes.ToC())\n\t}\n\n\tretC := C.jsc_value_new_string_from_bytes(c_context, c_bytes)\n\tretGo := ValueNewFromC(unsafe.Pointer(retC))\n\n\tif retC != nil {\n\t\tC.g_object_unref((C.gpointer)(retC))\n\t}\n\n\treturn retGo\n}", "title": "" }, { "docid": "2e760c14a0f5ef3578f706e4864512f3", "score": "0.5084138", "text": "func (v *Value) Value() interface{} {\n\treturn v.value\n}", "title": "" }, { "docid": "2e66555ff7067738ec12f815be9f5d8c", "score": "0.5081167", "text": "func (n InterfaceCommon) Value() (driver.Value, error) {\n\tif n.Err() == nil && !driver.IsValue(n.V()) {\n\t\treturn nil, ErrConvert\n\t}\n\treturn n.V(), nil\n}", "title": "" }, { "docid": "1d6ab751bb0e7e7112ca9e0e326c7c97", "score": "0.50746363", "text": "func FromValue(v reflect.Value) Reflector {\n\tr := Reflector{\n\t\tstype: v.Type(),\n\t\tvalue: v,\n\t}\n\n\tif r.stype.Kind() == reflect.Ptr {\n\t\tr.stype = r.stype.Elem()\n\t\tr.value = r.value.Elem()\n\t}\n\n\treturn r\n}", "title": "" }, { "docid": "a87d9b03cf6fff342e2a3f4da184d017", "score": "0.5063799", "text": "func getValueOfPtr(ptrToValue interface{}) (reflect.Value, error) {\n\tptr := reflect.ValueOf(ptrToValue)\n\t// Check if ptrToValue is really a pointer\n\tif ptr.Kind() != reflect.Ptr {\n\t\treturn reflect.Value{}, errors.New(\"Argument is not a pointer.\")\n\t}\n\n\t// Get the value the ptrToValue points to\n\tvalue := reflect.Indirect(ptr)\n\treturn value, nil\n}", "title": "" }, { "docid": "12ece6d078dcd3cfb8ed97b229fa87c3", "score": "0.5062682", "text": "func (ti *bitType) ConvertNomsValueToValue(v types.Value) (interface{}, error) {\n\tif val, ok := v.(types.Uint); ok {\n\t\treturn uint64(val), nil\n\t}\n\tif _, ok := v.(types.Null); ok || v == nil {\n\t\treturn nil, nil\n\t}\n\treturn nil, fmt.Errorf(`\"%v\" cannot convert NomsKind \"%v\" to a value`, ti.String(), v.Kind())\n}", "title": "" }, { "docid": "12816d336dcfe246942347d7dba801f5", "score": "0.5052192", "text": "func NewValue(value interface{}, kind ValueKind) Value {\n\toutput, err := fromGoValue(value, kind)\n\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"Cannot create a Wasm `%s` value from `%T`\", err, value))\n\t}\n\n\treturn newValue(&output)\n}", "title": "" }, { "docid": "82f257b0e642c40d84315e8a27e175a9", "score": "0.50502443", "text": "func (v *GlyphGeometry) Native() uintptr {\n\treturn uintptr(unsafe.Pointer(v.native()))\n}", "title": "" }, { 
"docid": "f15894c72edeb6005846a818d762ed66", "score": "0.5048753", "text": "func ValueOf(v any) Value {\n\tswitch v := v.(type) {\n\tcase Number:\n\t\treturn v\n\tcase String:\n\t\treturn v\n\tcase Bool:\n\t\treturn v\n\tcase Numbers:\n\t\treturn v\n\tcase Strings:\n\t\treturn v\n\tcase Bools:\n\t\treturn v\n\tcase Table:\n\t\treturn v\n\tcase Array:\n\t\treturn v\n\tcase int:\n\t\treturn Number(v)\n\tcase int8:\n\t\treturn Number(v)\n\tcase int16:\n\t\treturn Number(v)\n\tcase int32:\n\t\treturn Number(v)\n\tcase int64:\n\t\treturn Number(v)\n\tcase uint:\n\t\treturn Number(v)\n\tcase uint8:\n\t\treturn Number(v)\n\tcase uint16:\n\t\treturn Number(v)\n\tcase uint32:\n\t\treturn Number(v)\n\tcase uint64:\n\t\treturn Number(v)\n\tcase float32:\n\t\treturn Number(v)\n\tcase float64:\n\t\treturn Number(v)\n\tcase bool:\n\t\treturn Bool(v)\n\tcase string:\n\t\treturn String(v)\n\tcase []int:\n\t\treturn numbersOf(v)\n\tcase []int8:\n\t\treturn numbersOf(v)\n\tcase []int16:\n\t\treturn numbersOf(v)\n\tcase []int32:\n\t\treturn numbersOf(v)\n\tcase []int64:\n\t\treturn numbersOf(v)\n\tcase []uint:\n\t\treturn numbersOf(v)\n\tcase []uint8:\n\t\treturn numbersOf(v)\n\tcase []uint16:\n\t\treturn numbersOf(v)\n\tcase []uint32:\n\t\treturn numbersOf(v)\n\tcase []uint64:\n\t\treturn numbersOf(v)\n\tcase []float32:\n\t\treturn numbersOf(v)\n\tcase []float64:\n\t\treturn Numbers(v)\n\tcase []bool:\n\t\treturn Bools(v)\n\tcase []string:\n\t\treturn Strings(v)\n\tcase map[string]any:\n\t\treturn mapAsTable(v)\n\tcase []any:\n\t\treturn sliceAsArray(v)\n\tcase nil, Nil:\n\t\treturn Nil{}\n\tdefault:\n\t\tout, err := json.Marshal(v)\n\t\tif err != nil {\n\t\t\treturn Nil{}\n\t\t}\n\n\t\tvar resp any\n\t\tif err := json.Unmarshal(out, &resp); err != nil {\n\t\t\treturn Nil{}\n\t\t}\n\n\t\treturn ValueOf(resp)\n\t}\n}", "title": "" }, { "docid": "bf219527c54a724f4f2b2fa30ecd2a2a", "score": "0.50469536", "text": "func WrapValue(rv reflect.Value) *Wrapper {\n\trv = reflect.Indirect(rv)\n\tif rv.Kind() != reflect.Struct {\n\t\tpanic(\"must be provider an struct value\")\n\t}\n\n\treturn &Wrapper{rv: rv}\n}", "title": "" }, { "docid": "dce3f23bedcc04c8254876a901678c07", "score": "0.5044488", "text": "func (n MapStringInterface) Value() (driver.Value, error) {\n\tvalue, err := json.Marshal(n)\n\tif err != nil {\n\t\treturn nil, errors.WithStack(err)\n\t}\n\treturn string(value), nil\n}", "title": "" }, { "docid": "7f8ef8d1f9d35fe7964fa95614cc7cfb", "score": "0.504064", "text": "func (v *UserScript) native() *C.WebKitUserScript {\n\tif v == nil {\n\t\treturn nil\n\t}\n\treturn v.WebKitUserScript\n}", "title": "" }, { "docid": "5258ce51b706ab67abf5a05b8168bf85", "score": "0.5026592", "text": "func (mrb *MrbState) Value(o interface{}) Value {\n\tswitch v := o.(type) {\n\tcase nil:\n\t\treturn nilValue\n\tcase bool:\n\t\treturn Bool(v)\n\tcase int:\n\t\treturn Value{C.mrb_fixnum_value(C.mrb_int(v))}\n\tcase *int:\n\t\tif v == nil {\n\t\t\treturn nilValue\n\t\t}\n\t\treturn Value{C.mrb_fixnum_value(C.mrb_int(*v))}\n\tcase int32:\n\t\treturn Value{C.mrb_fixnum_value(C.mrb_int(v))}\n\tcase int8:\n\t\treturn Value{C.mrb_fixnum_value(C.mrb_int(v))}\n\tcase int16:\n\t\treturn Value{C.mrb_fixnum_value(C.mrb_int(v))}\n\tcase int64:\n\t\treturn Value{C.mrb_fixnum_value(C.mrb_int(v))}\n\tcase uint:\n\t\treturn Value{C.mrb_fixnum_value(C.mrb_int(v))}\n\tcase uint32:\n\t\treturn Value{C.mrb_fixnum_value(C.mrb_int(v))}\n\tcase uint64:\n\t\treturn Value{C.mrb_fixnum_value(C.mrb_int(v))}\n\tcase uint8:\n\t\treturn 
Value{C.mrb_fixnum_value(C.mrb_int(v))}\n\tcase uint16:\n\t\treturn Value{C.mrb_fixnum_value(C.mrb_int(v))}\n\tcase float32:\n\t\treturn mrb.FloatValue(float64(v))\n\tcase float64:\n\t\treturn mrb.FloatValue(v)\n\tcase string:\n\t\treturn mrb.StrNew(v)\n\tcase uintptr:\n\t\treturn mrb.CPtrValue(v)\n\tcase unsafe.Pointer:\n\t\treturn Value{C.mrb_cptr_value(mrb.p, v)}\n\tcase []byte:\n\t\treturn mrb.BytesValue(v)\n\tcase map[string]interface{}:\n\t\thash := mrb.HashNewCapa(len(v))\n\t\tfor key, val := range v {\n\t\t\thash.Set(mrb.Value(key), mrb.Value(val))\n\t\t}\n\t\treturn hash.Value()\n\tcase map[MrbSym]interface{}:\n\t\thash := mrb.HashNewCapa(len(v))\n\t\tfor key, val := range v {\n\t\t\thash.Set(mrb.SymbolValue(key), mrb.Value(val))\n\t\t}\n\t\treturn hash.Value()\n\tcase map[interface{}]interface{}:\n\t\thash := mrb.HashNewCapa(len(v))\n\t\tfor key, val := range v {\n\t\t\thash.Set(mrb.Value(key), mrb.Value(val))\n\t\t}\n\t\treturn hash.Value()\n\tcase []string:\n\t\tary := mrb.AryNewCapa(len(v))\n\t\tfor i := 0; i < len(v); i++ {\n\t\t\tary.Push(mrb.StrNew(v[i]))\n\t\t}\n\t\treturn ary.Value()\n\tcase []interface{}:\n\t\tary := mrb.AryNewCapa(len(v))\n\t\tfor i := 0; i < len(v); i++ {\n\t\t\tary.Push(mrb.Value(v[i]))\n\t\t}\n\t\treturn ary.Value()\n\tcase []int:\n\t\tary := mrb.AryNewCapa(len(v))\n\t\tfor i := 0; i < len(v); i++ {\n\t\t\tary.Push(mrb.FixnumValue(v[i]))\n\t\t}\n\t\treturn ary.Value()\n\tcase []float64:\n\t\tary := mrb.AryNewCapa(len(v))\n\t\tfor i := 0; i < len(v); i++ {\n\t\t\tary.Push(mrb.FloatValue(v[i]))\n\t\t}\n\t\treturn ary.Value()\n\tcase Value:\n\t\treturn v\n\tcase MrbFuncT:\n\t\treturn mrb.ProcNewCFunc(v).Value()\n\tcase complex64:\n\t\treturn mrb.NewInstance(\"Complex\", real(v), imag(v)).Value()\n\tcase complex128:\n\t\treturn mrb.NewInstance(\"Complex\", real(v), imag(v)).Value()\n\tcase MrbValue:\n\t\treturn v.Value()\n\tcase ValueMigrator:\n\t\treturn v.MigrateTo(mrb)\n\tdefault:\n\t\trv := reflect.ValueOf(o)\n\t\treturn mrb.valueValue(rv)\n\t}\n}", "title": "" }, { "docid": "2fec4bd7ecc712812515b9e3b7d0307d", "score": "0.5021763", "text": "func ValueInterface(v r.Value) interface{} {\n\tif !v.IsValid() || !v.CanInterface() || v == None {\n\t\treturn nil\n\t}\n\treturn v.Interface()\n}", "title": "" }, { "docid": "fcbc426b433dbe164286c9c6ef001dd0", "score": "0.5012778", "text": "func (v *Vector) Value() *Value {\n\treturn (*Value)(unsafe.Pointer(v))\n}", "title": "" }, { "docid": "a7657bd283904506e54038f08f394013", "score": "0.50067025", "text": "func (src *JSON) Value() (driver.Value, error) {\n\tswitch src.Status {\n\tcase Present:\n\t\treturn src.Bytes, nil\n\tcase Null:\n\t\treturn nil, nil\n\tdefault:\n\t\treturn nil, errUndefined\n\t}\n}", "title": "" }, { "docid": "7db0c35242224cd9b023eabc2ce2155a", "score": "0.49982625", "text": "func (n *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_UnknownTlv) Value() *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_UnknownTlv_Value {\n\treturn &NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_UnknownTlv_Value{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"state\", \"value\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "7db0c35242224cd9b023eabc2ce2155a", "score": "0.49967095", "text": "func (n *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_UnknownTlv) Value() *NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_UnknownTlv_Value {\n\treturn 
&NetworkInstance_Protocol_Ospfv2_Area_Lsdb_LsaType_Lsa_OpaqueLsa_UnknownTlv_Value{\n\t\tNodePath: ygot.NewNodePath(\n\t\t\t[]string{\"state\", \"value\"},\n\t\t\tmap[string]interface{}{},\n\t\t\tn,\n\t\t),\n\t}\n}", "title": "" }, { "docid": "b81d8e2172bdb3541d2968d074e11f4f", "score": "0.4995596", "text": "func ValueNewObject(context *Context, instance uintptr, jscClass *Class) *Value {\n\tc_context := (*C.JSCContext)(C.NULL)\n\tif context != nil {\n\t\tc_context = (*C.JSCContext)(context.ToC())\n\t}\n\n\tc_instance := (C.gpointer)(instance)\n\n\tc_jsc_class := (*C.JSCClass)(C.NULL)\n\tif jscClass != nil {\n\t\tc_jsc_class = (*C.JSCClass)(jscClass.ToC())\n\t}\n\n\tretC := C.jsc_value_new_object(c_context, c_instance, c_jsc_class)\n\tretGo := ValueNewFromC(unsafe.Pointer(retC))\n\n\tif retC != nil {\n\t\tC.g_object_unref((C.gpointer)(retC))\n\t}\n\n\treturn retGo\n}", "title": "" }, { "docid": "9243d9bfb905e11053891cc986aaabb5", "score": "0.4992749", "text": "func (v Value) pointer() unsafe.Pointer {\n\tif v.typ.size != goarch.PtrSize || !v.typ.pointers() {\n\t\tpanic(\"can't call pointer on a non-pointer Value\")\n\t}\n\tif v.flag&flagIndir != 0 {\n\t\treturn *(*unsafe.Pointer)(v.ptr)\n\t}\n\treturn v.ptr\n}", "title": "" } ]
0ef44c503617f1cae95e3de3a854365e
Values returns all known values for H265SpatialAdaptiveQuantization. Note that this can be expanded in the future, and so it is only as up to date as the client. The ordering of this slice is not guaranteed to be stable across updates.
[ { "docid": "589b81fb689d9e92c8cc46c0b566b759", "score": "0.79505223", "text": "func (H265SpatialAdaptiveQuantization) Values() []H265SpatialAdaptiveQuantization {\n\treturn []H265SpatialAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" } ]
[ { "docid": "77235113a9abe6e837271dec2f4049b9", "score": "0.7787381", "text": "func (H264SpatialAdaptiveQuantization) Values() []H264SpatialAdaptiveQuantization {\n\treturn []H264SpatialAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "ed41b520a9868a27c3568d711f6eba60", "score": "0.7710216", "text": "func (Mpeg2SpatialAdaptiveQuantization) Values() []Mpeg2SpatialAdaptiveQuantization {\n\treturn []Mpeg2SpatialAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "54c294d2e22feb957e68ae3397eb04be", "score": "0.7696452", "text": "func (XavcSpatialAdaptiveQuantization) Values() []XavcSpatialAdaptiveQuantization {\n\treturn []XavcSpatialAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "7ed2b5a7a602139bf4d5142d2a7c8361", "score": "0.7509513", "text": "func (Av1SpatialAdaptiveQuantization) Values() []Av1SpatialAdaptiveQuantization {\n\treturn []Av1SpatialAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "7b1e7e845402935da1bfd44ccd87eb8a", "score": "0.7419677", "text": "func (H265AdaptiveQuantization) Values() []H265AdaptiveQuantization {\n\treturn []H265AdaptiveQuantization{\n\t\t\"OFF\",\n\t\t\"LOW\",\n\t\t\"MEDIUM\",\n\t\t\"HIGH\",\n\t\t\"HIGHER\",\n\t\t\"MAX\",\n\t\t\"AUTO\",\n\t}\n}", "title": "" }, { "docid": "2fdeb4a0fb448edeab1b1a47847c66c3", "score": "0.73293346", "text": "func (H264AdaptiveQuantization) Values() []H264AdaptiveQuantization {\n\treturn []H264AdaptiveQuantization{\n\t\t\"OFF\",\n\t\t\"AUTO\",\n\t\t\"LOW\",\n\t\t\"MEDIUM\",\n\t\t\"HIGH\",\n\t\t\"HIGHER\",\n\t\t\"MAX\",\n\t}\n}", "title": "" }, { "docid": "fa31f66a27cac0b4f498b8b45aa4f727", "score": "0.7292676", "text": "func (XavcAdaptiveQuantization) Values() []XavcAdaptiveQuantization {\n\treturn []XavcAdaptiveQuantization{\n\t\t\"OFF\",\n\t\t\"AUTO\",\n\t\t\"LOW\",\n\t\t\"MEDIUM\",\n\t\t\"HIGH\",\n\t\t\"HIGHER\",\n\t\t\"MAX\",\n\t}\n}", "title": "" }, { "docid": "c1a4ac65d39f7bd87493b131728b67e7", "score": "0.7239072", "text": "func (H265TemporalAdaptiveQuantization) Values() []H265TemporalAdaptiveQuantization {\n\treturn []H265TemporalAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "0b19965442e8ecd5f9acf9bc6f0a81f3", "score": "0.7198025", "text": "func (Mpeg2AdaptiveQuantization) Values() []Mpeg2AdaptiveQuantization {\n\treturn []Mpeg2AdaptiveQuantization{\n\t\t\"OFF\",\n\t\t\"LOW\",\n\t\t\"MEDIUM\",\n\t\t\"HIGH\",\n\t}\n}", "title": "" }, { "docid": "a9574e73bea14cc6c81078ab61b06496", "score": "0.7121925", "text": "func (H264TemporalAdaptiveQuantization) Values() []H264TemporalAdaptiveQuantization {\n\treturn []H264TemporalAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "4b9fc8735aec46daa32d859683c20f66", "score": "0.70776725", "text": "func (XavcTemporalAdaptiveQuantization) Values() []XavcTemporalAdaptiveQuantization {\n\treturn []XavcTemporalAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "72cf4a219d9098f8f637ff6f87d11d33", "score": "0.70181465", "text": "func (H265FlickerAdaptiveQuantization) Values() []H265FlickerAdaptiveQuantization {\n\treturn []H265FlickerAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "6bedc9dd759e2cbf3a40e8ef46e10c13", "score": "0.700378", "text": "func (Mpeg2TemporalAdaptiveQuantization) Values() []Mpeg2TemporalAdaptiveQuantization 
{\n\treturn []Mpeg2TemporalAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "c073ce9c3c07d7072e4cd34d35b5870e", "score": "0.6997093", "text": "func (Av1AdaptiveQuantization) Values() []Av1AdaptiveQuantization {\n\treturn []Av1AdaptiveQuantization{\n\t\t\"OFF\",\n\t\t\"LOW\",\n\t\t\"MEDIUM\",\n\t\t\"HIGH\",\n\t\t\"HIGHER\",\n\t\t\"MAX\",\n\t}\n}", "title": "" }, { "docid": "ca56ae5537d0b93b1ca042d11cd42857", "score": "0.6977432", "text": "func (H264FlickerAdaptiveQuantization) Values() []H264FlickerAdaptiveQuantization {\n\treturn []H264FlickerAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "c7d0255fce7a68fe994c83186800aa99", "score": "0.6953929", "text": "func (XavcFlickerAdaptiveQuantization) Values() []XavcFlickerAdaptiveQuantization {\n\treturn []XavcFlickerAdaptiveQuantization{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "d38e12b9e77ee4612f55c12eceff4e59", "score": "0.62992734", "text": "func (H265QualityTuningLevel) Values() []H265QualityTuningLevel {\n\treturn []H265QualityTuningLevel{\n\t\t\"SINGLE_PASS\",\n\t\t\"SINGLE_PASS_HQ\",\n\t\t\"MULTI_PASS_HQ\",\n\t}\n}", "title": "" }, { "docid": "e129c95638fe9d6e6a8a2bcc8147bdd6", "score": "0.6235294", "text": "func (AvcIntraUhdQualityTuningLevel) Values() []AvcIntraUhdQualityTuningLevel {\n\treturn []AvcIntraUhdQualityTuningLevel{\n\t\t\"SINGLE_PASS\",\n\t\t\"MULTI_PASS\",\n\t}\n}", "title": "" }, { "docid": "cf9f2bc0ff33529a48f9753b0611a38f", "score": "0.62321264", "text": "func (Eac3AtmosDynamicRangeCompressionRf) Values() []Eac3AtmosDynamicRangeCompressionRf {\n\treturn []Eac3AtmosDynamicRangeCompressionRf{\n\t\t\"NONE\",\n\t\t\"FILM_STANDARD\",\n\t\t\"FILM_LIGHT\",\n\t\t\"MUSIC_STANDARD\",\n\t\t\"MUSIC_LIGHT\",\n\t\t\"SPEECH\",\n\t}\n}", "title": "" }, { "docid": "e42c2ad29b14335249f44ad4370d94f3", "score": "0.61754835", "text": "func (XavcHdProfileQualityTuningLevel) Values() []XavcHdProfileQualityTuningLevel {\n\treturn []XavcHdProfileQualityTuningLevel{\n\t\t\"SINGLE_PASS\",\n\t\t\"SINGLE_PASS_HQ\",\n\t\t\"MULTI_PASS_HQ\",\n\t}\n}", "title": "" }, { "docid": "950cacce8dff8a68c1a2ab3e80956ba7", "score": "0.61751896", "text": "func (H264QualityTuningLevel) Values() []H264QualityTuningLevel {\n\treturn []H264QualityTuningLevel{\n\t\t\"SINGLE_PASS\",\n\t\t\"SINGLE_PASS_HQ\",\n\t\t\"MULTI_PASS_HQ\",\n\t}\n}", "title": "" }, { "docid": "f96c868029d0f07025452fabbaf3ae2b", "score": "0.6157872", "text": "func (AacVbrQuality) Values() []AacVbrQuality {\n\treturn []AacVbrQuality{\n\t\t\"LOW\",\n\t\t\"MEDIUM_LOW\",\n\t\t\"MEDIUM_HIGH\",\n\t\t\"HIGH\",\n\t}\n}", "title": "" }, { "docid": "26d84f5802f93d074a4a68688cba0076", "score": "0.6156186", "text": "func (Xavc4kProfileQualityTuningLevel) Values() []Xavc4kProfileQualityTuningLevel {\n\treturn []Xavc4kProfileQualityTuningLevel{\n\t\t\"SINGLE_PASS\",\n\t\t\"SINGLE_PASS_HQ\",\n\t\t\"MULTI_PASS_HQ\",\n\t}\n}", "title": "" }, { "docid": "9c05220706ede754437b8af39835538f", "score": "0.6141928", "text": "func (m *UserExperienceAnalyticsInsight) GetValues()([]UserExperienceAnalyticsInsightValueable) {\n val, err := m.GetBackingStore().Get(\"values\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.([]UserExperienceAnalyticsInsightValueable)\n }\n return nil\n}", "title": "" }, { "docid": "80ad2cf52e464d872837048b35fe1de8", "score": "0.61060846", "text": "func (Ac3DynamicRangeCompressionRf) Values() []Ac3DynamicRangeCompressionRf {\n\treturn 
[]Ac3DynamicRangeCompressionRf{\n\t\t\"FILM_STANDARD\",\n\t\t\"FILM_LIGHT\",\n\t\t\"MUSIC_STANDARD\",\n\t\t\"MUSIC_LIGHT\",\n\t\t\"SPEECH\",\n\t\t\"NONE\",\n\t}\n}", "title": "" }, { "docid": "b7bd20692fb7b27ef632006a8b0f62e2", "score": "0.6058084", "text": "func (Eac3DynamicRangeCompressionRf) Values() []Eac3DynamicRangeCompressionRf {\n\treturn []Eac3DynamicRangeCompressionRf{\n\t\t\"NONE\",\n\t\t\"FILM_STANDARD\",\n\t\t\"FILM_LIGHT\",\n\t\t\"MUSIC_STANDARD\",\n\t\t\"MUSIC_LIGHT\",\n\t\t\"SPEECH\",\n\t}\n}", "title": "" }, { "docid": "1042d4f79284638cd4e342c62e2ea31d", "score": "0.6027153", "text": "func (AvcIntraFramerateConversionAlgorithm) Values() []AvcIntraFramerateConversionAlgorithm {\n\treturn []AvcIntraFramerateConversionAlgorithm{\n\t\t\"DUPLICATE_DROP\",\n\t\t\"INTERPOLATE\",\n\t\t\"FRAMEFORMER\",\n\t}\n}", "title": "" }, { "docid": "cb21723bed3703b7e7d05a781315f715", "score": "0.6010096", "text": "func (Vp8QualityTuningLevel) Values() []Vp8QualityTuningLevel {\n\treturn []Vp8QualityTuningLevel{\n\t\t\"MULTI_PASS\",\n\t\t\"MULTI_PASS_HQ\",\n\t}\n}", "title": "" }, { "docid": "1910effdc9c23982e57f14861641b2a8", "score": "0.60057926", "text": "func (c *Channel) Values() math32.ArrayF32 {\r\n\r\n\treturn c.values\r\n}", "title": "" }, { "docid": "b1912772bff43fb223f5d53fd62eff30", "score": "0.5994605", "text": "func (H265FramerateConversionAlgorithm) Values() []H265FramerateConversionAlgorithm {\n\treturn []H265FramerateConversionAlgorithm{\n\t\t\"DUPLICATE_DROP\",\n\t\t\"INTERPOLATE\",\n\t\t\"FRAMEFORMER\",\n\t}\n}", "title": "" }, { "docid": "c56b9b4d13a867922baf775395b19633", "score": "0.5982772", "text": "func (Vp9QualityTuningLevel) Values() []Vp9QualityTuningLevel {\n\treturn []Vp9QualityTuningLevel{\n\t\t\"MULTI_PASS\",\n\t\t\"MULTI_PASS_HQ\",\n\t}\n}", "title": "" }, { "docid": "7d6af5ea675995fbec9e25846cb4c92e", "score": "0.59494174", "text": "func (s *ExpDecaySample) Values() []int64 {\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\tvals := s.values.Values()\n\tvalues := make([]int64, len(vals))\n\tfor i, v := range vals {\n\t\tvalues[i] = v.v\n\t}\n\treturn values\n}", "title": "" }, { "docid": "f7359b118ca7937319a018e36bf3146a", "score": "0.5943364", "text": "func (df *DiscreteFeature) AvailableValues() []string {\n\treturn df.availableValues\n}", "title": "" }, { "docid": "e8cea777da3e6cabba80c48e9177bd27", "score": "0.5932362", "text": "func (Vc3FramerateConversionAlgorithm) Values() []Vc3FramerateConversionAlgorithm {\n\treturn []Vc3FramerateConversionAlgorithm{\n\t\t\"DUPLICATE_DROP\",\n\t\t\"INTERPOLATE\",\n\t\t\"FRAMEFORMER\",\n\t}\n}", "title": "" }, { "docid": "115a907d431e8d0dd1c85e09414b385c", "score": "0.59093595", "text": "func (ProresFramerateConversionAlgorithm) Values() []ProresFramerateConversionAlgorithm {\n\treturn []ProresFramerateConversionAlgorithm{\n\t\t\"DUPLICATE_DROP\",\n\t\t\"INTERPOLATE\",\n\t\t\"FRAMEFORMER\",\n\t}\n}", "title": "" }, { "docid": "5d4b4a9df06b08fcca79ee903f52f684", "score": "0.5902697", "text": "func (NielsenUniqueTicPerAudioTrackType) Values() []NielsenUniqueTicPerAudioTrackType {\n\treturn []NielsenUniqueTicPerAudioTrackType{\n\t\t\"RESERVE_UNIQUE_TICS_PER_TRACK\",\n\t\t\"SAME_TICS_PER_TRACK\",\n\t}\n}", "title": "" }, { "docid": "0e3145839d6e06439bc5e22caf5a5475", "score": "0.5892354", "text": "func (H265SampleAdaptiveOffsetFilterMode) Values() []H265SampleAdaptiveOffsetFilterMode {\n\treturn []H265SampleAdaptiveOffsetFilterMode{\n\t\t\"DEFAULT\",\n\t\t\"ADAPTIVE\",\n\t\t\"OFF\",\n\t}\n}", "title": "" }, { "docid": 
"9f89a08d3e6dcdced168dad0000de827", "score": "0.58845663", "text": "func (Vp9FramerateConversionAlgorithm) Values() []Vp9FramerateConversionAlgorithm {\n\treturn []Vp9FramerateConversionAlgorithm{\n\t\t\"DUPLICATE_DROP\",\n\t\t\"INTERPOLATE\",\n\t\t\"FRAMEFORMER\",\n\t}\n}", "title": "" }, { "docid": "2d8f91315118c38f5ad3b95ebd2a6a8d", "score": "0.58696103", "text": "func (Mp4FreeSpaceBox) Values() []Mp4FreeSpaceBox {\n\treturn []Mp4FreeSpaceBox{\n\t\t\"INCLUDE\",\n\t\t\"EXCLUDE\",\n\t}\n}", "title": "" }, { "docid": "6cdf47a6fc92619fe1730c8b3b7c804e", "score": "0.585125", "text": "func (H265DynamicSubGop) Values() []H265DynamicSubGop {\n\treturn []H265DynamicSubGop{\n\t\t\"ADAPTIVE\",\n\t\t\"STATIC\",\n\t}\n}", "title": "" }, { "docid": "45ee2f4abcd1168ce04230fbcf248152", "score": "0.5838271", "text": "func (Vp8FramerateConversionAlgorithm) Values() []Vp8FramerateConversionAlgorithm {\n\treturn []Vp8FramerateConversionAlgorithm{\n\t\t\"DUPLICATE_DROP\",\n\t\t\"INTERPOLATE\",\n\t\t\"FRAMEFORMER\",\n\t}\n}", "title": "" }, { "docid": "0af0df7805a48e43e496df2c09460b17", "score": "0.58358103", "text": "func (XavcFramerateConversionAlgorithm) Values() []XavcFramerateConversionAlgorithm {\n\treturn []XavcFramerateConversionAlgorithm{\n\t\t\"DUPLICATE_DROP\",\n\t\t\"INTERPOLATE\",\n\t\t\"FRAMEFORMER\",\n\t}\n}", "title": "" }, { "docid": "08e42a2c0f57a28f8fb4b927c8034eaf", "score": "0.5822999", "text": "func (PredefinedResolution) Values() []PredefinedResolution {\n\treturn []PredefinedResolution{\n\t\t\"HIGHEST\",\n\t\t\"LOWEST\",\n\t\t\"AVERAGE\",\n\t}\n}", "title": "" }, { "docid": "f372a4294f8e54994bdb2953e5a1e4ee", "score": "0.5805393", "text": "func (AlgorithmNameGeoMosaic) Values() []AlgorithmNameGeoMosaic {\n\treturn []AlgorithmNameGeoMosaic{\n\t\t\"NEAR\",\n\t\t\"BILINEAR\",\n\t\t\"CUBIC\",\n\t\t\"CUBICSPLINE\",\n\t\t\"LANCZOS\",\n\t\t\"AVERAGE\",\n\t\t\"RMS\",\n\t\t\"MODE\",\n\t\t\"MAX\",\n\t\t\"MIN\",\n\t\t\"MED\",\n\t\t\"Q1\",\n\t\t\"Q3\",\n\t\t\"SUM\",\n\t}\n}", "title": "" }, { "docid": "051bf11deb6ab46b88695aea5d053edc", "score": "0.58018154", "text": "func (Eac3AtmosMeteringMode) Values() []Eac3AtmosMeteringMode {\n\treturn []Eac3AtmosMeteringMode{\n\t\t\"LEQ_A\",\n\t\t\"ITU_BS_1770_1\",\n\t\t\"ITU_BS_1770_2\",\n\t\t\"ITU_BS_1770_3\",\n\t\t\"ITU_BS_1770_4\",\n\t}\n}", "title": "" }, { "docid": "8c7cc88b39be3ef0f7bd8d94402ef86c", "score": "0.57537866", "text": "func (H265GopSizeUnits) Values() []H265GopSizeUnits {\n\treturn []H265GopSizeUnits{\n\t\t\"FRAMES\",\n\t\t\"SECONDS\",\n\t\t\"AUTO\",\n\t}\n}", "title": "" }, { "docid": "2a8170f2138333fe865679b7a383d293", "score": "0.5747546", "text": "func (set *Set) Values() []interface{} {\n\tvalues := make([]interface{}, set.Size())\n\tcount := 0\n\tfor item, _ := range set.items {\n\t\tvalues[count] = item\n\t\tcount += 1\n\t}\n\treturn values\n}", "title": "" }, { "docid": "2a51dded8d59db6332426a1a85ad4da2", "score": "0.5738759", "text": "func (AudioNormalizationAlgorithm) Values() []AudioNormalizationAlgorithm {\n\treturn []AudioNormalizationAlgorithm{\n\t\t\"ITU_BS_1770_1\",\n\t\t\"ITU_BS_1770_2\",\n\t\t\"ITU_BS_1770_3\",\n\t\t\"ITU_BS_1770_4\",\n\t}\n}", "title": "" }, { "docid": "a70b2ffa7f346f9698a6fed1cf0162bb", "score": "0.5734261", "text": "func (M2tsEsRateInPes) Values() []M2tsEsRateInPes {\n\treturn []M2tsEsRateInPes{\n\t\t\"INCLUDE\",\n\t\t\"EXCLUDE\",\n\t}\n}", "title": "" }, { "docid": "fcd5ff8955d54842823ad28a3d023b3a", "score": "0.5727784", "text": "func (H264FramerateConversionAlgorithm) Values() 
[]H264FramerateConversionAlgorithm {\n\treturn []H264FramerateConversionAlgorithm{\n\t\t\"DUPLICATE_DROP\",\n\t\t\"INTERPOLATE\",\n\t\t\"FRAMEFORMER\",\n\t}\n}", "title": "" }, { "docid": "ea63e999d06070b75c4489d9b3dbd50e", "score": "0.5725705", "text": "func (Ac3DynamicRangeCompressionProfile) Values() []Ac3DynamicRangeCompressionProfile {\n\treturn []Ac3DynamicRangeCompressionProfile{\n\t\t\"FILM_STANDARD\",\n\t\t\"NONE\",\n\t}\n}", "title": "" }, { "docid": "8570b895d07a6d743464b5f4f22d8c10", "score": "0.57239807", "text": "func (DashIsoPtsOffsetHandlingForBFrames) Values() []DashIsoPtsOffsetHandlingForBFrames {\n\treturn []DashIsoPtsOffsetHandlingForBFrames{\n\t\t\"ZERO_BASED\",\n\t\t\"MATCH_INITIAL_PTS\",\n\t}\n}", "title": "" }, { "docid": "f3145dca7a414d575791c0f57756cda2", "score": "0.57204807", "text": "func (PresetSpeke20Video) Values() []PresetSpeke20Video {\n\treturn []PresetSpeke20Video{\n\t\t\"PRESET-VIDEO-1\",\n\t\t\"PRESET-VIDEO-2\",\n\t\t\"PRESET-VIDEO-3\",\n\t\t\"PRESET-VIDEO-4\",\n\t\t\"PRESET-VIDEO-5\",\n\t\t\"PRESET-VIDEO-6\",\n\t\t\"PRESET-VIDEO-7\",\n\t\t\"PRESET-VIDEO-8\",\n\t\t\"SHARED\",\n\t\t\"UNENCRYPTED\",\n\t}\n}", "title": "" }, { "docid": "7e6f642ce62432685685107717577adf", "score": "0.57200295", "text": "func (q *CircularFifoQueue) Values() []interface{} {\n\tq.RLock()\n\tdefer q.RUnlock()\n\tb := make([]interface{}, q.capacity())\n\tcopy(b, q.buffer)\n\treturn b\n}", "title": "" }, { "docid": "818dcf036ee8823986ddd8e271877e23", "score": "0.5709225", "text": "func (BandwidthReductionFilterSharpening) Values() []BandwidthReductionFilterSharpening {\n\treturn []BandwidthReductionFilterSharpening{\n\t\t\"LOW\",\n\t\t\"MEDIUM\",\n\t\t\"HIGH\",\n\t\t\"OFF\",\n\t}\n}", "title": "" }, { "docid": "461f6cefb8d1819f9c3734be12909165", "score": "0.5701789", "text": "func (AlgorithmNameResampling) Values() []AlgorithmNameResampling {\n\treturn []AlgorithmNameResampling{\n\t\t\"NEAR\",\n\t\t\"BILINEAR\",\n\t\t\"CUBIC\",\n\t\t\"CUBICSPLINE\",\n\t\t\"LANCZOS\",\n\t\t\"AVERAGE\",\n\t\t\"RMS\",\n\t\t\"MODE\",\n\t\t\"MAX\",\n\t\t\"MIN\",\n\t\t\"MED\",\n\t\t\"Q1\",\n\t\t\"Q3\",\n\t\t\"SUM\",\n\t}\n}", "title": "" }, { "docid": "a4913ba37484933ceabe76f565e6d9f1", "score": "0.57013196", "text": "func (Eac3AtmosDynamicRangeCompressionLine) Values() []Eac3AtmosDynamicRangeCompressionLine {\n\treturn []Eac3AtmosDynamicRangeCompressionLine{\n\t\t\"NONE\",\n\t\t\"FILM_STANDARD\",\n\t\t\"FILM_LIGHT\",\n\t\t\"MUSIC_STANDARD\",\n\t\t\"MUSIC_LIGHT\",\n\t\t\"SPEECH\",\n\t}\n}", "title": "" }, { "docid": "4fa87da99fd2353a73733f8cec128c32", "score": "0.5695063", "text": "func (CmafPtsOffsetHandlingForBFrames) Values() []CmafPtsOffsetHandlingForBFrames {\n\treturn []CmafPtsOffsetHandlingForBFrames{\n\t\t\"ZERO_BASED\",\n\t\t\"MATCH_INITIAL_PTS\",\n\t}\n}", "title": "" }, { "docid": "d8576387c864d3001180a82f47d59a0c", "score": "0.56894135", "text": "func (m *Map) Values() []interface{} {\n\tresult := make([]interface{}, 0)\n\tfor b := 0; b < m.concurrency; b++ {\n\t\tm.mutices[b].RLock()\n\t\tinternalResult := make([]interface{}, len(m.ducklings[b]))\n\t\ti := 0\n\t\tfor _, v := range m.ducklings[b] {\n\t\t\tinternalResult[i] = v\n\t\t\ti++\n\t\t}\n\t\tm.mutices[b].RUnlock()\n\t\tresult = append(result, internalResult...)\n\t}\n\treturn result\n}", "title": "" }, { "docid": "d9bce25c6caee735a0242b2831796c30", "score": "0.5668448", "text": "func (M2tsEbpAudioInterval) Values() []M2tsEbpAudioInterval {\n\treturn 
[]M2tsEbpAudioInterval{\n\t\t\"VIDEO_AND_FIXED_INTERVALS\",\n\t\t\"VIDEO_INTERVAL\",\n\t}\n}", "title": "" }, { "docid": "fd692dcc35c6ae321eeefd8a6ff6564f", "score": "0.56614083", "text": "func (TranscodePreset) Values() []TranscodePreset {\n\treturn []TranscodePreset{\n\t\t\"HIGHER_BANDWIDTH_DELIVERY\",\n\t\t\"CONSTRAINED_BANDWIDTH_DELIVERY\",\n\t}\n}", "title": "" }, { "docid": "a6da55af1df0447dfd6de103f4c625fa", "score": "0.56594414", "text": "func (Ac3DynamicRangeCompressionLine) Values() []Ac3DynamicRangeCompressionLine {\n\treturn []Ac3DynamicRangeCompressionLine{\n\t\t\"FILM_STANDARD\",\n\t\t\"FILM_LIGHT\",\n\t\t\"MUSIC_STANDARD\",\n\t\t\"MUSIC_LIGHT\",\n\t\t\"SPEECH\",\n\t\t\"NONE\",\n\t}\n}", "title": "" }, { "docid": "6c9ad62313b6c57bf863d21e12e1d058", "score": "0.5653987", "text": "func (PresetSpeke20Audio) Values() []PresetSpeke20Audio {\n\treturn []PresetSpeke20Audio{\n\t\t\"PRESET-AUDIO-1\",\n\t\t\"PRESET-AUDIO-2\",\n\t\t\"PRESET-AUDIO-3\",\n\t\t\"SHARED\",\n\t\t\"UNENCRYPTED\",\n\t}\n}", "title": "" }, { "docid": "b9fe3a7de77099e5c02972413f3033c8", "score": "0.56501925", "text": "func (RenditionConfigurationRendition) Values() []RenditionConfigurationRendition {\n\treturn []RenditionConfigurationRendition{\n\t\t\"FULL_HD\",\n\t\t\"HD\",\n\t\t\"SD\",\n\t\t\"LOWEST_RESOLUTION\",\n\t}\n}", "title": "" }, { "docid": "eec4b435b6ac0b3a5952a992399a104d", "score": "0.56475496", "text": "func (ProresChromaSampling) Values() []ProresChromaSampling {\n\treturn []ProresChromaSampling{\n\t\t\"PRESERVE_444_SAMPLING\",\n\t\t\"SUBSAMPLE_TO_422\",\n\t}\n}", "title": "" }, { "docid": "2e7004a4f4f4db2504ca88224ad93f98", "score": "0.564418", "text": "func (H265TemporalIds) Values() []H265TemporalIds {\n\treturn []H265TemporalIds{\n\t\t\"DISABLED\",\n\t\t\"ENABLED\",\n\t}\n}", "title": "" }, { "docid": "e99bce7d2b4d2188894c1bd6d3f9c4d4", "score": "0.56374", "text": "func (OrganizationFeatureSet) Values() []OrganizationFeatureSet {\n\treturn []OrganizationFeatureSet{\n\t\t\"ALL\",\n\t\t\"CONSOLIDATED_BILLING\",\n\t}\n}", "title": "" }, { "docid": "7981789cad6eb9db4fcfcbb64069d955", "score": "0.56364226", "text": "func (Mpeg2FramerateConversionAlgorithm) Values() []Mpeg2FramerateConversionAlgorithm {\n\treturn []Mpeg2FramerateConversionAlgorithm{\n\t\t\"DUPLICATE_DROP\",\n\t\t\"INTERPOLATE\",\n\t\t\"FRAMEFORMER\",\n\t}\n}", "title": "" }, { "docid": "2b8d8f8d0fcea4ebaf8fdbf590172f07", "score": "0.5636283", "text": "func (Av1FramerateConversionAlgorithm) Values() []Av1FramerateConversionAlgorithm {\n\treturn []Av1FramerateConversionAlgorithm{\n\t\t\"DUPLICATE_DROP\",\n\t\t\"INTERPOLATE\",\n\t\t\"FRAMEFORMER\",\n\t}\n}", "title": "" }, { "docid": "6db55a6df7fe0948c701e3d60d192f80", "score": "0.5635796", "text": "func (a *Assumption) GetValues() []*Value {\n if a.Values != nil {\n return a.Values\n }\n\n return nil\n}", "title": "" }, { "docid": "a04e86d32c7338ea209f55a04050e9db", "score": "0.56354994", "text": "func (TemporalStatistics) Values() []TemporalStatistics {\n\treturn []TemporalStatistics{\n\t\t\"MEAN\",\n\t\t\"MEDIAN\",\n\t\t\"STANDARD_DEVIATION\",\n\t}\n}", "title": "" }, { "docid": "651c2b71f7c8c6941cb5937ecd4ee840", "score": "0.56249416", "text": "func (m *Map[K, V]) Values() []V {\n\tret := []V{}\n\tfor i := 0; i < len(m.shards); i++ {\n\t\tret = append(ret, m.shards[i].Values()...)\n\t}\n\treturn ret\n}", "title": "" }, { "docid": "ab697571522c17d3fa62bb508c5613b3", "score": "0.5623605", "text": "func (H264DynamicSubGop) Values() []H264DynamicSubGop {\n\treturn 
[]H264DynamicSubGop{\n\t\t\"ADAPTIVE\",\n\t\t\"STATIC\",\n\t}\n}", "title": "" }, { "docid": "6b8265f17c0a4ac49ef38edb58626fe7", "score": "0.56170315", "text": "func (H265InterlaceMode) Values() []H265InterlaceMode {\n\treturn []H265InterlaceMode{\n\t\t\"PROGRESSIVE\",\n\t\t\"TOP_FIELD\",\n\t\t\"BOTTOM_FIELD\",\n\t\t\"FOLLOW_TOP_FIELD\",\n\t\t\"FOLLOW_BOTTOM_FIELD\",\n\t}\n}", "title": "" }, { "docid": "67c2a51d1e58ade3f7dbc2a477e439a7", "score": "0.561695", "text": "func (Mpeg2QualityTuningLevel) Values() []Mpeg2QualityTuningLevel {\n\treturn []Mpeg2QualityTuningLevel{\n\t\t\"SINGLE_PASS\",\n\t\t\"MULTI_PASS\",\n\t}\n}", "title": "" }, { "docid": "e32a3115ef580273916b1b127dd91d56", "score": "0.5615934", "text": "func (H265WriteMp4PackagingType) Values() []H265WriteMp4PackagingType {\n\treturn []H265WriteMp4PackagingType{\n\t\t\"HVC1\",\n\t\t\"HEV1\",\n\t}\n}", "title": "" }, { "docid": "cdbd4de9d70c662a270015a9aac0c364", "score": "0.5615082", "text": "func (m *SampledMetric) Values(n int) []float64 {\n\t_, values := m.values(n)\n\treturn values\n}", "title": "" }, { "docid": "014d20e7cc15948772541a7567484f2c", "score": "0.56128424", "text": "func (m *SecretMap) Values() []SecretTime {\n\tm.lock.RLock()\n\tvalues := make([]SecretTime, len(m.m))\n\ti := 0\n\tfor _, value := range m.m {\n\t\tvalues[i] = value\n\t\ti++\n\t}\n\tm.lock.RUnlock()\n\treturn values\n}", "title": "" }, { "docid": "ecbf2e0db9e9bdb4ca814159daff8a8b", "score": "0.5600838", "text": "func (Mpeg2IntraDcPrecision) Values() []Mpeg2IntraDcPrecision {\n\treturn []Mpeg2IntraDcPrecision{\n\t\t\"AUTO\",\n\t\t\"INTRA_DC_PRECISION_8\",\n\t\t\"INTRA_DC_PRECISION_9\",\n\t\t\"INTRA_DC_PRECISION_10\",\n\t\t\"INTRA_DC_PRECISION_11\",\n\t}\n}", "title": "" }, { "docid": "64fbdd4e56a2c11018b15f51c9c7730e", "score": "0.5579953", "text": "func (s *UniformSample) Values() []int64 {\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\tvalues := make([]int64, len(s.values))\n\tcopy(values, s.values)\n\treturn values\n}", "title": "" }, { "docid": "9dc57fc703ff886931137a9617364724", "score": "0.5562705", "text": "func (ThumbnailConfigurationResolution) Values() []ThumbnailConfigurationResolution {\n\treturn []ThumbnailConfigurationResolution{\n\t\t\"FULL_HD\",\n\t\t\"HD\",\n\t\t\"SD\",\n\t\t\"LOWEST_RESOLUTION\",\n\t}\n}", "title": "" }, { "docid": "aaeb01d38c74f5747128f6afd5f50788", "score": "0.5557652", "text": "func (is IntSet) Values() []uint64 {\n\treturn is.ba.ToNums()\n}", "title": "" }, { "docid": "16f78ca6e704811bb20cc461016bb7a1", "score": "0.55561316", "text": "func (page QuotaLimitsPage) Values() []CurrentQuotaLimitBase {\n\tif page.ql.IsEmpty() {\n\t\treturn nil\n\t}\n\treturn *page.ql.Value\n}", "title": "" }, { "docid": "040ddc27dfa8ae1cb694e4d52953ad30", "score": "0.5554576", "text": "func (Eac3DynamicRangeCompressionLine) Values() []Eac3DynamicRangeCompressionLine {\n\treturn []Eac3DynamicRangeCompressionLine{\n\t\t\"NONE\",\n\t\t\"FILM_STANDARD\",\n\t\t\"FILM_LIGHT\",\n\t\t\"MUSIC_STANDARD\",\n\t\t\"MUSIC_LIGHT\",\n\t\t\"SPEECH\",\n\t}\n}", "title": "" }, { "docid": "232bf82090e45f4b184b714e06642289", "score": "0.55486864", "text": "func (H265Telecine) Values() []H265Telecine {\n\treturn []H265Telecine{\n\t\t\"NONE\",\n\t\t\"SOFT\",\n\t\t\"HARD\",\n\t}\n}", "title": "" }, { "docid": "e95ae6ecd890a1810732a070cdee1f48", "score": "0.5544665", "text": "func (e *Eigen) Values(dst []complex128) []complex128 {\n\tif !e.succFact() {\n\t\tpanic(badFact)\n\t}\n\tif dst == nil {\n\t\tdst = make([]complex128, e.n)\n\t}\n\tif len(dst) != e.n 
{\n\t\tpanic(ErrSliceLengthMismatch)\n\t}\n\tcopy(dst, e.values)\n\treturn dst\n}", "title": "" }, { "docid": "7d9b7a053d4e6df2a2fdd2c6db32c3b1", "score": "0.55436975", "text": "func (Eac3AtmosBitstreamMode) Values() []Eac3AtmosBitstreamMode {\n\treturn []Eac3AtmosBitstreamMode{\n\t\t\"COMPLETE_MAIN\",\n\t}\n}", "title": "" }, { "docid": "6b88a0ed65888dea92a5ba7444603cb1", "score": "0.5543331", "text": "func (H265RateControlMode) Values() []H265RateControlMode {\n\treturn []H265RateControlMode{\n\t\t\"VBR\",\n\t\t\"CBR\",\n\t\t\"QVBR\",\n\t}\n}", "title": "" }, { "docid": "0315a741ae59f59334b349762e061529", "score": "0.5536674", "text": "func (H264InterlaceMode) Values() []H264InterlaceMode {\n\treturn []H264InterlaceMode{\n\t\t\"PROGRESSIVE\",\n\t\t\"TOP_FIELD\",\n\t\t\"BOTTOM_FIELD\",\n\t\t\"FOLLOW_TOP_FIELD\",\n\t\t\"FOLLOW_BOTTOM_FIELD\",\n\t}\n}", "title": "" }, { "docid": "32c8df6cd198a3e103a585877a8802e8", "score": "0.55353934", "text": "func QualityFilter_Values() []string {\n\treturn []string{\n\t\tQualityFilterNone,\n\t\tQualityFilterAuto,\n\t\tQualityFilterLow,\n\t\tQualityFilterMedium,\n\t\tQualityFilterHigh,\n\t}\n}", "title": "" }, { "docid": "377676ecc968365f1e0f39f27c8c0cf5", "score": "0.552334", "text": "func (s *Series) values() []interface{} {\n\tif s.which == typeInt {\n\t\tresult := make([]interface{}, len(s.valInts))\n\t\tfor i, elem := range s.valInts {\n\t\t\tresult[i] = elem\n\t\t}\n\t\treturn result\n\t} else if s.which == typeFloat {\n\t\tresult := make([]interface{}, len(s.valFloats))\n\t\tfor i, elem := range s.valFloats {\n\t\t\tresult[i] = elem\n\t\t}\n\t\treturn result\n\t}\n\tresult := make([]interface{}, len(s.valObjs))\n\tfor i, elem := range s.valObjs {\n\t\tresult[i] = elem\n\t}\n\treturn result\n}", "title": "" }, { "docid": "e4d6c734e0ae90dbae41ae6c15c4d3f2", "score": "0.55229384", "text": "func (NoiseFilterPostTemporalSharpeningStrength) Values() []NoiseFilterPostTemporalSharpeningStrength {\n\treturn []NoiseFilterPostTemporalSharpeningStrength{\n\t\t\"LOW\",\n\t\t\"MEDIUM\",\n\t\t\"HIGH\",\n\t}\n}", "title": "" }, { "docid": "82fe2e3d07ebee3930997a8eecab0a77", "score": "0.5520971", "text": "func (v *Values) All() []interface{} {\n\tvar i []interface{}\n\tfor _, value := range v.values {\n\t\ti = append(i, value)\n\t}\n\treturn i\n}", "title": "" }, { "docid": "de8d66237703bcba5fe6aa1195a289c0", "score": "0.55164814", "text": "func (H265CodecProfile) Values() []H265CodecProfile {\n\treturn []H265CodecProfile{\n\t\t\"MAIN_MAIN\",\n\t\t\"MAIN_HIGH\",\n\t\t\"MAIN10_MAIN\",\n\t\t\"MAIN10_HIGH\",\n\t\t\"MAIN_422_8BIT_MAIN\",\n\t\t\"MAIN_422_8BIT_HIGH\",\n\t\t\"MAIN_422_10BIT_MAIN\",\n\t\t\"MAIN_422_10BIT_HIGH\",\n\t}\n}", "title": "" }, { "docid": "d283a6d770fa1190d805e145d99f640f", "score": "0.5507858", "text": "func (CmafInitializationVectorInManifest) Values() []CmafInitializationVectorInManifest {\n\treturn []CmafInitializationVectorInManifest{\n\t\t\"INCLUDE\",\n\t\t\"EXCLUDE\",\n\t}\n}", "title": "" }, { "docid": "f7adca26a9323d925bf2f529107af8c5", "score": "0.5507699", "text": "func (page PolicyTrackedResourcesQueryResultsPage) Values() []PolicyTrackedResource {\n\tif page.ptrqr.IsEmpty() {\n\t\treturn nil\n\t}\n\treturn *page.ptrqr.Value\n}", "title": "" }, { "docid": "49f06412bb6dcbf27aafa654e3b3f3de", "score": "0.55041397", "text": "func (ThumbnailConfigurationStorage) Values() []ThumbnailConfigurationStorage {\n\treturn []ThumbnailConfigurationStorage{\n\t\t\"SEQUENTIAL\",\n\t\t\"LATEST\",\n\t}\n}", "title": "" }, { "docid": 
"7c926993dd80e3179b87f5364ac06e4e", "score": "0.5502754", "text": "func (EmbeddedConvert608To708) Values() []EmbeddedConvert608To708 {\n\treturn []EmbeddedConvert608To708{\n\t\t\"UPCONVERT\",\n\t\t\"DISABLED\",\n\t}\n}", "title": "" }, { "docid": "c56a37f95ddee44b56421955f68c534e", "score": "0.5494563", "text": "func (cr *Float64ChunkIterator) ChunkValues() []float64 { return cr.Chunk().Float64Values() }", "title": "" }, { "docid": "17cd31a38d14275a41d548bd7bc1663c", "score": "0.54940933", "text": "func (PriceUnits) Values() []PriceUnits {\n\treturn []PriceUnits{\n\t\t\"HOURLY\",\n\t}\n}", "title": "" } ]
00410598bacaf9316b2c6b889aa96480
GoString returns the string representation. API parameter values that are decorated as "sensitive" in the API will not be included in the string output. The member name will be present, but the value will be replaced with "sensitive".
[ { "docid": "902bc9ca7b8abe223dacbd811350c2a8", "score": "0.0", "text": "func (s TableFieldLinkContentConfiguration) GoString() string {\n\treturn s.String()\n}", "title": "" } ]
[ { "docid": "4ef987fe3192563c6dca972fb1a50812", "score": "0.6264239", "text": "func (v *ParamsStruct) String() string {\n\tif v == nil {\n\t\treturn \"<nil>\"\n\t}\n\n\tvar fields [1]string\n\ti := 0\n\tfields[i] = fmt.Sprintf(\"UserUUID: %v\", v.UserUUID)\n\ti++\n\n\treturn fmt.Sprintf(\"ParamsStruct{%v}\", strings.Join(fields[:i], \", \"))\n}", "title": "" }, { "docid": "eaeb58a79f8ef47f37d143a9e12ceaea", "score": "0.6223128", "text": "func String(v interface{}) (string, error) {\n\tfields, err := extractFields(nil, v)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar s strings.Builder\n\tfor i, fld := range fields {\n\t\tif !fld.options.noprint {\n\t\t\ts.WriteString(flagUsage(fld))\n\t\t\ts.WriteString(\"=\")\n\t\t\ts.WriteString(fmt.Sprintf(\"%v\", fld.field.Interface()))\n\t\t\tif i < len(fields)-1 {\n\t\t\t\ts.WriteString(\"\\n\")\n\t\t\t}\n\t\t}\n\t}\n\n\treturn s.String(), nil\n}", "title": "" }, { "docid": "043b8a0b9d5d1a906547c2275588b9e8", "score": "0.6189073", "text": "func (p Param) String() string {\n\tjp, _ := json.Marshal(p)\n\treturn string(jp)\n}", "title": "" }, { "docid": "0d37436bdc92770c00a49991307eaffe", "score": "0.5911454", "text": "func (this ApiLevel) String() string {\n\treturn this.value\n}", "title": "" }, { "docid": "94c45dad0cbd6dc6a82d7aca9eab1da2", "score": "0.58610946", "text": "func (p Params) String() string {\n\tjp, _ := json.Marshal(p)\n\treturn string(jp)\n}", "title": "" }, { "docid": "7422e92ddcee98e4ae227fd7e12e513f", "score": "0.58572596", "text": "func (s GroupMember) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "7422e92ddcee98e4ae227fd7e12e513f", "score": "0.58563596", "text": "func (s GroupMember) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "7358f35155cdc3916d6c7b63efefbf06", "score": "0.5852876", "text": "func (o VserverAggrInfoType) String() string {\n\treturn ToString(reflect.ValueOf(o))\n}", "title": "" }, { "docid": "310d5d40e335fdc7a93c98cac63b9d31", "score": "0.5834359", "text": "func (p *Param) String() string {\n\treturn fmt.Sprintf(\"%v\", p.Val)\n}", "title": "" }, { "docid": "b1d6b5994a29dcc33e36e994f65e26fb", "score": "0.5801103", "text": "func (s EnvironmentMember) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "46c2001dbd67e783a57ed9852ca71501", "score": "0.580063", "text": "func (m *Member) String() string {\n\tvar builder strings.Builder\n\tbuilder.WriteString(\"Member(\")\n\tbuilder.WriteString(fmt.Sprintf(\"id=%v\", m.ID))\n\tbuilder.WriteString(\", member_email=\")\n\tbuilder.WriteString(m.MemberEmail)\n\tbuilder.WriteString(\", member_name=\")\n\tbuilder.WriteString(m.MemberName)\n\tbuilder.WriteString(\", member_password=\")\n\tbuilder.WriteString(m.MemberPassword)\n\tbuilder.WriteByte(')')\n\treturn builder.String()\n}", "title": "" }, { "docid": "4b1f28615644fea2c3b408ac935bfd54", "score": "0.5799672", "text": "func (fp *FunctionParam) String() string {\n\treturn fmt.Sprintf(\"%v %v\", fp.wtype, fp.name)\n}", "title": "" }, { "docid": "2e6daaa750bb307921f4b0e7a3989155", "score": "0.5786204", "text": "func (p *Parameter) String() string {\n\treturn fmt.Sprintf(\"%v\", p.value)\n}", "title": "" }, { "docid": "753a1ebc168f836c3755404e8ca8865c", "score": "0.57039726", "text": "func (v *Echo_EchoStructMap_Args) String() string {\n\tif v == nil {\n\t\treturn \"<nil>\"\n\t}\n\n\tvar fields [1]string\n\ti := 0\n\tfields[i] = fmt.Sprintf(\"Arg: %v\", v.Arg)\n\ti++\n\n\treturn fmt.Sprintf(\"Echo_EchoStructMap_Args{%v}\", 
strings.Join(fields[:i], \", \"))\n}", "title": "" }, { "docid": "e988010bbe0a9e1db934209b8b5bcc6f", "score": "0.567947", "text": "func (v *QueryParamsOptsStruct) String() string {\n\tif v == nil {\n\t\treturn \"<nil>\"\n\t}\n\n\tvar fields [4]string\n\ti := 0\n\tfields[i] = fmt.Sprintf(\"Name: %v\", v.Name)\n\ti++\n\tif v.UserUUID != nil {\n\t\tfields[i] = fmt.Sprintf(\"UserUUID: %v\", *(v.UserUUID))\n\t\ti++\n\t}\n\tif v.AuthUUID != nil {\n\t\tfields[i] = fmt.Sprintf(\"AuthUUID: %v\", *(v.AuthUUID))\n\t\ti++\n\t}\n\tif v.AuthUUID2 != nil {\n\t\tfields[i] = fmt.Sprintf(\"AuthUUID2: %v\", *(v.AuthUUID2))\n\t\ti++\n\t}\n\n\treturn fmt.Sprintf(\"QueryParamsOptsStruct{%v}\", strings.Join(fields[:i], \", \"))\n}", "title": "" }, { "docid": "64a4291dd4348f585fd7a6f0091ab3cd", "score": "0.563443", "text": "func (p Params) String() string {\n\treturn fmt.Sprintf(`Params:\n Partner validator min-self-delegation: %s\n Consensus validator min-self-delegation: %s\n Partner validator min-delegation: \t %s\n Consensus validator min-delegation: %s\n Consensus validator commission rate: %s\n Partner validator commission rate: %s\n Supply Coin Denom: %s`,\n\t\tp.PartnerValidatorMinSelfDelegation, p.ConsensusValidatorMinSelfDelegation,\n\t\tp.PartnerValidatorMinDelegation, p.ConsensusValidatorMinDelegation,\n\t\tp.ConsensusValidatorFixedCommissionRate, p.PartnerValidatorFixedCommissionRate, p.SupplyDenom)\n}", "title": "" }, { "docid": "433974782504c0247a4eed38b693bc8d", "score": "0.56344277", "text": "func (m Member) String() string {\n\treturn m.Name\n}", "title": "" }, { "docid": "1050fda86d17f688f12ea9cf1ca474f4", "score": "0.5626281", "text": "func (s StringParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "1050fda86d17f688f12ea9cf1ca474f4", "score": "0.5626281", "text": "func (s StringParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "947af2e687894d6d29a03e631290b565", "score": "0.561751", "text": "func (s CognitoMemberDefinition) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "956aefa1bd2d41ff35f228d4f764a1ae", "score": "0.55769485", "text": "func (f *ParameterField) String() string {\n\treturn fmt.Sprintf(\"parameterOptions %s typeCode %s mode %s fraction %d length %d name %s\",\n\t\tf.parameterOptions,\n\t\tf.tc,\n\t\tf.mode,\n\t\tf.fraction,\n\t\tf.length,\n\t\tf.Name(),\n\t)\n}", "title": "" }, { "docid": "583380e127f4dba18cda6324b85ffb0f", "score": "0.5576497", "text": "func (nvp NameValuePair) String() string {\n\treturn fmt.Sprintf(\"(Name:%s,Value:%s,Type:%s)\", nvp.VarName, nvp.value(), nvp.Type)\n}", "title": "" }, { "docid": "e8ec90c7d3803454fa26a1609fe712fe", "score": "0.5574367", "text": "func (s ConformancePackInputParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "cdbf7509ef8d41cb9a5406ad3a14a3af", "score": "0.55650645", "text": "func (d *gsObjectInfo) String() string {\n\tsrc, err := json.Marshal(d.Info)\n\tif err != nil {\n\t\treturn fmt.Sprintf(\"%+v\", d.Info)\n\t}\n\treturn string(src)\n}", "title": "" }, { "docid": "2fd2874d221bf3b88cfdab9b589cac77", "score": "0.55598235", "text": "func (t Type) String() string {\n\treturn TypeToGRPCField(t).String()\n}", "title": "" }, { "docid": "0815da3599a45529b5c823ddbc76dc70", "score": "0.5559223", "text": "func (gp Params) String() string {\n\treturn gp.VotingParams.String() + \"\\n\" +\n\t\tgp.TallyParams.String() + \"\\n\" + gp.DepositParams.String()\n}", "title": "" }, { "docid": 
"0d34031c513158ff6fe20f3e7ebd3f66", "score": "0.5522846", "text": "func (s MappedDataSetParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "3ffc25a4edc5ca7652c4750de89130eb", "score": "0.5522052", "text": "func (s Parameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "3ffc25a4edc5ca7652c4750de89130eb", "score": "0.5522052", "text": "func (s Parameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "3ffc25a4edc5ca7652c4750de89130eb", "score": "0.5522052", "text": "func (s Parameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "24f0bfea8419e1d9c7653157a829aa39", "score": "0.55216116", "text": "func (op PipelineOp) String() string {\n\ts := make([]byte, 0, 32)\n\ts = append(s, \"get field \"...)\n\ts = strconv.AppendInt(s, int64(op.Field), 10)\n\tif op.DefaultValue == nil {\n\t\treturn string(s)\n\t}\n\ts = append(s, \" with default\"...)\n\treturn string(s)\n}", "title": "" }, { "docid": "2537643a4d071d1910ad417707ce36dd", "score": "0.5486904", "text": "func (s DecimalParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "2537643a4d071d1910ad417707ce36dd", "score": "0.5486904", "text": "func (s DecimalParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "09b6703358d5028baa3775665603a54e", "score": "0.5476082", "text": "func (s JSONMappingParameters) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "09b6703358d5028baa3775665603a54e", "score": "0.5476082", "text": "func (s JSONMappingParameters) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "95c26f59688abacfcb4ed1f105b419b1", "score": "0.54714805", "text": "func (s *VirtualInput) String() string {\n\treturn fmt.Sprintf(\"%+v\", *s)\n}", "title": "" }, { "docid": "64398c6eed3f49936dbd9aade80b76a9", "score": "0.5461902", "text": "func (s MemberDetail) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "b6af83497814306805b3f888238bb744", "score": "0.54596865", "text": "func (me TxsdMethodparamRep) String() string { return xsdt.Token(me).String() }", "title": "" }, { "docid": "3604a1d12813ce519872a998508d98c7", "score": "0.5449386", "text": "func (s GetMembersOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "9e3f1500f300e00cb99b759b6cf8770c", "score": "0.5446846", "text": "func (s ScmProvider) String() string {\n\tjs, _ := json.Marshal(s)\n\treturn string(js)\n}", "title": "" }, { "docid": "34d38eab17e3420f214ad03a8ffc6359", "score": "0.5435329", "text": "func (field Field) String() string {\n\treturn field.Name + \"=\" + stringValue(field, false)\n}", "title": "" }, { "docid": "a102628683574cfd3b9b626c5ef060f3", "score": "0.54287386", "text": "func (v *QueryParamsStruct) String() string {\n\tif v == nil {\n\t\treturn \"<nil>\"\n\t}\n\n\tvar fields [5]string\n\ti := 0\n\tfields[i] = fmt.Sprintf(\"Name: %v\", v.Name)\n\ti++\n\tif v.UserUUID != nil {\n\t\tfields[i] = fmt.Sprintf(\"UserUUID: %v\", *(v.UserUUID))\n\t\ti++\n\t}\n\tif v.AuthUUID != nil {\n\t\tfields[i] = fmt.Sprintf(\"AuthUUID: %v\", *(v.AuthUUID))\n\t\ti++\n\t}\n\tif v.AuthUUID2 != nil {\n\t\tfields[i] = fmt.Sprintf(\"AuthUUID2: %v\", *(v.AuthUUID2))\n\t\ti++\n\t}\n\tfields[i] = fmt.Sprintf(\"Foo: %v\", v.Foo)\n\ti++\n\n\treturn fmt.Sprintf(\"QueryParamsStruct{%v}\", strings.Join(fields[:i], \", \"))\n}", "title": "" }, { "docid": 
"62ee10d923b49807859b3bd764f81e15", "score": "0.54285294", "text": "func (s ParameterObject) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "11f92eb230675327e183270a7f53023f", "score": "0.54221255", "text": "func (f *Field) String() string {\n\treturn fmt.Sprintf(\"%s.%s\", f.Struct, f.Name)\n}", "title": "" }, { "docid": "3564e810a8580c6c92411a90aa6d60a1", "score": "0.542205", "text": "func (t *Struct) String() string {\n\treturn fmt.Sprintf(\"%s.%s\", t.Pkg.Name, t.Name)\n}", "title": "" }, { "docid": "252b578b02445495faf7c8a1d0cc0396", "score": "0.5392762", "text": "func (s HITLayoutParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "a4c54cbfb5ec72f8261aaed18092a6bb", "score": "0.53917444", "text": "func (i *GPUInfo) JSONString(indent bool) string {\n\treturn safeJSON(gpuPrinter{i}, indent)\n}", "title": "" }, { "docid": "7810f5262bc9faa785879a38fa9b775d", "score": "0.5389633", "text": "func (s MemberDefinition) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "9c66cccdde8c717e9a130db2918369c1", "score": "0.5387474", "text": "func (s ParameterStringFilter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "0937fecf24664785bc9484de4c85dcaf", "score": "0.5386834", "text": "func String(name, value, usage string) *string {\n\tp := Param{\n\t\tParamType: ParamTypeString,\n\t\tName: name,\n\t\tDefault: value,\n\t\tUsage: usage,\n\t}\n\tptr := new(string)\n\treturn newParam(p, ptr).(*string)\n}", "title": "" }, { "docid": "107d9ae25b5bb5f0efc1b21eb657e2e8", "score": "0.53807145", "text": "func (s ServiceNowParameters) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "107d9ae25b5bb5f0efc1b21eb657e2e8", "score": "0.53807145", "text": "func (s ServiceNowParameters) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "c29d600c62e6f51acbe98998ffc8a1c1", "score": "0.5378591", "text": "func (c *Criterion) String() string {\n\tstr, _ := c.MarshalJSON()\n\treturn string(str)\n}", "title": "" }, { "docid": "73bd066e28eeff7b75dcd1008d93e4cf", "score": "0.53734565", "text": "func (params Params) String() string {\n\tout, _ := yaml.Marshal(params)\n\treturn string(out)\n}", "title": "" }, { "docid": "b9d97878d151e4ecce328096f2855449", "score": "0.5372647", "text": "func (s TemplateParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "dbade7ee1753eb1fdd72af772e529075", "score": "0.5366653", "text": "func (s ParameterMapEntry) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "cd6fe60da2bb297ea1daa97e3ee383f5", "score": "0.5364029", "text": "func (s ComponentParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "11a4e7851f5e03cc5470dce5aef81d39", "score": "0.53605855", "text": "func (s GetMembersInput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "0e6890d3a0204c7a1bce386acb72d903", "score": "0.5346103", "text": "func (s CSVMappingParameters) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "0e6890d3a0204c7a1bce386acb72d903", "score": "0.5346103", "text": "func (s CSVMappingParameters) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "105fac700204ede2246a8a98a8d47b43", "score": "0.53390586", "text": "func (s PolicyParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": 
"2982946683bb0e094591a99296aea725", "score": "0.5322016", "text": "func String(value interface{}) string {\n\treturn fmt.Sprintf(\"%v\", value)\n}", "title": "" }, { "docid": "62132f69fac5eea623282741c0a0cce3", "score": "0.5322011", "text": "func (c Params) String() string {\n\tdefer FuncEnding(FuncStarting())\n\tnameFmt := \"%10s: \"\n\tlines := []string{\n\t\tfmt.Sprintf(nameFmt+\"%t\", \"Debug\", debug),\n\t\tfmt.Sprintf(nameFmt+\"%t\", \"Verbose\", c.Verbose),\n\t\tfmt.Sprintf(nameFmt+\"%d\", \"Errors\", len(c.Errors)),\n\t\tfmt.Sprintf(nameFmt+\"%d\", \"Count\", c.Count),\n\t\tfmt.Sprintf(nameFmt+\"%s\", \"Input File\", c.InputFile),\n\t\tfmt.Sprintf(nameFmt+\"%d lines\", \"Input\", len(c.Input)),\n\t\tfmt.Sprintf(nameFmt+\"%d lines\", \"Custom\", len(c.Custom)),\n\t}\n\tif len(c.Errors) > 0 {\n\t\tlines = append(lines, fmt.Sprintf(\"Errors (%d):\", len(c.Errors)))\n\t\terrors := make([]string, len(c.Errors))\n\t\tfor i, err := range c.Errors {\n\t\t\terrors[i] = err.Error()\n\t\t}\n\t\tlines = append(lines, AddLineNumbers(errors, 1)...)\n\t}\n\tif len(c.Input) > 0 {\n\t\tlines = append(lines, fmt.Sprintf(\"Input (%d):\", len(c.Input)))\n\t\tlines = append(lines, AddLineNumbers(c.Input, 0)...)\n\t}\n\tif len(c.Custom) > 0 {\n\t\tlines = append(lines, fmt.Sprintf(\"Custom Input (%d):\", len(c.Custom)))\n\t\tlines = append(lines, AddLineNumbers(c.Custom, 0)...)\n\t}\n\treturn strings.Join(lines, \"\\n\")\n}", "title": "" }, { "docid": "2a5dddaf0899c0f7e51a50091b7a40ef", "score": "0.5316514", "text": "func (o SnapmirrorUpdateRequest) String() string {\n\treturn ToString(reflect.ValueOf(o))\n}", "title": "" }, { "docid": "5bc0664256e6e8fd580c5161059d7c29", "score": "0.5316048", "text": "func (v *Voca) String() string {\n\tvar builder strings.Builder\n\tbuilder.WriteString(\"Voca(\")\n\tbuilder.WriteString(fmt.Sprintf(\"id=%v\", v.ID))\n\tbuilder.WriteString(\", key=\")\n\tbuilder.WriteString(v.Key)\n\tbuilder.WriteString(\", value=\")\n\tbuilder.WriteString(v.Value)\n\tif v := v.Example; v != nil {\n\t\tbuilder.WriteString(\", example=\")\n\t\tbuilder.WriteString(*v)\n\t}\n\tbuilder.WriteString(\", created_at=\")\n\tbuilder.WriteString(v.CreatedAt.Format(time.ANSIC))\n\tbuilder.WriteByte(')')\n\treturn builder.String()\n}", "title": "" }, { "docid": "f29b23f1b131dd94a2fc5373e8a0a277", "score": "0.5314912", "text": "func (a Attribute) String() string {\n\treturn fmt.Sprintf(\"\\t%s %s `json:\\\"%s\\\" dynamo:\\\"%s\\\"`\", a.Ident.Pascalize(), a.GoType, a.Ident.Underscore(), a.Ident.Underscore())\n}", "title": "" }, { "docid": "619f91c872b1bac6a6d507944a962fcc", "score": "0.53133047", "text": "func (o LunInfoType) String() string {\n\treturn ToString(reflect.ValueOf(o))\n}", "title": "" }, { "docid": "c635c83e9b07fb603c326a6878f831b7", "score": "0.5312883", "text": "func (dp Service) String() string {\n\tout, err := json.Marshal(dp)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\treturn string(out)\n}", "title": "" }, { "docid": "55a28d15ad9e8a75756df0d81bf05189", "score": "0.5312249", "text": "func (o NvmeSubsystemMapRemoveRequest) String() string {\n\treturn ToString(reflect.ValueOf(o))\n}", "title": "" }, { "docid": "1d4e7d34766971f14676e1ed1bb0333d", "score": "0.5308925", "text": "func (s DocumentParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "1d4e7d34766971f14676e1ed1bb0333d", "score": "0.5308925", "text": "func (s DocumentParameter) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": 
"ad064b89c683e8c10416461fa7825d0b", "score": "0.5302616", "text": "func (s SuperuserParameters) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "93c0d5d23f2068450a6c0f5084645da1", "score": "0.5302495", "text": "func (s VirtualObject) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "920ad16961bdc8adf5d75ab107eae1a0", "score": "0.529565", "text": "func (s ServiceFlags) String() string {\n\tvar str []string\n\n\tstr = append(str, string(s.SVCType()))\n\tif s&serviceFlagLocalScope != 0 {\n\t\tstr = append(str, string(SVCTrafficPolicyLocal))\n\t}\n\tif s&serviceFlagSessionAffinity != 0 {\n\t\tstr = append(str, \"sessionAffinity\")\n\t}\n\tif s&serviceFlagRoutable == 0 {\n\t\tstr = append(str, \"non-routable\")\n\t}\n\tif s&serviceFlagSourceRange != 0 {\n\t\tstr = append(str, \"check source-range\")\n\t}\n\n\treturn strings.Join(str, \", \")\n}", "title": "" }, { "docid": "9a84e8c3b2f0d30f9bb1d8ed78ed0599", "score": "0.5290413", "text": "func (st DetectionFlags) String() string {\n\tret := getValueOrEmpty(st.Bestface, \"bestface,\") +\n\t\tgetValueOrEmpty(st.Basicpoints, \"basicpoints,\") +\n\t\tgetValueOrEmpty(st.Propoints, \"propoints,\") +\n\t\tgetValueOrEmpty(st.Classifiers, \"classifiers\") +\n\t\tgetValueOrEmpty(st.Extended, \"extended,\") +\n\t\t\"content\"\n\n\treturn ret\n}", "title": "" }, { "docid": "77b987e44636fdda049baa7cd0039edc", "score": "0.5286009", "text": "func (s PermissionGroupParams) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "24840a1ef3be00166611c4c788935816", "score": "0.52813804", "text": "func (o LunMapRequest) String() string {\n\treturn ToString(reflect.ValueOf(o))\n}", "title": "" }, { "docid": "f94e9a71c6e92cd52044faa4da2c8f59", "score": "0.527597", "text": "func (s ParameterValue) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "3f9b199981a4e1345a0566bee40b0dc9", "score": "0.5274579", "text": "func (g Feature) String() string {\n\treturn g.JSON()\n}", "title": "" }, { "docid": "8a050b5c8d8af92ab0d999a24ba8b324", "score": "0.52694976", "text": "func JSONString(v interface{}) string {\n\treturn marshalString(json.MarshalIndent(v, \"\", \" \"))\n}", "title": "" }, { "docid": "80c6316aa005b21888c64775f655189d", "score": "0.5258605", "text": "func (msg *message) String() string {\n\tif len(msg.params) > 0 {\n\t\treturn fmt.Sprintf(\"%s - %v\", msg.message, msg.params)\n\t}\n\treturn fmt.Sprintf(\"%s\", msg.message)\n}", "title": "" }, { "docid": "f22d9114d24746c8de07c97a5a480a3f", "score": "0.5258158", "text": "func (s MariaDbParameters) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "6e6e105e8809846ee34eb462299b2036", "score": "0.5257236", "text": "func (v String) String() string {\n\tb, _ := json.Marshal(v.Value())\n\treturn string(b)\n}", "title": "" }, { "docid": "f22d9114d24746c8de07c97a5a480a3f", "score": "0.52570957", "text": "func (s MariaDbParameters) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "5cc1f76fbd35bc1a3cdba30e9de8fea5", "score": "0.52565295", "text": "func (s MappingParameters) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "5cc1f76fbd35bc1a3cdba30e9de8fea5", "score": "0.52565295", "text": "func (s MappingParameters) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "79b571b636deac89245b3c09fef3c6b3", "score": "0.5254819", "text": "func (p AssetParam) String() string {\n\tout, _ := 
yaml.Marshal(p)\n\treturn string(out)\n}", "title": "" }, { "docid": "b30303dca846f470eb2efe6e9ecb18bb", "score": "0.5248988", "text": "func (s FieldInfo) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "eddc413eecf078a2d20ca3cd8fb78889", "score": "0.52487046", "text": "func (f Flag) String() string {\n\tjf, _ := json.Marshal(f)\n\treturn string(jf)\n}", "title": "" }, { "docid": "61f38bff17ce0c57709f071aecaaf379", "score": "0.5248105", "text": "func (s StringParameterDeclaration) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "2bd92fee4c643989fbf729c048a5e54f", "score": "0.52439094", "text": "func (request *ChargeRequest) String() string {\n\tjsonifiedRequest, err := json.Marshal(request)\n\n\tif err != nil {\n\t\tlog.Errorf(\"Got error while marshaling request %v\", err)\n\t\treturn \"\"\n\t}\n\n\treturn string(jsonifiedRequest)\n}", "title": "" }, { "docid": "2b86b8c6420eb971496ecd9490d6bfac", "score": "0.524053", "text": "func (m method) String() string {\n\te := m.Serialize()\n\t// Method serialization is custom because of different spacing.\n\treturn fmt.Sprintf(\"%v %v%v\", e.Kind, e.Name, e.Decl)\n}", "title": "" }, { "docid": "2eddc9f51f6b73ff8ba0d0f0f09ce6ff", "score": "0.5240068", "text": "func (vp VertexProperty) String() string {\n\treturn fmt.Sprintf(\"vp[%s->%v]\", vp.Key, vp.Value)\n}", "title": "" }, { "docid": "79f79cadd28fa45a671401aa7685c540", "score": "0.52380306", "text": "func (s GetParameterOutput) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "669d6d05248d32c96e2631243438f273", "score": "0.523777", "text": "func (s EnvironmentMember) GoString() string {\n\treturn s.String()\n}", "title": "" }, { "docid": "bfc42822336be896e444d831ea8b700b", "score": "0.5235532", "text": "func (flag *MapFlag[K, V]) String() string {\n\treturn cli.FlagStringer(flag)\n}", "title": "" }, { "docid": "7aa9725ba483ff95901c718a2096f3e3", "score": "0.52346087", "text": "func (o AggrGetIterRequestDesiredAttributes) String() string {\n\treturn ToString(reflect.ValueOf(o))\n}", "title": "" }, { "docid": "d98b354f882484be7c9167e50644a6d0", "score": "0.5231971", "text": "func (flag *mapValue[K, V]) String() string {\n\tif flag.values == nil {\n\t\treturn \"\"\n\t}\n\treturn collections.MapJoin(*flag.values, flag.argSep, flag.valSep)\n}", "title": "" }, { "docid": "e03733f177bf21ef1d2c26ae306400cd", "score": "0.52319294", "text": "func (t MatchType) String() string {\n\treturn MatchTypeToGRPCField(t).String()\n}", "title": "" }, { "docid": "f14bf1da8cb7aafcd921efa19d21c006", "score": "0.523171", "text": "func (s StructValue) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" }, { "docid": "8dd132e703b8d75b5577b3291c034635", "score": "0.5230301", "text": "func (s ComponentParameterDetail) String() string {\n\treturn awsutil.Prettify(s)\n}", "title": "" } ]
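The String() passages above all follow one pattern: a value type implements fmt.Stringer by delegating to a pretty-printer (awsutil.Prettify, json.Marshal, yaml.Marshal, or ToString). A minimal self-contained sketch of that pattern follows; the Config type and its fields are illustrative assumptions, not taken from any passage.

package main

import (
	"encoding/json"
	"fmt"
)

// Config is a hypothetical value type; any struct works the same way.
type Config struct {
	Name    string `json:"name"`
	Retries int    `json:"retries"`
}

// String implements fmt.Stringer by pretty-printing the struct as indented
// JSON, mirroring the Prettify-style helpers shown in the passages above.
func (c Config) String() string {
	b, err := json.MarshalIndent(c, "", "  ")
	if err != nil {
		return err.Error()
	}
	return string(b)
}

func main() {
	// fmt.Println picks up the Stringer implementation automatically.
	fmt.Println(Config{Name: "demo", Retries: 3})
}

The payoff is at the call site: fmt.Println(cfg) prints the indented JSON without the caller ever touching the encoder.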
ac5031fa3d488102fa19e6d61f860e2f
Bracket checks that all brackets are matched
[ { "docid": "cde0e7fc0892624e857a1518a2e2fbe7", "score": "0.6696062", "text": "func Bracket(s string) (bool, error) {\n\tstack := make([]rune, 0)\n\n\tfor _, r := range s {\n\t\tswitch r {\n\t\tcase '{':\n\t\t\tstack = append(stack, '}')\n\t\tcase '[':\n\t\t\tstack = append(stack, ']')\n\t\tcase '(':\n\t\t\tstack = append(stack, ')')\n\t\tdefault:\n\t\t\tend := len(stack) - 1\n\t\t\tif end == -1 || r != stack[end] {\n\t\t\t\treturn false, nil\n\t\t\t}\n\n\t\t\tstack = stack[0:end]\n\t\t}\n\t}\n\treturn len(stack) == 0, nil\n}", "title": "" } ]
[ { "docid": "fb7147e12c6de557b1a53d533585afc8", "score": "0.75185585", "text": "func isBracket(r rune) bool {\n\tswitch r {\n\tcase '{',\n\t\t'}',\n\t\t'[',\n\t\t']',\n\t\t'(',\n\t\t')':\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "7ef45391484ef5b3de5d172bc0aa9a95", "score": "0.69857776", "text": "func Bracket(input string) (bool, error) {\n\tvar brackets bracketStack\n\n\tfor _, r := range input {\n\t\tif strings.ContainsRune(\"[{(\", r) {\n\t\t\tbrackets.push(r)\n\t\t} else if strings.ContainsRune(\"]})\", r) {\n\t\t\tif len(brackets) == 0 {\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t\tif b := brackets.pop(); matchingBrackets[b] != r {\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t}\n\t}\n\n\treturn len(brackets) == 0, nil\n}", "title": "" }, { "docid": "8c47d2e58eee4aa86cf6a51fc722df16", "score": "0.68088233", "text": "func MatchBracketSurrounded() MatcherFunc {\n\treturn MatchSurrounded(\"[\", \"]\")\n}", "title": "" }, { "docid": "8c47d2e58eee4aa86cf6a51fc722df16", "score": "0.68088233", "text": "func MatchBracketSurrounded() MatcherFunc {\n\treturn MatchSurrounded(\"[\", \"]\")\n}", "title": "" }, { "docid": "6a9858b15f057def4c15d654952c031f", "score": "0.6698263", "text": "func bracket(r rune) bool {\n\treturn r == '<' || r == '>'\n}", "title": "" }, { "docid": "1bbb575860e13476b5a876763e1666b3", "score": "0.6695119", "text": "func Bracket(str string) bool {\n\tstack := []rune{}\n\tfor _, r := range str {\n\t\tswitch r {\n\t\tcase '[', '{', '(':\n\t\t\tstack = append(stack, r)\n\t\tcase ']', '}', ')':\n\t\t\tif len(stack) == 0 {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tstackR := stack[len(stack)-1]\n\t\t\tif stackR != closeToOpen[r] {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tstack = stack[:len(stack)-1]\n\t\tdefault:\n\t\t}\n\t}\n\treturn len(stack) == 0\n}", "title": "" }, { "docid": "7a6736bf65df9d6952d4f09f5baee927", "score": "0.6426768", "text": "func Bracket(test string) (bool, error) {\n\tvar stack []rune = make([]rune, 0, len(test))\n\tfor _, r := range test {\n\n\t\t//if we find a closing character and we have a non-empty stack\n\t\tif open, ok := closeToOpen[r]; ok && len(stack) > 0 {\n\t\t\t// ... 
check to make sure it closes out what's on top of the stack\n\t\t\tif stack[len(stack)-1] != open {\n\t\t\t\t//if it doesn't, this string fails\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t\t//if it does, pop the item from the stack\n\t\t\tstack = stack[:len(stack)-1]\n\n\t\t\t// if we have an empty stack and a closing character, bail\n\t\t} else if len(stack) == 0 && ok {\n\t\t\treturn false, nil\n\n\t\t} else {\n\t\t\t//otherwise, we push onto the stack\n\t\t\tstack = append(stack, r)\n\t\t}\n\t}\n\n\treturn len(stack) == 0, nil\n}", "title": "" }, { "docid": "fbe87064f51006b71b750b31017eccfb", "score": "0.6348964", "text": "func isValid(parenthesis string, length int) (bool) {\n\tstack := Stack.CreateStack(length)\n\tfor i := 0; i < length; i++ {\n\t\tvar c = parenthesis[i]\n\t\tif c == '(' || c == '{' || c == '[' {\n\t\t\tstack.Push(string(c))\n\t\t} else {\n\t\t\t// It has to be a closing brace.\n\t\t\tif !stack.IsEmpty() {\n\t\t\t\tif (c == ')' && stack.Peek() == \"(\") ||\n\t\t\t\t\t(c == ']' && stack.Peek() == \"[\") ||\n\t\t\t\t\tc == '}' && stack.Peek() == \"{\"\t{\n\t\t\t\t\tstack.Pop()\n\t\t\t\t} else {\n\t\t\t\t\treturn false;\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\t}\n\treturn stack.IsEmpty()\n}", "title": "" }, { "docid": "3ca034c241ea98e294df436a6e454c30", "score": "0.63356715", "text": "func parseBracket(exp string, lr bool) (string) {\n\tprevExp := \"\"\n\tre := regexp.MustCompile(\"\\\\([0-9+*]+\\\\)\")\n\tfor {\n\t\tif exp == prevExp {\n\t\t\tbreak\n\t\t}\n\t\tprevExp = exp\n\t\tfound := re.FindString(exp)\n\t\t// have we found a bracket with no nested brackets inside?\n\t\tif found != \"\" {\n\t\t\t// if so reduce it, based on the rule given in the instructions\n\t\t\tnewValue := \"\"\n\t\t\tif lr {\n\t\t\t\tnewValue = parseLR(AH.TrimLastRune(AH.TrimFirstRune(found)))\n\t\t\t} else {\n\t\t\t\tnewValue = parsePM(AH.TrimLastRune(AH.TrimFirstRune(found)))\n\t\t\t}\n\n\t\t\texp = strings.Replace(exp, found, newValue, 1)\n\t\t}\n\t}\n\treturn exp\n}", "title": "" }, { "docid": "47d17925a6946982a2886ad955f895cc", "score": "0.6314016", "text": "func matchingViaStack(str string) bool {\n\tchars := []rune(str)\n\tbracePairs := map[string]string{\n\t\t\"[\": \"]\",\n\t\t\"{\": \"}\",\n\t\t\"(\": \")\",\n\t}\n\n\tbracesFound := []string{}\n\tstartBrace := \"\"\n\n\tfor _, char := range chars {\n\t\tif _, ok := bracePairs[string(char)]; ok {\n\t\t\tbracesFound = append(bracesFound, string(char))\n\t\t} else {\n\t\t\t// Check to ensure that we have any matching braces before we try to read them.\n\t\t\t// If we don't we are in a strange state and should exit.\n\t\t\tif len(bracesFound) == 0 {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tstartBrace, bracesFound = bracesFound[len(bracesFound)-1], bracesFound[:len(bracesFound)-1]\n\n\t\t\tif bracePairs[startBrace] != string(char) {\n\t\t\t\t// Mismatch, bail out!\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\n\t// At the end there should be no braces left.\n\tif len(bracesFound) > 0 {\n\t\treturn false\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "b6997d6d06a3f8fce834fb97477c4235", "score": "0.62241006", "text": "func checkForBalancedBrackets(stringVal string, openBrackets map[string]bool, stack []string, brackets map[string]string) (result string) {\n\n\t// loop through the input string's indivisual characters\n\tfor i := 0; i < len(stringVal); i++ {\n\n\t\t// get the ith character\n\t\tvar tempChar string = stringVal[i : i+1]\n\n\t\t// if the char is an open bracket\n\t\tif openBrackets[tempChar] 
{\n\t\t\tstack = append(stack, tempChar)\n\t\t} else {\n\n\t\t\t//check if the bracket is a closed bracket\n\t\t\topenBracketCheck, ok := brackets[tempChar]\n\t\t\tif ok {\n\n\t\t\t\t// if the length the stack is 0, that means we did not\n\t\t\t\t// encounter open bracket for the current closed bracket\n\t\t\t\tif len(stack) == 0 {\n\t\t\t\t\treturn \"NO\"\n\t\t\t\t}\n\n\t\t\t\t// pop the top of the stack\n\t\t\t\tvar popedChar string = stack[len(stack)-1]\n\t\t\t\tstack = stack[:len(stack)-1]\n\n\t\t\t\t// check if the closeing bracket that corresponds to\n\t\t\t\t// poped element of the stack matches the\n\t\t\t\t// closed bracket that we have encountered currently\n\t\t\t\tif openBracketCheck != popedChar {\n\t\t\t\t\treturn \"NO\"\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\t// check if there are any dangling open brackets in the stack\n\t// which did not get a matching closing bracket in the loop\n\tif len(stack) == 0 {\n\t\treturn \"YES\"\n\t} else {\n\t\treturn \"NO\"\n\t}\n\n}", "title": "" }, { "docid": "4fb0566707ae0f3d32241e3ce7e1e7d8", "score": "0.6124177", "text": "func Bracket(s string) (bool, error) {\n\tqueue := make([]rune, 0)\n\tfor _, l := range s {\n\n\t\tif !isParentheses(l) {\n\t\t\tcontinue\n\t\t}\n\n\t\tif isOpen(l) {\n\t\t\tqueue = append(queue, couples[l])\n\t\t} else {\n\t\t\tif len(queue) == 0 || string(l) != string(queue[len(queue)-1:]) {\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t\tqueue = queue[:len(queue)-1]\n\t\t}\n\n\t}\n\treturn len(queue) == 0, nil\n}", "title": "" }, { "docid": "7a227e287c98b5493f448e7c0cb1e5cc", "score": "0.6091991", "text": "func (x *yyLex) openBrackets() bool {\n\treturn x.bracket != 0 || x.parenthesis != 0 || x.brace != 0\n}", "title": "" }, { "docid": "3768ce6d720c872fccbbc6c63f89f7a8", "score": "0.60738736", "text": "func Problem20() {\n\n\tfmt.Println(isValid(\"()\"))\n\tfmt.Println(isValid(\"()[]{}\"))\n\tfmt.Println(isValid(\"(]\"))\n\tfmt.Println(isValid(\"([)]\"))\n\tfmt.Println(isValid(\"{[]}\"))\n\n}", "title": "" }, { "docid": "f96450e3e8d0c1c154e59fd7271a91d4", "score": "0.60591686", "text": "func isOpenBra(r rune) bool {\n\treturn r == '('\n}", "title": "" }, { "docid": "fbaeb600a76908a091ef7b63ce031e41", "score": "0.6045165", "text": "func (ml *MarkdownLang) AutoBracket(fs *pi.FileStates, bra rune, pos lex.Pos, curLn []rune) (match, newLine bool) {\n\tlnLen := len(curLn)\n\tmatch = pos.Ch == lnLen || unicode.IsSpace(curLn[pos.Ch]) // at end or if space after\n\tnewLine = false\n\treturn\n}", "title": "" }, { "docid": "c2608529178c2ec04990c0f6fc4827f7", "score": "0.58930725", "text": "func validate(stack []rune) {\n\n\tparity := 0\n\tfor _, r := range stack {\n\t\tswitch r {\n\t\tcase '(':\n\t\t\tparity++\n\t\tcase ')':\n\t\t\tparity--\n\t\tdefault:\n\t\t\t// stack should only contain '(' and ')'\n\t\t\tfmt.Printf(\"Something wrong: %q\\n\", string(stack))\n\t\t\tos.Exit(1)\n\t\t}\n\t\tif parity < 0 {\n\t\t\treturn\n\t\t}\n\t}\n\tif parity == 0 {\n\t\tfmt.Printf(\"Balanced expression: %q\\n\", string(stack))\n\t}\n}", "title": "" }, { "docid": "e598680e31d8e6d45bc6042b7bbc8e00", "score": "0.58647406", "text": "func isValid(s string) bool {\n\tx := []byte(s)\n\tstack := []byte{}\n\tfor _, v := range x {\n\t\tif len(stack) == 0 {\n\t\t\tstack = append(stack, v)\n\t\t} else {\n\t\t\tp := stack[len(stack)-1]\n\t\t\tswitch p {\n\t\t\tcase '(':\n\t\t\t\tif v == ')' {\n\t\t\t\t\tstack = stack[:len(stack)-1]\n\t\t\t\t} else if v == '}' || v == ']' {\n\t\t\t\t\treturn false\n\t\t\t\t} else {\n\t\t\t\t\tstack = append(stack, 
v)\n\t\t\t\t}\n\t\t\tcase '[':\n\t\t\t\tif v == ']' {\n\t\t\t\t\tstack = stack[:len(stack)-1]\n\t\t\t\t} else if v == '}' || v == ')' {\n\t\t\t\t\treturn false\n\t\t\t\t} else {\n\t\t\t\t\tstack = append(stack, v)\n\t\t\t\t}\n\t\t\tcase '{':\n\t\t\t\tif v == '}' {\n\t\t\t\t\tstack = stack[:len(stack)-1]\n\t\t\t\t} else if v == ']' || v == ')' {\n\t\t\t\t\treturn false\n\t\t\t\t} else {\n\t\t\t\t\tstack = append(stack, v)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tif len(stack) > 0 {\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" }, { "docid": "58fe5f073284404ddf21d0968e82da7c", "score": "0.58608437", "text": "func main() {\n\tfmt.Println(isValid(\"((()))\"))\n}", "title": "" }, { "docid": "10103292bc7fa8096a1456a6ca4bd709", "score": "0.5847047", "text": "func checkParenthesis(expression *string) bool {\n\tchars := []byte(*expression)\n\tstack := newStack()\n\tcntr := 0\n\tfor _, char := range chars {\n\t\tif char == lParenthesisByte {\n\t\t\tcntr++\n\t\t\tstack.push(&lParenthesis)\n\t\t}\n\t\tif char == rParenthesisByte {\n\t\t\tcntr--\n\t\t\tstack.pop()\n\t\t}\n\t}\n\tif cntr == 0 && stack.len() == 0 {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "7f063a3ebb2f479810a2e42968d27744", "score": "0.58042425", "text": "func isOpenCurly(r rune) bool {\n\treturn r == '{'\n}", "title": "" }, { "docid": "892a002068fc263f6882b6f13f69737a", "score": "0.57936853", "text": "func inBrackets(filter string) string {\n\tfilter = strings.TrimLeft(filter, \" \\t\\n\\r\")\n\tif filter == \"\" {\n\t\treturn filter\n\t}\n\tif filter[0] == '(' {\n\t\treturn filter\n\t}\n\treturn fmt.Sprintf(\"(%s)\", filter)\n}", "title": "" }, { "docid": "a0b512a7a59e83782cb35437241fe2bf", "score": "0.56821465", "text": "func IsValid(s string) bool {\n\tok := len(s) > 0 && (len(s)&1) == 0\n\tif !ok {\n\t\treturn ok\n\t}\n\tvar stack []int32\n\tfor _, v := range s {\n\t\tvar str int32\n\t\tswitch v {\n\t\tcase '}':\n\t\t\tstr = '{'\n\t\tcase ']':\n\t\t\tstr = '['\n\t\tcase ')':\n\t\t\tstr = '('\n\t\t}\n\t\tif str > 0 {\n\t\t\t// 右括号时候要检查栈中是否有左括号对应,没有的话就是不合法,有的话出栈\n\t\t\tif len(stack) == 0 || stack[len(stack)-1] != str{\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tstack = stack[0 : len(stack)-1]\n\t\t\tcontinue\n\t\t}\n\t\t// 左括号入栈\n\t\tstack = append(stack, v)\n\t}\n\treturn len(stack) == 0\n}", "title": "" }, { "docid": "9c21e033e6dfff84e2aea1dad671dd29", "score": "0.5628155", "text": "func bracketEscape(idf string, back bool) string {\n\t// escape brackets inside name=\"x[]\"\n\tif back {\n\t\tfor k, v := range trans {\n\t\t\tidf = strings.Replace(idf, v, k, -1)\n\t\t}\n\t\treturn idf\n\t}\n\tfor k, v := range trans {\n\t\tidf = strings.Replace(idf, k, v, -1)\n\t}\n\treturn idf\n}", "title": "" }, { "docid": "48e01c786585ba63dc0765b0c146baae", "score": "0.56265897", "text": "func (line *Line) BracketMatch(start, end rune, idx, dir, count int) (int, int) {\n\tif idx < 0 {\n\t\tidx += line.Length()\n\t}\n\tfor ; idx >= 0 && idx < line.Length(); idx += dir {\n\t\tr := line.GetChar(idx)\n\t\tif r == end {\n\t\t\tcount--\n\t\t}\n\t\tif r == start {\n\t\t\tcount++\n\t\t}\n\t\tif count == 0 {\n\t\t\treturn idx, count\n\t\t}\n\t}\n\treturn idx, count\n}", "title": "" }, { "docid": "4463912d56bd3e30f5111f1ec087cd22", "score": "0.5604742", "text": "func isValid(s string) bool {\n\tstk := stack{}\n\tfor i := 0; i < len(s); i++ {\n\t\tif s[i] == '(' {\n\t\t\tstk.push(1)\n\t\t} else if s[i] == ')' {\n\t\t\tv := stk.pop()\n\t\t\tif v == nil {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\treturn stk.isEmpty()\n}", "title": 
"" }, { "docid": "e87fa931cddc0edb1daa9b264bebbf65", "score": "0.5569427", "text": "func isCloseBra(r rune) bool {\n\treturn r == ')'\n}", "title": "" }, { "docid": "21f270bb6510790a09779a5d89c91278", "score": "0.55372286", "text": "func isValid(s string) bool {\n\tif s == \"\" {\n\t\treturn true\n\t}\n\tmaps := map[rune]rune{\n\t\t'{': '}',\n\t\t'[': ']',\n\t\t'(': ')',\n\t}\n\tstack := []rune{}\n\tfor _, char := range s {\n\t\tif char == '{' || char == '[' || char == '(' {\n\t\t\tstack = append(stack, char)\n\t\t} else {\n\t\t\tif len(stack) > 0 && maps[stack[len(stack)-1]] == char {\n\t\t\t\tstack = stack[:len(stack)-1]\n\t\t\t} else {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\treturn len(stack) == 0\n}", "title": "" }, { "docid": "9a9c507374cad763197dfab0f79ad07e", "score": "0.5510765", "text": "func (p *Parser) ParseBracket() (err error) {\n\ttoken := p.scnr.Peak()\n\tif !token.IsBracket() {\n\t\treturn errors.New(\"called ParseBracket without the beginning token being a bracket start\")\n\t}\n\n\ttoken.Data = trimBracketPrefix(token.Data)\n\n\tif token.IsBracket() {\n\t\ttoken.Data = trimBracketSuffix(token.Data)\n\t}\n\n\tp.scnr.SetToken(token.Data)\n\treturn nil\n}", "title": "" }, { "docid": "092070468152f9722c6c1a4a04f1f428", "score": "0.5456258", "text": "func isValid(s string) bool {\n\tn := len(s)\n\tif n % 2 == 1 {\n\t\treturn false\n\t}\n\n\tpairs := map[byte]byte {\n\t\t')':'(',\n\t\t']':'[',\n\t\t'}':'{',\n\t}\n\tstack := []byte{}\n\n\tfor i := 0; i < n; i++{\n\t\tif pair, ok := pairs[s[i]]; ok {\n\t\t\tif len(stack) == 0 || stack[len(stack)-1] != pair {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tstack = stack[:len(stack)-1]\n\t\t}else{\n\t\t\tstack = append(stack, s[i])\n\t\t}\n\t}\n\treturn len(stack) == 0\n\n\treturn true\n}", "title": "" }, { "docid": "795ebb72c73672079758f74484d7d981", "score": "0.5360919", "text": "func isValid(s string) bool {\n\tif len(s) == 0{\n\t\treturn true\n\t}\n\tif len(s)%2 == 1{\n\t\treturn false\n\t}\n\tif s[0]==')'||s[0]=='}'||s[0]==']'{\n\t\treturn false\n\t}\n\tvar st = \"\"\n\tfor i:=0;i<len(s);i++{\n\t\tif s[i]=='('||s[i]=='{'||s[i]=='['{\n\t\t\tst = st + string(s[i])\n\t\t\tcontinue\n\t\t}\n\t\tif s[i] == ']' && st[len(st)-1] != '['{\n\t\t\treturn false\n\t\t}\n\t\tif s[i] == ')' && st[len(st)-1] != '('{\n\t\t\treturn false\n\t\t}\n\t\tif s[i] == '}' && st[len(st)-1] != '{'{\n\t\t\treturn false\n\t\t}\n\t\tst = st[:len(st)-1]\n\t}\n\tif len(st) != 0{\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" }, { "docid": "2249405a4b7c5b8e07967f972ea1b68e", "score": "0.5337521", "text": "func TestLookupBrackets(t *testing.T) {\n\tq := \"InfoContent.Niinfo.IpAssignments[].IpAddress[0]\"\n\tvar received []string\n\tvar callback = func(inp reflect.Value) {\n\t\tt.Log(inp)\n\t\treceived = append(received, inp.String())\n\t}\n\tutils.LookupWithCallback(infoTest, q, callback)\n\tif len(received) != 2 {\n\t\tt.Errorf(\"expected %d values, received %d\", 2, len(received))\n\t}\n\tif !checkInSlice(ipAddress1, received) {\n\t\tt.Errorf(\"not %s in %s\", ipAddress1, received)\n\t}\n\tif !checkInSlice(ipAddress3, received) {\n\t\tt.Errorf(\"not %s in %s\", ipAddress3, received)\n\t}\n}", "title": "" }, { "docid": "bfe4f0a25edad59c3fac4465499ac489", "score": "0.5258954", "text": "func TestLookupBracket(t *testing.T) {\n\tq := \"InfoContent.Niinfo.IpAssignments[].MacAddress\"\n\tvar received []string\n\tvar callback = func(inp reflect.Value) {\n\t\tt.Log(inp)\n\t\tswitch inp.String() {\n\t\tcase macAddress1:\n\t\t\treceived = append(received, 
inp.String())\n\t\tcase macAddress2:\n\t\t\treceived = append(received, inp.String())\n\t\tdefault:\n\t\t\tt.Errorf(\"not expected value %s\", inp.String())\n\t\t}\n\t}\n\tutils.LookupWithCallback(infoTest, q, callback)\n\tif len(received) != 2 {\n\t\tt.Errorf(\"expected %d values, received %d\", 2, len(received))\n\t}\n\tif !checkInSlice(macAddress1, received) {\n\t\tt.Errorf(\"not %s in %s\", macAddress1, received)\n\t}\n\tif !checkInSlice(macAddress2, received) {\n\t\tt.Errorf(\"not %s in %s\", macAddress2, received)\n\t}\n}", "title": "" }, { "docid": "64a1c21d6ca490afbea47920d6dc26ca", "score": "0.5254185", "text": "func MultiBracket(f Func1D, left, right float64) ([][]float64, error) {\n\treturn bracketHelper(f, left, right, InitialBracketNumber, -1)\n}", "title": "" }, { "docid": "a9bbd416616a79014da6629f7578f23b", "score": "0.5253503", "text": "func isBalanced(s string) string {\n\tstack := []rune{}\n\n\tfor _, char := range s {\n\t\tswitch char {\n\t\tcase '{', '[', '(':\n\t\t\tstack = append(stack, char) // Push\n\t\tcase '}':\n\t\t\tif len(stack) == 0 || stack[len(stack)-1] != '{' {\n\t\t\t\treturn \"NO\"\n\t\t\t}\n\t\t\tstack = stack[:len(stack)-1] // Pop\n\t\tcase ']':\n\t\t\tif len(stack) == 0 || stack[len(stack)-1] != '[' {\n\t\t\t\treturn \"NO\"\n\t\t\t}\n\t\t\tstack = stack[:len(stack)-1] // Pop\n\t\tcase ')':\n\t\t\tif len(stack) == 0 || stack[len(stack)-1] != '(' {\n\t\t\t\treturn \"NO\"\n\t\t\t}\n\t\t\tstack = stack[:len(stack)-1] // Pop\n\t\t}\n\t}\n\tif len(stack) > 0 {\n\t\treturn \"NO\"\n\t}\n\treturn \"YES\"\n}", "title": "" }, { "docid": "4e103f395796cb6847cedca5143d2fa9", "score": "0.52145576", "text": "func isBalanced(s string) string {\n\tstackObj := &Stack{Tos: -1}\n\n\tfor i := 0; i < len(s); i++ {\n\t\tif string(s[i]) == \"(\" || string(s[i]) == \"[\" || string(s[i]) == \"{\" {\n\t\t\tstackObj.Push(string(s[i]))\n\t\t} else if string(s[i]) == \")\" || string(s[i]) == \"]\" || string(s[i]) == \"}\" {\n\n\t\t\tif string(s[i]) == \")\" && stackObj.Peek() == \"(\" {\n\t\t\t\terr := stackObj.Pop()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn \"NO\"\n\t\t\t\t}\n\t\t\t} else if string(s[i]) == \"]\" && stackObj.Peek() == \"[\" {\n\t\t\t\terr := stackObj.Pop()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn \"NO\"\n\t\t\t\t}\n\t\t\t} else if string(s[i]) == \"}\" && stackObj.Peek() == \"{\" {\n\t\t\t\terr := stackObj.Pop()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn \"NO\"\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\treturn \"NO\"\n\t\t\t}\n\n\t\t}\n\t}\n\tif stackObj.Tos == -1 {\n\t\treturn \"YES\"\n\t}\n\treturn \"NO\"\n}", "title": "" }, { "docid": "a9b016fe344fe512c5ff822af3f04e98", "score": "0.519412", "text": "func checkValidString(s string) bool {\n\tleft, right := 0, 0\n\n\tfor i := 0; i < len(s); i++ {\n\t\tif s[i] == '(' || s[i] == '*' {\n\t\t\tleft++\n\n\t\t} else {\n\t\t\tleft--\n\n\t\t}\n\n\t\tif left < 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\tfor i := len(s) - 1; i >= 0; i-- {\n\t\tif s[i] == ')' || s[i] == '*' {\n\t\t\tright++\n\n\t\t} else {\n\t\t\tright--\n\n\t\t}\n\n\t\tif right < 0 {\n\t\t\treturn false\n\t\t}\n\n\t}\n\n\treturn true\n\n}", "title": "" }, { "docid": "5c04a0a4928fc641c8be26f93988dbdb", "score": "0.51683074", "text": "func TestLookupBracketsEnds(t *testing.T) {\n\tq := \"InfoContent.Niinfo.IpAssignments[].IpAddress[]\"\n\tvar received []string\n\tvar callback = func(inp reflect.Value) {\n\t\tt.Log(inp)\n\t\treceived = append(received, inp.String())\n\t}\n\tutils.LookupWithCallback(infoTest, q, callback)\n\tif len(received) != 4 
{\n\t\tt.Errorf(\"expected %d values, received %d\", 4, len(received))\n\t}\n\tif !checkInSlice(ipAddress1, received) {\n\t\tt.Errorf(\"not %s in %s\", ipAddress1, received)\n\t}\n\tif !checkInSlice(ipAddress2, received) {\n\t\tt.Errorf(\"not %s in %s\", ipAddress2, received)\n\t}\n\tif !checkInSlice(ipAddress3, received) {\n\t\tt.Errorf(\"not %s in %s\", ipAddress3, received)\n\t}\n\tif !checkInSlice(ipAddress4, received) {\n\t\tt.Errorf(\"not %s in %s\", ipAddress4, received)\n\t}\n}", "title": "" }, { "docid": "2d4c9f0045dd21c20da4c920f443ca0a", "score": "0.51286286", "text": "func (s *BaseSearchParserListener) EnterBracketQueryCriterias(ctx *BracketQueryCriteriasContext) {}", "title": "" }, { "docid": "84feaa41fdaacbf867434a7ad9206a7a", "score": "0.5111243", "text": "func isCloseCurly(r rune) bool {\n\treturn r == '}'\n}", "title": "" }, { "docid": "71f07a1af8caa0f4fd2fb9af2ac8d903", "score": "0.5110577", "text": "func bracketHelper(f Func1D, left, right float64, bracketNum uint, maxBrackets int) ([][]float64, error) {\n\tif left == right {\n\t\treturn nil, errors.New(\"bracket error: must give two distinct points to find bracket\")\n\t}\n\tif left > right {\n\t\tleft, right = right, left\n\t}\n\txs := MakeRange(left, right, bracketNum)\n\tscale := xs[1] - xs[0]\n\tbrackets := [][]float64{}\n\tfor i, _ := range xs {\n\t\t// only get as many brackets as requested\n\t\tif maxBrackets > 0 && len(brackets) >= maxBrackets {\n\t\t\treturn brackets, nil\n\t\t}\n\t\t// don't check [endpoint, endpoint+scale] bracket\n\t\tif i >= len(xs)-1 {\n\t\t\tbreak\n\t\t}\n\t\t// check function values\n\t\tfa, fb := f(xs[i]), f(xs[i+1])\n\t\tif FuzzyEqual(fb, 0.0) {\n\t\t\tbrackets = append(brackets, []float64{xs[i], xs[i+1] + scale})\n\t\t} else if FuzzyEqual(fa, 0.0) {\n\t\t\tbrackets = append(brackets, []float64{xs[i] - scale, xs[i+1]})\n\t\t}\n\t\tif !sameSign(fa, fb) {\n\t\t\tbrackets = append(brackets, []float64{xs[i], xs[i+1]})\n\t\t}\n\t}\n\t// overshot bounds if without finding bracket if we get here\n\tif bracketNum >= MaxBracketNumber {\n\t\t// too many divisions\n\t\treturn nil, errors.New(ErrorNoBracket)\n\t}\n\t// not enough brackets - try again with smaller divisions\n\tif len(brackets) == 0 {\n\t\treturn bracketHelper(f, left, right, bracketNum*2, maxBrackets)\n\t}\n\treturn brackets, nil\n}", "title": "" }, { "docid": "c142af32beb07c128af88e5a8fee4675", "score": "0.50881165", "text": "func MatchParensSurrounded() MatcherFunc {\n\treturn MatchSurrounded(\"(\", \")\")\n}", "title": "" }, { "docid": "c142af32beb07c128af88e5a8fee4675", "score": "0.50881165", "text": "func MatchParensSurrounded() MatcherFunc {\n\treturn MatchSurrounded(\"(\", \")\")\n}", "title": "" }, { "docid": "69300001782000140ab108913a933eec", "score": "0.50722784", "text": "func TestRemoveOuterBraces(t *testing.T) {\n\ttestCases := []struct {\n\t\tinput string\n\t\texpectedOutput string\n\t}{\n\t\t{\"{hello,world}\", \"hello,world\"},\n\t\t{\"hello,world}\", \"hello,world}\"},\n\t\t{\"hello,{world}\", \"hello,{world}\"},\n\t}\n\n\tfor _, testCase := range testCases {\n\t\toutput := RemoveOuterBraces(testCase.input)\n\t\tif output != testCase.expectedOutput {\n\t\t\tt.Errorf(\"expected %s, got: %s\", testCase.expectedOutput, output)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "ccecbed2e853ade1cb8dd2121a52f712", "score": "0.5051686", "text": "func validSignature(sign string) bool {\n\t//No Repeats allowed\n\tfor _, signature := range currentSignatures {\n\t\tif sign == signature.Name {\n\t\t\treturn 
false\n\t\t}\n\t}\n\n\t//No Blanks allowed\n\treturn sign != \"\"\n}", "title": "" }, { "docid": "90d2adbbc44b51ad59a47c5268d385ca", "score": "0.5048923", "text": "func TestValidString() {\n\tfmt.Printf(\"Testing for Valid Parenthesis String: %t %t \\n\",\n\t\tcheckValidString(\")\") == false, checkValidString(\"(*))\") == true)\n}", "title": "" }, { "docid": "56c7be306b9856ab17e3744d1158e033", "score": "0.50454193", "text": "func (s *BaseSparqlListener) EnterBrackettedExpression(ctx *BrackettedExpressionContext) {}", "title": "" }, { "docid": "8db8f60867bf76fa2ae93304703a4e72", "score": "0.503567", "text": "func IsValid(s string) bool {\n\tif s == \"\" {\n\t\treturn true\n\t}\n\tarr := strings.Split(s, \"\")\n\tif len(arr)%2 != 0 {\n\t\treturn false\n\t}\n\tlastOpenBraces = []string{}\n\tfor _, b := range arr {\n\t\tif !isAllowedSymbol(b) || !isCorrectBrace(b) {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn len(lastOpenBraces) == 0\n}", "title": "" }, { "docid": "786fe9b8e4f8d2ab2ab645350784836a", "score": "0.50045764", "text": "func isOpenSqrt(r rune) bool {\n\treturn r == '['\n}", "title": "" }, { "docid": "8bf0d364c182ae32244c5f2a24c1be6c", "score": "0.50000143", "text": "func (m *Memory) removeInvalidParenthesis(input string, start int, lastRemoveIndex int, par []byte){\n\n\tfor stack, i := 0, start; i < len(input); i += 1{\n\t\t//increment the stack if open bracket\n\t\tif input[i] == par[0] {\n\t\t\tstack += 1\n\t\t}\n\t\t//decrement the stack if close bracket\n\t\tif input[i] == par[1]{\n\t\t\tstack -= 1\n\t\t}\n\t\t//if stack is positive, which means parenthesis is valid, so continue with the next element\n\t\t//It will take in other elements which are not brackets\n\t\tif stack >= 0{\n\t\t\tcontinue\n\t\t}\n\t\t//Else we need to remove the bracket and do recursive operation\n\t\t/*\n\t\tTo make the prefix valid, we need to remove a ‘)’. The problem is: which one? The answer is any one in the prefix.\n\t\tHowever, if we remove any one, we will generate duplicate results, for example: s = ()), we can remove s[1] or s[2]\n\t\tbut the result is the same (). Thus, we restrict oneself to remove the first ) in a series of consecutive )s.\n\n\t\t*/\n\t\tfor j := lastRemoveIndex; j <= i; j += 1{\n\t\t\tif input[j] == par[1] && (j == lastRemoveIndex || input[j-1] != par[1]){\n\t\t\t\t/*\n\t\t\t\tAfter the removal, the prefix is then valid. We then call the function recursively to solve the rest of the string.\n\t\t\t\tHowever, we need to keep another information: the last removal position. If we do not have this position,\n\t\t\t\twe will generate duplicate by removing two ‘)’ in two steps only with a different order.\n\t\t\t\tFor this, we keep tracking the last removal position and only remove ‘)’ after that.\n\t\t\t\t*/\n\t\t\t\tm.removeInvalidParenthesis(input[:j] + input[j+1:], i, j, par)\n\t\t\t}\n\t\t}\n\t\t/*\n\t\tDon't underestimate this return. It's very important\n\t\tif inside the outer loop, it reaches the above inner loop. You have scanned the str_to_check up to count_i\n\t\tIn the above inner loop, when construct the new_str_to_check, we include the rest chars after count_i\n\t\tand call remove with it.\n\t\tSo after the above inner loop finishes, we shouldn't allow the outer loop continue to next round because self.remove in the\n\t\tinner loop has taken care of the rest chars after count_i\n\t\t*/\n\t\treturn\n\t}\n\n\t/*\n\tNow one may ask. What about ‘(‘? 
What if s = ‘(()(()’ in which we need to remove ‘(‘?\n\tThe answer is: do the same from right to left.\n\tHowever, a cleverer idea is: reverse the string and reuse the code!\n\t*/\n\tinput = reverse(input)\n\n\tif par[0] == '('{\n\t\tm.removeInvalidParenthesis(input, 0, 0, []byte{')', '('})\n\t} else {\n\t\tm.list = append(m.list, input)\n\t}\n}", "title": "" }, { "docid": "20b0d5fa24fc794f3e4bcdbe793badb1", "score": "0.49861497", "text": "func indexOfFirstRightBracket(key string) int {\n\treturn (rightBracketRegExp.FindStringIndex(key)[1] - 1)\n}", "title": "" }, { "docid": "26a3e5074217c51d8ac2869c160a5463", "score": "0.4984282", "text": "func hasAz(tags []string) (string, bool) {\n\tfor _, t := range tags {\n\t\tmatch, _ := regexp.MatchString(\"us-(east|west)-[1|2][a|b|c]$\", t)\n\t\tif match {\n\t\t\treturn t, true\n\t\t}\n\t}\n\n\treturn \"\", false\n}", "title": "" }, { "docid": "31ac2d76d8b0111556a2e9e6f75727a8", "score": "0.49840203", "text": "func IsWellFormed(s string) bool {\n\tstack := new(RuneStack)\n\tfor _, c := range s {\n\t\tswitch p, ok := parenthesis[c]; {\n\t\tcase ok:\n\t\t\tstack.Push(p)\n\t\tcase stack.Len() == 0 || c != stack.Pop().(rune):\n\t\t\treturn false\n\t\t}\n\t}\n\tif stack.Len() != 0 {\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" }, { "docid": "621aa4c8bb4a4f99205b54adbaec85d7", "score": "0.49769145", "text": "func getSubstringInsideParentheses(s string) string {\n\ti := strings.Index(s, \"(\")\n\tif i >= 0 {\n\t\tj := strings.Index(s[i:], \")\")\n\t\tif j >= 0 {\n\t\t\treturn s[i+1 : j+i]\n\t\t}\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "8628c0e8132cbb430183d0e3b01d87c5", "score": "0.49716616", "text": "func startLoop() {\n\tif memory[memoryPointer] == 0 {\n\t\toldAddress := cursor\n\t\tbrackets := 0\n\t\tcursor++\n\t\tfor ; cursor < len(commands); cursor++ {\n\t\t\tswitch commands[cursor] {\n\t\t\tcase '[':\n\t\t\t\tbrackets++\n\t\t\tcase ']':\n\t\t\t\tif brackets == 0 {\n\t\t\t\t\treturn\n\t\t\t\t} else {\n\t\t\t\t\tbrackets--\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tpanic(\"missing closing bracket matching to bracket on position \" + strconv.Itoa(oldAddress))\n\t}\n}", "title": "" }, { "docid": "1cc6c0af30ebe57687f676e1b400bf7f", "score": "0.49641415", "text": "func substituteBrackets(s string) string {\n\tstr := strings.Replace(s, \"[\", \"\", -1)\n\treturn strings.Replace(str, \"]\", \"\", -1)\n}", "title": "" }, { "docid": "16856215d549ed36ea5b0cd4ef7166ac", "score": "0.4942209", "text": "func (m *matcher) Matches(in []byte) [][]byte {\n\tif len(in) == 0 {\n\t\treturn nil\n\t}\n\tif len(m.e) == 0 {\n\t\treturn nil\n\t}\n\tcaptures := m.captures[:0]\n\texpr := m.e\n\tif ls, ok := expr[0].(literals); ok {\n\t\ti := bytes.Index(in, ls)\n\t\tif i != 0 {\n\t\t\treturn nil\n\t\t}\n\t\tin = in[len(ls):]\n\t\texpr = expr[1:]\n\t}\n\tif len(expr) == 0 {\n\t\treturn nil\n\t}\n\t// from now we have capture - literals - capture ... 
(literals)?\n\tfor len(expr) != 0 {\n\t\tif len(expr) == 1 { // we're ending on a capture.\n\t\t\tif !(expr[0].(capture)).isUnamed() {\n\t\t\t\tcaptures = append(captures, in)\n\t\t\t}\n\t\t\treturn captures\n\t\t}\n\t\tcap := expr[0].(capture)\n\t\tls := expr[1].(literals)\n\t\texpr = expr[2:]\n\t\ti := bytes.Index(in, ls)\n\t\tif i == -1 {\n\t\t\t// if a capture is missed we return up to the end as the capture.\n\t\t\tif !cap.isUnamed() {\n\t\t\t\tcaptures = append(captures, in)\n\t\t\t}\n\t\t\treturn captures\n\t\t}\n\n\t\tif cap.isUnamed() {\n\t\t\tin = in[len(ls)+i:]\n\t\t\tcontinue\n\t\t}\n\t\tcaptures = append(captures, in[:i])\n\t\tin = in[len(ls)+i:]\n\t}\n\n\treturn captures\n}", "title": "" }, { "docid": "56895c934946001f664c40a88e7b3015", "score": "0.49352413", "text": "func validElement(element *string) bool {\n\tif len(*element) == 0 {\n\t\treturn false\n\t}\n\tif len(*element) >= 2 {\n\t\tif (*element)[len(*element)-2:] == \"[]\" {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "932b7c1071ce4b6557c51ef72da952a8", "score": "0.49308616", "text": "func (p *parser) acceptExpr8() (expr, *node.ErrorNode) {\n\tleft, err := p.acceptExpr9()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor {\n\t\tif p.accept(tokenSqOpen) {\n\t\t\tnpos := p.token.pos\n\t\t\tp.acceptBlanks()\n\t\t\tif p.accept(tokenColon) {\n\t\t\t\tn := &sliceNode{left, []expr{nil, nil}}\n\t\t\t\tp.acceptBlanks()\n\t\t\t\tif p.peek().typ != tokenSqClose {\n\t\t\t\t\texpr, err := p.acceptExpr1()\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\t\t\t\t\tn.rlist[1] = expr\n\t\t\t\t\tp.acceptBlanks()\n\t\t\t\t}\n\t\t\t\tif !p.accept(tokenSqClose) {\n\t\t\t\t\treturn nil, p.errorf(\n\t\t\t\t\t\t\"expected %s but got %s\",\n\t\t\t\t\t\ttokenName(tokenSqClose),\n\t\t\t\t\t\ttokenName(p.peek().typ),\n\t\t\t\t\t)\n\t\t\t\t}\n\t\t\t\tleft = node.NewPosNode(npos, n)\n\t\t\t} else {\n\t\t\t\tright, err := p.acceptExpr1()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tp.acceptBlanks()\n\t\t\t\tif p.accept(tokenColon) {\n\t\t\t\t\tn := &sliceNode{left, []expr{right, nil}}\n\t\t\t\t\tp.acceptBlanks()\n\t\t\t\t\tif p.peek().typ != tokenSqClose {\n\t\t\t\t\t\texpr, err := p.acceptExpr1()\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t\t}\n\t\t\t\t\t\tn.rlist[1] = expr\n\t\t\t\t\t\tp.acceptBlanks()\n\t\t\t\t\t}\n\t\t\t\t\tif !p.accept(tokenSqClose) {\n\t\t\t\t\t\treturn nil, p.errorf(\n\t\t\t\t\t\t\t\"expected %s but got %s\",\n\t\t\t\t\t\t\ttokenName(tokenSqClose),\n\t\t\t\t\t\t\ttokenName(p.peek().typ),\n\t\t\t\t\t\t)\n\t\t\t\t\t}\n\t\t\t\t\tleft = node.NewPosNode(npos, n)\n\t\t\t\t} else {\n\t\t\t\t\tn := &subscriptNode{left, right}\n\t\t\t\t\tp.acceptBlanks()\n\t\t\t\t\tif !p.accept(tokenSqClose) {\n\t\t\t\t\t\treturn nil, p.errorf(\n\t\t\t\t\t\t\t\"expected %s but got %s\",\n\t\t\t\t\t\t\ttokenName(tokenSqClose),\n\t\t\t\t\t\t\ttokenName(p.peek().typ),\n\t\t\t\t\t\t)\n\t\t\t\t\t}\n\t\t\t\t\tleft = node.NewPosNode(npos, n)\n\t\t\t\t}\n\t\t\t}\n\t\t} else if p.accept(tokenPOpen) {\n\t\t\tpos := p.token.pos\n\t\t\tn := &callNode{left, make([]expr, 0, 8)}\n\t\t\tp.acceptBlanks()\n\t\t\tif !p.accept(tokenPClose) {\n\t\t\t\tfor {\n\t\t\t\t\targ, err := p.acceptExpr1()\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\t\t\t\t\tn.rlist = append(n.rlist, arg)\n\t\t\t\t\tp.acceptBlanks()\n\t\t\t\t\tif p.accept(tokenComma) {\n\t\t\t\t\t\tp.acceptBlanks()\n\t\t\t\t\t\tif p.accept(tokenPClose) 
{\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if p.accept(tokenPClose) {\n\t\t\t\t\t\tbreak\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn nil, p.errorf(\n\t\t\t\t\t\t\t\"expected %s or %s but got %s\",\n\t\t\t\t\t\t\ttokenName(tokenComma),\n\t\t\t\t\t\t\ttokenName(tokenPClose),\n\t\t\t\t\t\t\ttokenName(p.peek().typ),\n\t\t\t\t\t\t)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tleft = node.NewPosNode(pos, n)\n\t\t} else if p.accept(tokenDot) {\n\t\t\tdot := p.token\n\t\t\tp.acceptBlanks()\n\t\t\tif !p.acceptIdentifierLike() {\n\t\t\t\treturn nil, p.errorf(\n\t\t\t\t\t\"expected %s but got %s\",\n\t\t\t\t\ttokenName(tokenIdentifier),\n\t\t\t\t\ttokenName(p.peek().typ),\n\t\t\t\t)\n\t\t\t}\n\t\t\tright := node.NewPosNode(p.token.pos, &identifierNode{p.token.val, false})\n\t\t\tleft = node.NewPosNode(dot.pos, &dotNode{left, right})\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn left, nil\n}", "title": "" }, { "docid": "69bedf0d47d94b8bbf4a38275f0d6fb6", "score": "0.4920051", "text": "func TestHasEllipses(t *testing.T) {\n\ttestCases := []struct {\n\t\targs []string\n\t\texpectedOk bool\n\t}{\n\t\t// Tests for all args without ellipses.\n\t\t{\n\t\t\t[]string{\"64\"},\n\t\t\tfalse,\n\t\t},\n\t\t// Found flower braces, still attempt to parse and throw an error.\n\t\t{\n\t\t\t[]string{\"{1..64}\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\"{1..2..}\"},\n\t\t\ttrue,\n\t\t},\n\t\t// Test for valid input.\n\t\t{\n\t\t\t[]string{\"1...64\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\"{1...2O}\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\"...\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\"{-1...1}\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\"{0...-1}\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\"{1....4}\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\"{1...64}\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\"{...}\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\"{1...64}\", \"{65...128}\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\"http://minio{2...3}/export/set{1...64}\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\n\t\t\t\t\"http://minio{2...3}/export/set{1...64}\",\n\t\t\t\t\"http://minio{2...3}/export/set{65...128}\",\n\t\t\t},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t[]string{\n\t\t\t\t\"mydisk-{a...z}{1...20}\",\n\t\t\t},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\n\t\t\t[]string{\n\t\t\t\t\"mydisk-{1...4}{1..2.}\",\n\t\t\t},\n\t\t\ttrue,\n\t\t},\n\t}\n\n\tfor i, testCase := range testCases {\n\t\tt.Run(fmt.Sprintf(\"Test%d\", i+1), func(t *testing.T) {\n\t\t\tgotOk := HasEllipses(testCase.args...)\n\t\t\tif gotOk != testCase.expectedOk {\n\t\t\t\tt.Errorf(\"Expected %t, got %t\", testCase.expectedOk, gotOk)\n\t\t\t}\n\t\t})\n\t}\n}", "title": "" }, { "docid": "bf57fa1066cfde1752b020aeac9f1e20", "score": "0.49024016", "text": "func findFirstStringInBracket(word string) string {\n\tlength := len(word)\n\tif length > 0 {\n\t\topenBracketIndex := strings.LastIndex(word, \"(\")\n\t\tclosingBrackerIndex := strings.Index(word, \")\")\n\t\tif openBracketIndex >= 0 && closingBrackerIndex >= 0 {\n\t\t\t// return the string between '(' and ')'\n\t\t\treturn string(word[openBracketIndex+1 : closingBrackerIndex])\n\t\t}\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "86fd3e4d729fb59767bd4dbd40476e7e", "score": "0.4866671", "text": "func MatchSurrounded(opening string, closure string) MatcherFunc {\n\treturn func(str string) Match {\n\t\tmetaEscapedOpening := regexp.QuoteMeta(opening)\n\t\tmetaEscapedClosure := regexp.QuoteMeta(closure)\n\t\tmatchPattern := fmt.Sprintf(\"%s[^%s]*%s\", 
metaEscapedOpening, metaEscapedOpening, metaEscapedClosure)\n\t\tr, _ := regexp.Compile(matchPattern)\n\t\treturn MatchRegexp(r)(str)\n\t}\n}", "title": "" }, { "docid": "3a60bfdfaeb1f31e6fb2ee97c2c6f313", "score": "0.48565727", "text": "func checkOperatorValid() token.Token {\n\toperator := \"\"\n\tvar tokens token.Token\n\n\tif isBound() && strings.Contains(operatorChar, peekChar()) {\n\t\toperator += eatChar()\n\t}\n\n\tif len(operator) > 0 {\n\t\ttokens = token.Token{ token.OPERATOR, operator }\n\t\treturn tokens\n\t}\n\n\treturn token.Token{ token.UNKNOWN, operator }\n}", "title": "" }, { "docid": "34f8026a77992a675ee6fdb6793d2a16", "score": "0.48544323", "text": "func Parens(match Parser) Parser { return Lexeme(Between(Symbol(\"(\"), Symbol(\")\"), match)) }", "title": "" }, { "docid": "dcffe223df873c93793f03aa257e72d6", "score": "0.47898614", "text": "func unescapeBrackets(str string) string {\n\t// DONE in this Go implementation: make \\] a regex\n\tif str == \"\" {\n\t\treturn str\n\t}\n\tstr = regexps.EscapedBracketRx.ReplaceAllString(str, \"]\")\n\treturn str\n}", "title": "" }, { "docid": "b29b365d00f169755c6ec3dff57fdd0f", "score": "0.47844505", "text": "func isCloseSqrt(r rune) bool {\n\treturn r == ']'\n}", "title": "" }, { "docid": "30f1a14479ed632f9171b6d50b4798e9", "score": "0.4781543", "text": "func ExpNoBracket(expression string) string {\n\tvar result string = \"0\"\n\treg := regexp.MustCompile(`^[+\\-*/\\d\\s.]+$`)\n\tif reg.MatchString(expression) {\n\t\tmulDivReg := regexp.MustCompile(`[\\d.]+[*/]+[\\d.]`)\n\t\tmulDivSignReg := regexp.MustCompile(`\\*|\\/`)\n\t\tcheckReg := regexp.MustCompile(`^[\\d.]+$`)\n\n\t\tfor {\n\t\t\tif !mulDivReg.MatchString(expression) {\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tif checkReg.MatchString(expression) {\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tfor _, frag := range mulDivReg.FindAllString(expression, -1) {\n\t\t\t\tfragQue := mulDivSignReg.Split(frag, -1)\n\t\t\t\tfrag1, frag2 := fragQue[0], fragQue[1]\n\t\t\t\tfragV1, _ := strconv.ParseFloat(frag1, 64)\n\t\t\t\tfragV2, _ := strconv.ParseFloat(frag2, 64)\n\n\t\t\t\trpl := \"\"\n\t\t\t\tif strings.Index(frag, \"/\") > -1 {\n\t\t\t\t\trpl = fmt.Sprintf(\"%v\", fragV1/fragV2)\n\t\t\t\t} else if strings.Index(frag, \"*\") > -1 {\n\t\t\t\t\trpl = fmt.Sprintf(\"%v\", fragV1*fragV2)\n\t\t\t\t}\n\n\t\t\t\texpression = strings.ReplaceAll(expression, frag, rpl)\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn result\n}", "title": "" }, { "docid": "202eee9fadb907dc3810395216b422c4", "score": "0.47805664", "text": "func isSubsequence(base, word string) bool {\n\tvar cursor int\n\tfor _, c := range word {\n\t\tvar matched bool\n\t\tfor i := cursor; i < len(base); i++ {\n\t\t\tif byte(c) == base[i] {\n\t\t\t\tcursor = i + 1\n\t\t\t\tmatched = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !matched {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "1c76c1c856588e3f0da0635436361cce", "score": "0.47740895", "text": "func (c Config) parseTokens(tokens []Token) (ASTNode, Error) { // nolint: gocyclo\n\t// a stack of control tag state, for matching nested {%if}{%endif%} etc.\n\ttype frame struct {\n\t\tsyntax BlockSyntax\n\t\tnode *ASTBlock\n\t\tap *[]ASTNode\n\t}\n\tvar (\n\t\tg = c.Grammar\n\t\troot = &ASTSeq{} // root of AST; will be returned\n\t\tap = &root.Children // newly-constructed nodes are appended here\n\t\tsd BlockSyntax // current block syntax definition\n\t\tbn *ASTBlock // current block node\n\t\tstack []frame // stack of blocks\n\t\trawTag *ASTRaw // current raw tag\n\t\tinComment = 
false\n\t\tinRaw = false\n\t)\n\tfor _, tok := range tokens {\n\t\tswitch {\n\t\t// The parser needs to know about comment and raw, because tags inside\n\t\t// needn't match each other e.g. {%comment%}{%if%}{%endcomment%}\n\t\t// TODO is this true?\n\t\tcase inComment:\n\t\t\tif tok.Type == TagTokenType && tok.Name == \"endcomment\" {\n\t\t\t\tinComment = false\n\t\t\t}\n\t\tcase inRaw:\n\t\t\tif tok.Type == TagTokenType && tok.Name == \"endraw\" {\n\t\t\t\tinRaw = false\n\t\t\t} else {\n\t\t\t\trawTag.Slices = append(rawTag.Slices, tok.Source)\n\t\t\t}\n\t\tcase tok.Type == ObjTokenType:\n\t\t\texpr, err := expressions.Parse(tok.Args)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, WrapError(err, tok)\n\t\t\t}\n\t\t\t*ap = append(*ap, &ASTObject{tok, expr})\n\t\tcase tok.Type == TextTokenType:\n\t\t\t*ap = append(*ap, &ASTText{Token: tok})\n\t\tcase tok.Type == TagTokenType:\n\t\t\tif cs, ok := g.BlockSyntax(tok.Name); ok {\n\t\t\t\tswitch {\n\t\t\t\tcase tok.Name == \"comment\":\n\t\t\t\t\tinComment = true\n\t\t\t\tcase tok.Name == \"raw\":\n\t\t\t\t\tinRaw = true\n\t\t\t\t\trawTag = &ASTRaw{}\n\t\t\t\t\t*ap = append(*ap, rawTag)\n\t\t\t\tcase cs.RequiresParent() && (sd == nil || !cs.CanHaveParent(sd)):\n\t\t\t\t\tsuffix := \"\"\n\t\t\t\t\tif sd != nil {\n\t\t\t\t\t\tsuffix = \"; immediate parent is \" + sd.TagName()\n\t\t\t\t\t}\n\t\t\t\t\treturn nil, Errorf(tok, \"%s not inside %s%s\", tok.Name, strings.Join(cs.ParentTags(), \" or \"), suffix)\n\t\t\t\tcase cs.IsBlockStart():\n\t\t\t\t\tpush := func() {\n\t\t\t\t\t\tstack = append(stack, frame{syntax: sd, node: bn, ap: ap})\n\t\t\t\t\t\tsd, bn = cs, &ASTBlock{Token: tok, syntax: cs}\n\t\t\t\t\t\t*ap = append(*ap, bn)\n\t\t\t\t\t}\n\t\t\t\t\tpush()\n\t\t\t\t\tap = &bn.Body\n\t\t\t\tcase cs.IsClause():\n\t\t\t\t\tn := &ASTBlock{Token: tok, syntax: cs}\n\t\t\t\t\tbn.Clauses = append(bn.Clauses, n)\n\t\t\t\t\tap = &n.Body\n\t\t\t\tcase cs.IsBlockEnd():\n\t\t\t\t\tpop := func() {\n\t\t\t\t\t\tf := stack[len(stack)-1]\n\t\t\t\t\t\tstack = stack[:len(stack)-1]\n\t\t\t\t\t\tsd, bn, ap = f.syntax, f.node, f.ap\n\t\t\t\t\t}\n\t\t\t\t\tpop()\n\t\t\t\tdefault:\n\t\t\t\t\tpanic(fmt.Errorf(\"block type %q\", tok.Name))\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t*ap = append(*ap, &ASTTag{tok})\n\t\t\t}\n\t\t}\n\t}\n\tif bn != nil {\n\t\treturn nil, Errorf(bn, \"unterminated %q block\", bn.Name)\n\t}\n\treturn root, nil\n}", "title": "" }, { "docid": "a11bce6eb3b04a31685942098ff2d586", "score": "0.4769116", "text": "func IsParenExpr(node ast.Node) bool {\n\t_, ok := node.(*ast.ParenExpr)\n\treturn ok\n}", "title": "" }, { "docid": "5f04e164477877c7676606bb22b983f7", "score": "0.4768545", "text": "func TestPush(t *testing.T) {\n\ttestCases := []int{\n\t\t10,\n\t\t100,\n\t\t1000,\n\t\t10000,\n\t}\n\n\tfor i, numElem := range testCases {\n\t\tstack := BraceStack{}\n\t\tfor i := 0; i < numElem; i++ {\n\t\t\tstack.Push(1)\n\t\t}\n\t\tif len(stack.braces) != numElem {\n\t\t\tt.Errorf(\"test %d: stack size \\\"%d\\\" != expected size \\\"%d\\\"\", i, len(stack.braces), numElem)\n\t\t\tt.Fail()\n\t\t}\n\t}\n}", "title": "" }, { "docid": "484ffe52a3ff6653ab856251edd63194", "score": "0.474109", "text": "func (this *tokenIterator) skip_to_bracket_pair() bool {\n\tright := this.token().tok\n\tleft := g_bracket_pairs[right]\n\treturn this.skip_to_left_bracket(left, right)\n}", "title": "" }, { "docid": "940ff7f9bd34c63a9fe4808259538882", "score": "0.4740036", "text": "func (s *BaseSparqlListener) ExitBrackettedExpression(ctx *BrackettedExpressionContext) {}", "title": "" }, 
{ "docid": "3dda98647b202183d06fd08ee0ad6868", "score": "0.4736296", "text": "func tryit(inputs, stack []rune) {\n\tif len(inputs) == 0 {\n\t\tvalidate(stack)\n\t\treturn\n\t}\n\n\tif inputs[0] == '*' {\n\t\t// try parentheses possibilities\n\t\tfor _, p := range possibilities {\n\t\t\tstack = append(stack, p)\n\t\t\ttryit(inputs[1:], stack)\n\t\t\tstack = stack[:len(stack)-1]\n\t\t}\n\t\t// try empty string\n\t\ttryit(inputs[1:], stack)\n\t\treturn\n\t}\n\tstack = append(stack, inputs[0])\n\ttryit(inputs[1:], stack)\n\tstack = stack[:len(stack)-1]\n}", "title": "" }, { "docid": "acdc96070836d0530a9af850d509a181", "score": "0.47302184", "text": "func stackContainsAll(spec string, count uintptr, stk []*profile.Location, labels map[string][]string) bool {\n\tfor _, f := range strings.Split(spec, \",\") {\n\t\tif !stackContains(f, count, stk, labels) {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "331320d26616e103a983cbee9a89124a", "score": "0.4713739", "text": "func (lr LanguageRange) Compatible(tag string) bool {\n\t_, err := language.Parse(tag)\n\tif err != nil {\n\t\treturn false\n\t}\n\tif lr.IsWildcard() {\n\t\treturn true\n\t}\n\tm := language.NewMatcher([]language.Tag{language.Und, lr.tag})\n\t//_, i, _ := m.Match(t)\n\t_, i := language.MatchStrings(m, tag)\n\treturn i != 0\n}", "title": "" }, { "docid": "e64ef393acc34892e7490f3e7bd0492e", "score": "0.47042298", "text": "func isValid(s string) bool {\n\tif len(s)%2 == 1 {\n\t\treturn false\n\t}\n\n\tvar (\n\t\tstack = make([]byte, len(s)+1)\n\t\ttop = 1 // skip stack[0], for keep available when calculate 'stack[top-1]' below\n\t\tvalid = true\n\t)\n\n\tfor i := range s {\n\t\tif v, ok := m[s[i]]; ok { // left side char\n\t\t\tstack[top] = v\n\t\t\ttop++\n\t\t\tcontinue\n\t\t}\n\n\t\t// right side char, not matched\n\t\tif top == 0 || s[i] != stack[top-1] {\n\t\t\tvalid = false\n\t\t\tbreak\n\t\t}\n\n\t\ttop--\n\t}\n\n\treturn valid && top == 1\n}", "title": "" }, { "docid": "2e4839a5a97392da79e9a6093043586f", "score": "0.4703417", "text": "func TestLength(t *testing.T) {\n\ttestCases := []int{\n\t\t10,\n\t\t100,\n\t\t1000,\n\t\t10000,\n\t}\n\n\tfor i, numElem := range testCases {\n\t\tstack := BraceStack{}\n\t\tfor i := 0; i < numElem; i++ {\n\t\t\tstack.braces = append(stack.braces, 1)\n\t\t}\n\t\tif stack.Length() != numElem {\n\t\t\tt.Errorf(\"test %d: stack size \\\"%d\\\" != expected size \\\"%d\\\"\", i, len(stack.braces), numElem)\n\t\t\tt.Fail()\n\t\t}\n\t}\n}", "title": "" }, { "docid": "63137912def4a3325d5acd96ac63c558", "score": "0.4701917", "text": "func FindBracket(f Func1D, left, right float64) (float64, float64, error) {\n\tbracket, err := bracketHelper(f, left, right, InitialBracketNumber, 1)\n\tif err != nil {\n\t\treturn 0.0, 0.0, err\n\t}\n\tbl, br := bracket[0][0], bracket[0][1]\n\treturn bl, br, err\n}", "title": "" }, { "docid": "1d208902c4981e8408afed011baa86d1", "score": "0.4701589", "text": "func validateText(text string) bool {\n\tdepth := 0\n\tlength := len(text)\n\t// Since all valid strings must be within at least one level of parens, the\n\t// minimum lenght of a valid string is 2 \"()\"\n\tif length < 2 {\n\t\treturn false\n\t}\n\tfor i, char := range text {\n\t\tif char == ' ' {\n\t\t\treturn false\n\t\t} else if char == '(' {\n\t\t\tdepth++\n\t\t} else if char == ')' {\n\t\t\tdepth--\n\t\t}\n\t\t// since all of the string must fall within the outer parens, our depth\n\t\t// should never drop below 1 until we get to the end\n\t\tif depth < 1 && i < length-1 
{\n\t\t\treturn false\n\t\t}\n\t}\n\t// at the end, the depth must be 0. This could instead be 'if depth == 0\n\t// return true' and have the last line return false, but I like to hit all\n\t// false cases first, then have true as the default at the bottom. If the\n\t// input has survived the gauntlet, then it is worthy.\n\tif depth != 0 {\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" }, { "docid": "1ac7d6893b9989b94726cb8a35ce3e05", "score": "0.4692046", "text": "func (s Stack) Match(i int, pkg string, anyFunc ...string) bool {\n\tif i < 0 {\n\t\ti = len(s) + i\n\t}\n\tif i < 0 || i >= len(s) {\n\t\treturn false\n\t}\n\n\tlevel := s[i]\n\n\tif pkg != \"\" && level.Package != pkg {\n\t\treturn false\n\t}\n\n\tif len(anyFunc) == 0 {\n\t\treturn true\n\t}\n\n\tfor _, fn := range anyFunc {\n\t\tif strings.HasSuffix(fn, \"*\") {\n\t\t\tif strings.HasPrefix(level.Func, fn[:len(fn)-1]) {\n\t\t\t\treturn true\n\t\t\t}\n\t\t} else if level.Func == fn {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "09149800f54fbe2004255d0da422edbb", "score": "0.4681408", "text": "func findFirstStringInBracket(str string) string {\n\tif len(str) > 0 {\n\t\tindexOpeningBracket := strings.Index(str,\"(\")\n\t\tindexClosingBracket := strings.Index(str,\")\")\n\n\t\tif (indexClosingBracket > indexOpeningBracket) && (indexOpeningBracket >=0 && indexClosingBracket >= 0) {\n\t\t\trunes := []rune(str)\n\t\t\treturn string(runes[indexOpeningBracket+1:indexClosingBracket])\n\t\t}\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "fece638322ac4457fb59e064f0eb999b", "score": "0.46714118", "text": "func Matches(t *testing.T, value, expr string) {\n\tif x := matches(2, value, expr); x != \"\" {\n\t\tfmt.Println(x)\n\t\tt.Fail()\n\t}\n}", "title": "" }, { "docid": "474f5b97a8552bead58e3188d97fcc70", "score": "0.46699774", "text": "func emptyStackScan(input, key string) int {\n\ts := make(istack, 0)\n\tptr := 1\n\ts.push(0)\n\tfor !s.empty() {\n\t\tch := input[ptr]\n\t\tif ch == '[' {\n\t\t\ts.push(ptr)\n\t\t} else if ch == ']' {\n\t\t\ts.pop()\n\t\t}\n\t\tptr += 1\n\t}\n\treturn ptr\n}", "title": "" }, { "docid": "a2779b9be0afff60fbf4a2debc8cd0a7", "score": "0.4666604", "text": "func ContainOuterNot(expr Expression) bool {\n\treturn containOuterNot(expr, false)\n}", "title": "" }, { "docid": "5f6456b1e6abc860b1ee7af75c9eeaf1", "score": "0.46663335", "text": "func (element *Element) EndBracket() *Element {\n\n\t// If we already have an end bracket, then skip\n\tif element.endBracket {\n\t\treturn element\n\t}\n\n\t// If this element is not a container, then this closes it permanently\n\tif !element.container {\n\t\telement.closed = true\n\t}\n\n\telement.endBracket = true\n\telement.builder.WriteRune('>')\n\treturn element\n}", "title": "" }, { "docid": "5a210b80714a055035255c2457377cbf", "score": "0.46612978", "text": "func isPrefixAltLiterals(r *syntax.Regexp) bool {\n\tisPrefixAlt := r.Op == syntax.OpConcat &&\n\t\tlen(r.Sub) == 2 &&\n\t\tr.Sub[0].Op == syntax.OpBeginText &&\n\t\tr.Sub[1].Op == syntax.OpAlternate\n\tif !isPrefixAlt {\n\t\treturn false\n\t}\n\n\tfor _, sub := range r.Sub[1].Sub {\n\t\tif sub.Op != syntax.OpLiteral {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "41300476d4664a182369e0a4342dd2e9", "score": "0.4659592", "text": "func anyTokenMatch(value rune, t ...Token) bool {\n\tfor _, token := range t {\n\t\tif token.MatchRune(value) {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": 
"511feed01d6381b1ba0c7f205ef4e30a", "score": "0.46373782", "text": "func isMatchBasicRegexNR(input, regex *string) bool {\n\tvar prevMatch byte = 0xFF\n\ts, p := *input, *regex\n\tsLen, pLen := len(s), len(p)\n\tj, k := 0, 0\n\n\tu.Debug(\"\\ninput: s = '%v', p = '%v'\\n\", s, p)\n\n\tfor pLen-k > 0 {\n\t\tu.Debug(\" ---: s[j] = '%v'[%v], p[k] = '%v'[%v], prevMatch = %c\\n\",\n\t\t\tu.GetSliceAtIndex(s, j), j, u.GetSliceAtIndex(p, k), k, prevMatch)\n\t\tif sLen-j == 0 {\n\t\t\tu.Debug(\" -*-: s[j] = '%v'[%v], p[k] = '%v'[%v], prevMatch = %c\\n\",\n\t\t\t\tu.GetSliceAtIndex(s, j), j, u.GetSliceAtIndex(p, k), k, prevMatch)\n\t\t\tif pLen-k == 1 || p[k+1] != '*' && p[k+1] != '.' {\n\t\t\t\treturn p[k] == prevMatch || p[k] == '.'\n\t\t\t}\n\t\t\tif p[k+1] == '.' {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tk += 2\n\t\t} else if pLen-k < 2 || p[k+1] != '*' {\n\t\t\tif sLen-j == 0 || s[j] != p[k] && p[k] != '.' {\n\t\t\t\t// the first chars do not match\n\t\t\t\treturn false\n\t\t\t}\n\t\t\t// both moving to next char\n\t\t\tj++\n\t\t\tk++\n\t\t\tprevMatch = 0xFF\n\t\t} else {\n\t\t\t// repeat until matched all chars with *\n\t\t\tfor sLen-j > 0 && (s[j] == p[k] || s[j] == prevMatch || p[k] == '.') {\n\t\t\t\tprevMatch = s[j]\n\t\t\t\tj++\n\t\t\t}\n\t\t\t// check the rest\n\t\t\tk += 2\n\t\t}\n\t}\n\n\treturn sLen-j == 0\n}", "title": "" }, { "docid": "88d2897bc2db245359d624f5533554ef", "score": "0.46354774", "text": "func (Empty) Matches(tags map[string]string) bool {\r\n\treturn true\r\n}", "title": "" }, { "docid": "66ee034ac75f6306d7b8b41a43f74b6f", "score": "0.4634622", "text": "func Advent01aParens(s string) (sum int) {\n\tfor _, x := range s {\n\t\tif x == '(' {\n\t\t\tsum++\n\t\t} else if x == ')' {\n\t\t\tsum--\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "c4cfeffcfba055866f8071dcc296b810", "score": "0.4628548", "text": "func (blacklist *Blacklist) containsInner(searchTree patternTreeNode, pattern []string, start int) bool {\n\tif start == len(pattern) {\n\t\treturn true\n\t}\n\n\t// searching for both exact and wildcard match\n\tfor _, patternPart := range []string{pattern[start], \"*\"} {\n\t\tif treeNode, exists := searchTree[patternPart]; exists {\n\t\t\tif blacklist.containsInner(treeNode, pattern, start+1) {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "4eca20cc90153fffd723a5793ff44526", "score": "0.4619191", "text": "func FindOpeningBracket(s string, openingBracket byte, closingBracketIndex int) int {\n\topenCnt := 1\n\ti := closingBracketIndex\n\tif i >= len(s) {\n\t\treturn -1\n\t}\n\tclosingBracket := s[i]\n\tfor i--; i >= 0; i-- {\n\t\tif s[i] == closingBracket {\n\t\t\topenCnt++\n\t\t} else if s[i] == openingBracket {\n\t\t\topenCnt--\n\t\t\tif openCnt == 0 {\n\t\t\t\treturn i\n\t\t\t}\n\t\t}\n\t}\n\treturn -1\n}", "title": "" }, { "docid": "e5e70b5e44a73f594cfa21694955623b", "score": "0.46167904", "text": "func endLoop() {\n\tif memory[memoryPointer] != 0 {\n\t\toldAddress := cursor\n\t\tbrackets := 0\n\t\tcursor--\n\t\tfor ; cursor >= 0; cursor-- {\n\t\t\tswitch commands[cursor] {\n\t\t\tcase '[':\n\t\t\t\tif brackets == 0 {\n\t\t\t\t\treturn\n\t\t\t\t} else {\n\t\t\t\t\tbrackets--\n\t\t\t\t}\n\t\t\tcase ']':\n\t\t\t\tbrackets++\n\t\t\t}\n\t\t}\n\t\tpanic(\"missing opening bracket matching to bracket on position \" + strconv.Itoa(oldAddress))\n\t}\n}", "title": "" }, { "docid": "fdbf941094bcac027ca91f26b3d90b5a", "score": "0.46038282", "text": "func (p *Parser) parseTags() TagsExpr {\n\tvar tags TagsExpr\n\n\tif p.scan() != 
LBRACKET {\n\t\tpanic(\"caller should have checked for left bracket\")\n\t}\n\n\tfor {\n\t\tif !p.scanToken(IDENT, \"tag name\") {\n\t\t\treturn nil\n\t\t}\n\n\t\ttags = append(tags, TagExpr(p.s.Literal()))\n\n\t\tif p.scan() == RBRACKET {\n\t\t\treturn tags\n\t\t}\n\n\t\tp.unscan()\n\t\tif !p.scanToken(COMMA, \"comma\") {\n\t\t\treturn nil\n\t\t}\n\t}\n}", "title": "" }, { "docid": "fe531202ef086a4aa3a7bd0ad1fa0ccf", "score": "0.46027932", "text": "func (s *BaseSearchParserListener) ExitBracketQueryCriterias(ctx *BracketQueryCriteriasContext) {}", "title": "" }, { "docid": "75428d4467dda176c67c88e46c376010", "score": "0.45891628", "text": "func parensAround(s string, plusIndex int) string {\n\tvar parenDepth, openParen, closeParen int\n\t// go backwards from the \"+\" to find where we should put the open paren\n\tfor openParen = plusIndex - 2; openParen > 0; openParen-- {\n\t\tswitch s[openParen] {\n\t\tcase ')':\n\t\t\tparenDepth++\n\t\tcase '(':\n\t\t\tparenDepth--\n\t\t}\n\t\tif parenDepth == 0 {\n\t\t\tbreak\n\t\t}\n\t}\n\t// go forwards from the \"+\" to find where we should put the close paren\n\tfor closeParen = plusIndex + 2; closeParen < len(s); closeParen++ {\n\t\tswitch s[closeParen] {\n\t\tcase ')':\n\t\t\tparenDepth--\n\t\tcase '(':\n\t\t\tparenDepth++\n\t\t}\n\t\tif parenDepth == 0 {\n\t\t\tbreak\n\t\t}\n\t}\n\tif closeParen == len(s) {\n\t\treturn s[:openParen] + \"(\" + s[openParen:] + \")\"\n\t}\n\treturn s[:openParen] + \"(\" + s[openParen:closeParen+1] + \")\" + s[closeParen+1:]\n}", "title": "" }, { "docid": "473a8660c28e9bd69943846e023d0c37", "score": "0.4578674", "text": "func (e *Expression) IsParenthesis() bool {\n\treturn e.typ == parenthesis\n}", "title": "" }, { "docid": "0b30d5b71c48af8540c713457bb21141", "score": "0.4576838", "text": "func (tok Token) isOperator() bool { return tok > operatorBegin && tok < operatorEnd }", "title": "" }, { "docid": "45ed14ca23486eaa932ff7d93bd9cf1e", "score": "0.4558468", "text": "func TestCharacters(t *testing.T) {\n\tsc := NewScanner(\";-(),\");\n\tif tok := sc.NextToken(); tok.Type() != token.SEMICOLON {\n\t\tt.Errorf(\"Error, expected token type: %d, but got: %d\", token.SEMICOLON, tok.Type());\n\t}\n\tif tok := sc.NextToken(); tok.Type() != token.HYPHEN {\n\t\tt.Errorf(\"Error, expected token type: %d, but got: %d\", token.HYPHEN, tok.Type());\n\t}\n\tif tok := sc.NextToken(); tok.Type() != token.OPENPAREN {\n\t\tt.Errorf(\"Error, expected token type: %d, but got: %d\", token.OPENPAREN, tok.Type());\n\t}\n\tif tok := sc.NextToken(); tok.Type() != token.CLOSEPAREN {\n\t\tt.Errorf(\"Error, expected token type: %d, but got: %d\", token.CLOSEPAREN, tok.Type());\n\t}\n\tif tok := sc.NextToken(); tok.Type() != token.COMMA {\n\t\tt.Errorf(\"Error, expected token type: %d, but got: %d\", token.COMMA, tok.Type());\n\t}\n}", "title": "" } ]
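The passages in the row closed above are all bracket- and parenthesis-matching helpers: stack-based validity checks, bracket searches, and token matchers. For orientation, a self-contained Go sketch of the stack idea that passages such as `isValid` implement — the pair table and names below are illustrative stand-ins, not copied from any one passage:

```go
package main

import "fmt"

// balanced reports whether every closing bracket in s matches the most
// recently opened one, using a slice as a stack of expected closers.
func balanced(s string) bool {
	pairs := map[byte]byte{'(': ')', '[': ']', '{': '}'}
	var stack []byte
	for i := 0; i < len(s); i++ {
		c := s[i]
		if closer, ok := pairs[c]; ok { // opener: remember the closer we expect later
			stack = append(stack, closer)
			continue
		}
		switch c {
		case ')', ']', '}': // closer: must match the top of the stack
			if len(stack) == 0 || stack[len(stack)-1] != c {
				return false
			}
			stack = stack[:len(stack)-1]
		}
	}
	return len(stack) == 0 // any leftover openers mean the string is unbalanced
}

func main() {
	fmt.Println(balanced("([]{})")) // true
	fmt.Println(balanced("([)]"))   // false
}
```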
5f12e6d5c02913e4e3f7c964b2838f7f
SentenceList returns list of sentences of lorem ipsum
[ { "docid": "482e51b154e0f9a3ac809a03be674adc", "score": "0.82616395", "text": "func (li *LoremIpsum) SentenceList(count int) []string {\n\tsentences := make([]string, count)\n\tfor idx := range sentences {\n\t\tsentences[idx] = li.Sentence()\n\t\tli.shuffle()\n\t}\n\treturn sentences\n}", "title": "" } ]
[ { "docid": "0dac65432899894685f3de600408fa6f", "score": "0.7119958", "text": "func (li *LoremIpsum) Sentences(count int) string {\n\treturn strings.Join(li.SentenceList(count), \" \")\n}", "title": "" }, { "docid": "d341cb1197891e0807253d5030ef6596", "score": "0.69101214", "text": "func (f *loremFaker) Sentences(rng *rand.Rand, num int) []string {\n\ts := make([]string, num)\n\tfor i := range s {\n\t\tvar b strings.Builder\n\t\tnumWords := randInt(rng, 4, 8)\n\t\tfor j := 0; j < numWords; j++ {\n\t\t\tword := f.words.Rand(rng).(string)\n\t\t\tif j == 0 {\n\t\t\t\tword = firstToUpper(word)\n\t\t\t}\n\t\t\tb.WriteString(word)\n\t\t\tif j == numWords-1 {\n\t\t\t\tb.WriteString(`.`)\n\t\t\t} else {\n\t\t\t\tb.WriteString(` `)\n\t\t\t}\n\t\t}\n\t\ts[i] = b.String()\n\t}\n\treturn s\n}", "title": "" }, { "docid": "3d36d193da14e1febbee6d1bd4b838a9", "score": "0.6767345", "text": "func (g Generator) Sentences(n int) []string {\n\tvar result []string\n\tl := len(g.Text)\n\tfor i := 0; i < n; i++ {\n\t\tresult = append(result, g.Text[i%l])\n\t}\n\treturn result\n}", "title": "" }, { "docid": "b756f1613b312e0df9912fffedb9a778", "score": "0.6435575", "text": "func (li *LoremIpsum) Sentence() string {\n\tfor {\n\t\tl := int(li.gauss(24.46, 5.08))\n\t\tif l > 0 {\n\t\t\twords := li.words[:l]\n\t\t\treturn li.punctuate(words)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "a9ebc460253eef424d317202a02d2c4f", "score": "0.62719804", "text": "func (fa FakeAdapter) Sentences() string {\n\treturn fake.Sentences()\n}", "title": "" }, { "docid": "fefc3cdafa9a6bd022cb7be402069fc7", "score": "0.6064348", "text": "func (li *LoremIpsum) WordList(count int) []string {\n\treturn li.words[:count]\n}", "title": "" }, { "docid": "16188284f4afa5349bd9c4c0982ba824", "score": "0.5969004", "text": "func (li *LoremIpsum) ParagraphList(count int) []string {\n\tparagraphs := make([]string, count)\n\tfor idx := range paragraphs {\n\t\tparagraphs[idx] = li.Paragraph()\n\t}\n\treturn paragraphs\n}", "title": "" }, { "docid": "71b1f20ee9c31655abecca3c3779dd39", "score": "0.5865644", "text": "func (r *Response) Sentences() []*Sentence {\n\treturn r.sentences\n}", "title": "" }, { "docid": "e61f6a21330f606bb0dd98957783d2f6", "score": "0.5844075", "text": "func getSentences() ([]database.Sentence, error) {\n\tcol := database.\n\t\tSession.\n\t\tCollection(config.Database.SentencesTableName)\n\tif _, err := col.Exists(); err != nil { // check sentencesTableName whether is exist\n\t\treturn nil, err\n\t}\n\tres := col.Find()\n\tvar result []database.Sentence\n\terr := res.All(&result)\n\treturn result, err\n}", "title": "" }, { "docid": "e497acde9360f7a86081dbcc534866d6", "score": "0.57508177", "text": "func SplitSentences(text string) []string {\n\tsplitText := RegexSplitSentences().ReplaceAllString(strings.TrimSpace(text), \"\\n\")\n\treturn strings.Split(splitText, \"\\n\")\n}", "title": "" }, { "docid": "2f871d9a91125e8f3e7f56e060736329", "score": "0.57484204", "text": "func generateSentences(S [][]string, text string) []string {\r\n\tG := make(map[string][]string)\r\n\tfor _, pair := range S {\r\n\t\tG[pair[0]] = append(G[pair[0]], pair[1])\r\n\t\tG[pair[1]] = append(G[pair[1]], pair[0])\r\n\t}\r\n\tansSet := make(map[string]bool)\r\n\tbfsq := []string{text}\r\n\tfor len(bfsq) > 0 {\r\n\t\tuText := bfsq[0]\r\n\t\tbfsq = bfsq[1:]\r\n\t\tansSet[uText] = true\r\n\t\twords := strings.Split(uText, \" \")\r\n\t\tfor i, word := range words {\r\n\t\t\tif _, ok := G[word]; ok == false {\r\n\t\t\t\tcontinue\r\n\t\t\t}\r\n\t\t\tfor _, u := 
range G[word] {\r\n\t\t\t\twords[i] = u\r\n\t\t\t\tvText := strings.Join(words, \" \")\r\n\t\t\t\tif _, ok := ansSet[vText]; ok {\r\n\t\t\t\t\tcontinue\r\n\t\t\t\t}\r\n\t\t\t\tansSet[vText] = true\r\n\t\t\t\tbfsq = append(bfsq, vText)\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\tans := []string{}\r\n\tfor k := range ansSet {\r\n\t\tans = append(ans, k)\r\n\t}\r\n\tsort.Strings(ans)\r\n\treturn ans\r\n}", "title": "" }, { "docid": "62aa4f3a2b70d3b0cfee0d88cc15a5e0", "score": "0.5654616", "text": "func (mnemonic Mnemonic) GetSentenceFrom(wordlist wordlist.Wordlist) ([]string, error) {\n\twords := make([]string, len(mnemonic.Sentence))\n\n\tfor i := 0; i < len(words); i++ {\n\t\tvar getErr error\n\t\twords[i], getErr = wordlist.GetWordAt(mnemonic.Sentence[i])\n\n\t\tif (getErr != nil) {\n\t\t\treturn []string{}, mnemonicError{Message: getErr.Error()}\n\t\t}\n\t}\n\n\treturn words[:], nil\n}", "title": "" }, { "docid": "d531ec56b1e9f57b9e5dcd4167abfeff", "score": "0.5510767", "text": "func (c *compound) Sentences() []Compound {\n\tconds, err := immutable.FastForward(c)\n\tif err == nil {\n\t\treturn *(conds.(*[]Compound))\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8598e1d69bca71543244e50ffc97894d", "score": "0.5435233", "text": "func splitSentences(str string, sentences []string) []string {\n\tconst (\n\t\tInputPunct = iota // sentence-terminating punctuation\n\t\tInputExtra // additional punctuation (one is optionionally consumed after punct if present)\n\t\tInputSpace // whitespace\n\t\tInputAny // any valid rune not previously matched\n\t\tInputInvalid // an invalid byte\n\t\tInputEOS // end-of-string\n\t)\n\tconst (\n\t\tOutputNone = iota // moves to the next rune.\n\t\tOutputNext // adds everything from the last call up to (but not including) the current rune, and moves to the next rune.\n\t\tOutputRest // adds everything not yet added by OutputNext (state must be -1)\n\t)\n\tconst (\n\t\tStateDefault = iota // in a sentence\n\t\tStateAfterPunct // after the sentence-terminating rune\n\t\tStateAfterPunctExtra // after the optional additional punctuation rune\n\t\tStateAfterSpace // the trailing whitespace after the sentence\n\t)\n\n\tif sentences == nil {\n\t\tsentences = make([]string, 0, 4) // pre-allocate some room\n\t}\n\n\tfor i, state := 0, 0; state != -1; {\n\t\tx, z := utf8.DecodeRuneInString(str[i:])\n\n\t\tvar input int\n\t\tswitch x {\n\t\tcase utf8.RuneError:\n\t\t\tswitch z {\n\t\t\tcase 0:\n\t\t\t\tinput = InputEOS\n\t\t\tdefault:\n\t\t\t\tinput = InputInvalid\n\t\t\t}\n\t\tcase '.', '!', '?':\n\t\t\tinput = InputPunct\n\t\tcase '\\'', '\"', '”', '’', '“', '…':\n\t\t\tinput = InputExtra\n\t\tcase '\\t', '\\n', '\\f', '\\r', ' ': // \\s only matches only ASCII whitespace\n\t\t\tinput = InputSpace\n\t\tdefault:\n\t\t\tinput = InputAny\n\t\t}\n\n\t\tvar output int\n\t\tswitch state {\n\t\tcase StateDefault:\n\t\t\tswitch input {\n\t\t\tcase InputPunct:\n\t\t\t\toutput, state = OutputNone, StateAfterPunct\n\t\t\tcase InputExtra:\n\t\t\t\toutput, state = OutputNone, StateDefault\n\t\t\tcase InputSpace:\n\t\t\t\toutput, state = OutputNone, StateDefault\n\t\t\tcase InputAny:\n\t\t\t\toutput, state = OutputNone, StateDefault\n\t\t\tcase InputInvalid:\n\t\t\t\toutput, state = OutputNone, StateDefault\n\t\t\tcase InputEOS:\n\t\t\t\toutput, state = OutputRest, -1\n\t\t\tdefault:\n\t\t\t\tpanic(\"unhandled input\")\n\t\t\t}\n\t\tcase StateAfterPunct:\n\t\t\tswitch input {\n\t\t\tcase InputPunct:\n\t\t\t\toutput, state = OutputNone, StateAfterPunct\n\t\t\tcase InputExtra:\n\t\t\t\toutput, 
state = OutputNone, StateAfterPunctExtra\n\t\t\tcase InputSpace:\n\t\t\t\toutput, state = OutputNone, StateAfterSpace\n\t\t\tcase InputAny:\n\t\t\t\toutput, state = OutputNone, StateDefault\n\t\t\tcase InputInvalid:\n\t\t\t\toutput, state = OutputNone, StateDefault\n\t\t\tcase InputEOS:\n\t\t\t\toutput, state = OutputRest, -1\n\t\t\tdefault:\n\t\t\t\tpanic(\"unhandled input\")\n\t\t\t}\n\t\tcase StateAfterPunctExtra:\n\t\t\tswitch input {\n\t\t\tcase InputPunct:\n\t\t\t\toutput, state = OutputNone, StateAfterPunct\n\t\t\tcase InputExtra:\n\t\t\t\toutput, state = OutputNone, StateDefault\n\t\t\tcase InputSpace:\n\t\t\t\toutput, state = OutputNone, StateAfterSpace\n\t\t\tcase InputAny:\n\t\t\t\toutput, state = OutputNone, StateDefault\n\t\t\tcase InputInvalid:\n\t\t\t\toutput, state = OutputNone, StateDefault\n\t\t\tcase InputEOS:\n\t\t\t\toutput, state = OutputRest, -1\n\t\t\tdefault:\n\t\t\t\tpanic(\"unhandled input\")\n\t\t\t}\n\t\tcase StateAfterSpace:\n\t\t\tswitch input {\n\t\t\tcase InputPunct:\n\t\t\t\toutput, state = OutputNext, StateAfterPunct\n\t\t\tcase InputExtra:\n\t\t\t\toutput, state = OutputNext, StateDefault\n\t\t\tcase InputSpace:\n\t\t\t\toutput, state = OutputNone, StateAfterSpace\n\t\t\tcase InputAny:\n\t\t\t\toutput, state = OutputNext, StateDefault\n\t\t\tcase InputInvalid:\n\t\t\t\toutput, state = OutputNext, StateDefault\n\t\t\tcase InputEOS:\n\t\t\t\toutput, state = OutputRest, -1\n\t\t\tdefault:\n\t\t\t\tpanic(\"unhandled input\")\n\t\t\t}\n\t\tdefault:\n\t\t\tpanic(\"unhandled state\")\n\t\t}\n\n\t\tswitch output {\n\t\tcase OutputNone:\n\t\t\ti += z\n\t\tcase OutputNext:\n\t\t\tsentences = append(sentences, str[:i])\n\t\t\tstr, i = str[i:], z\n\t\tcase OutputRest:\n\t\t\tif len(str) != 0 || len(sentences) == 0 {\n\t\t\t\tsentences = append(sentences, str)\n\t\t\t}\n\t\t\tif state != -1 {\n\t\t\t\tpanic(\"invalid state\")\n\t\t\t}\n\t\tdefault:\n\t\t\tpanic(\"unhandled output\")\n\t\t}\n\t}\n\n\treturn sentences\n}", "title": "" }, { "docid": "d3d9640275d039119feab1f39e33ac9c", "score": "0.53551775", "text": "func getWordExceptionSentences(customerSentences, staffSentences []string, enterprise string, sqlLike model.SqlLike) ([]model.SimpleSentence, []model.SimpleSentence, error) {\n\tcustomerException := []model.SimpleSentence{}\n\tstaffException := []model.SimpleSentence{}\n\tvar sq *model.SentenceQuery\n\tvar deleted int8\n\n\tif len(customerSentences) > 0 {\n\t\tsq = &model.SentenceQuery{\n\t\t\tUUID: customerSentences,\n\t\t\tIsDelete: &deleted,\n\t\t\tEnterprise: &enterprise,\n\t\t\tLimit: 100,\n\t\t}\n\n\t\tcustomerExceptionSentences, err := sentenceDao.GetSentences(sqlLike, sq)\n\t\tif err != nil {\n\t\t\treturn customerException, staffException, err\n\t\t}\n\t\tcustomerException = model.ToSimpleSentences(customerExceptionSentences)\n\t}\n\n\tif len(staffSentences) > 0 {\n\t\tsq = &model.SentenceQuery{\n\t\t\tUUID: staffSentences,\n\t\t\tIsDelete: &deleted,\n\t\t\tEnterprise: &enterprise,\n\t\t\tLimit: 100,\n\t\t}\n\t\tstaffExceptionSentences, err := sentenceDao.GetSentences(sqlLike, sq)\n\t\tif err != nil {\n\t\t\treturn customerException, staffException, err\n\t\t}\n\t\tstaffException = model.ToSimpleSentences(staffExceptionSentences)\n\t}\n\n\treturn customerException, staffException, nil\n}", "title": "" }, { "docid": "206104bdc49a00893e2addbee15a94ef", "score": "0.5332592", "text": "func (t *Text) buildSentences(i string) {\n\tfor o, rs := range gotokenizer.Sentences(i) {\n\t\tvar tokens []string = t.getTokens(rs)\n\t\tvar s sentence = 
sentence{\n\t\t\traw: rs,\n\t\t\tlengthRaw: float64(len(rs)),\n\t\t\ttokens: tokens,\n\t\t\tlengthTokens: float64(len(tokens)),\n\t\t\torder: o + 1,\n\t\t}\n\n\t\tif len(s.raw) > 0 {\n\t\t\tt.sentences = append(t.sentences, s)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "668b47807a10ec7dafa66fcdf0228739", "score": "0.53271204", "text": "func (li *LoremIpsum) Paragraphs(count int) string {\n\treturn strings.Join(li.ParagraphList(count), \"\\n\")\n}", "title": "" }, { "docid": "18c39fcec2c5f6d8adfa8ca571f2c5bd", "score": "0.53238", "text": "func TestSentences(t *testing.T) {\n\t// Test a simple sentence.\n\ttranslation, err := TranslateSentence(\"I ate the popcorn.\")\n\tassert.Nil(t, err)\n\tassert.Equal(t, \"Gi gate ethogo opcornpogo.\", translation)\n\n\t// Test a sentence that doesn't end with a supported punctuation symbol.\n\t_, err = TranslateSentence(\"I ate the popcorn\")\n\tassert.NotNil(t, err)\n\n\t// Test a sentence with a lot of punctuation.\n\ttranslation, err = TranslateSentence(\"I (ate) the, :)popcorn.\")\n\tassert.Nil(t, err)\n\tassert.Equal(t, \"Gi (gate) ethogo, :)opcornpogo.\", translation)\n}", "title": "" }, { "docid": "8aaf2090f7f6659011d4d60510b24cac", "score": "0.53194016", "text": "func (li *LoremIpsum) Words(count int) string {\n\treturn strings.Join(li.WordList(count), \" \")\n}", "title": "" }, { "docid": "97219d91cb5b7149c4bb2ce4a2629f7c", "score": "0.5162904", "text": "func WordList(srcBytes []byte) [][]byte {\n\toutList := make([][]byte, 0, 0)\n\tstopWords := [][]byte{\n\t\t[]byte(\"and\"),\n\t\t[]byte(\"that\"),\n\t\t[]byte(\"the\"),\n\t\t[]byte(\"this\"),\n\t\t[]byte(\"via\")}\n\n\tinputList := bytes.Split(srcBytes, []byte(\" \"))\n\tfor _, w := range inputList {\n\t\tif len(w) > 2 && containsBytes(stopWords, w) == false {\n\t\t\toutList = append(outList, w)\n\t\t}\n\t}\n\treturn outList\n}", "title": "" }, { "docid": "69d52946a8fd52b7ac47066b642f0b43", "score": "0.5158462", "text": "func (fa FakeAdapter) Sentence() string {\n\treturn fake.Sentence()\n}", "title": "" }, { "docid": "97a07d32472b63321d24864ac7b87f42", "score": "0.51560825", "text": "func (s *DefaultSentenceTokenizer) Tokenize(text string) []*Sentence {\n\tannotatedTokens := s.AnnotatedTokens(text)\n\n\tlastBreak := 0\n\tsentences := make([]*Sentence, 0, len(annotatedTokens))\n\tfor _, token := range annotatedTokens {\n\t\tif !token.SentBreak {\n\t\t\tcontinue\n\t\t}\n\n\t\tsentence := &Sentence{lastBreak, token.Position, text[lastBreak:token.Position]}\n\t\tsentences = append(sentences, sentence)\n\n\t\tlastBreak = token.Position\n\t}\n\n\tif lastBreak != len(text) {\n\t\tlastChar := len(text)\n\t\tsentence := &Sentence{lastBreak, lastChar, text[lastBreak:lastChar]}\n\t\tsentences = append(sentences, sentence)\n\t}\n\n\treturn sentences\n}", "title": "" }, { "docid": "f35cdc935adaf0b593da14144d688328", "score": "0.5111902", "text": "func (ch Chunk) Text() []string {\n\tres := []string{}\n\tfor _, s := range ch.Sents {\n\t\tres = append(res, s.Sentence.Text)\n\t}\n\treturn res\n}", "title": "" }, { "docid": "784aecc1788dc69893d0142595d58ff2", "score": "0.5001779", "text": "func NewHoldsText_List(s *capnp.Segment, sz int32) (HoldsText_List, error) {\n\tl, err := capnp.NewCompositeList(s, capnp.ObjectSize{DataSize: 0, PointerCount: 3}, sz)\n\treturn HoldsText_List{l}, err\n}", "title": "" }, { "docid": "784aecc1788dc69893d0142595d58ff2", "score": "0.5001779", "text": "func NewHoldsText_List(s *capnp.Segment, sz int32) (HoldsText_List, error) {\n\tl, err := capnp.NewCompositeList(s, 
capnp.ObjectSize{DataSize: 0, PointerCount: 3}, sz)\n\treturn HoldsText_List{l}, err\n}", "title": "" }, { "docid": "8859328eb5cbcde2d8b5c80d580901af", "score": "0.49773416", "text": "func parseSentence(sentence string) []markovchain.Token {\n\t// Straightforward solution.\n\tparsed := strings.Fields(sentence)\n\tif len(parsed) == 0 {\n\t\treturn nil\n\t}\n\n\ttokens := make([]markovchain.Token, 0, len(parsed)+2)\n\ttokens = append(tokens, startToken)\n\tfor _, chunk := range parsed {\n\t\ttokens = append(tokens, markovchain.Token(chunk))\n\t}\n\ttokens = append(tokens, endToken)\n\n\treturn tokens\n}", "title": "" }, { "docid": "8fe79857595f526ee990b43dbcf6cf1e", "score": "0.4956897", "text": "func Tokenize(text string) []string {\n\twords := wordRe.FindAllString(text, -1)\n\t// var tokens []string\n\t// 75 percentils of sentences have 15 or less tokens\n\ttokens := make([]string, 0, 15)\n\tfor _, w := range words {\n\t\ttoken := strings.ToLower(w)\n\t\ttoken = stemmer.Stem(token)\n\t\tif token != \"\" {\n\t\t\ttokens = append(tokens, token)\n\t\t}\n\t}\n\treturn tokens\n}", "title": "" }, { "docid": "8be9ddaccc43cadd2038dd40b0cc5517", "score": "0.4909739", "text": "func countSentencesWithWords(s *goquery.Selection, l int) int {\n\tsentences := getSentences(s.Text())\n\tcount := 0\n\n\tfor _, s := range sentences {\n\t\tif countWords(s) > l {\n\t\t\tcount++\n\t\t}\n\t}\n\n\treturn count\n}", "title": "" }, { "docid": "fe96415f3cedaa101c7e884aefc870fa", "score": "0.485514", "text": "func GenerateCandidateKeywords(sentenceList []string, stopWordPattern *regexp.Regexp) []string {\n\tphraseList := []string{}\n\n\tfor _, sentence := range sentenceList {\n\t\ttmp := stopWordPattern.ReplaceAllString(strings.TrimSpace(sentence), \"|\")\n\n\t\tmultipleWhiteSpaceRe := regexp.MustCompile(`\\s\\s+`)\n\t\ttmp = multipleWhiteSpaceRe.ReplaceAllString(strings.TrimSpace(tmp), \" \")\n\n\t\tphrases := strings.Split(tmp, \"|\")\n\t\tfor _, phrase := range phrases {\n\t\t\tphrase = strings.ToLower(strings.TrimSpace(phrase))\n\t\t\tif phrase != \"\" {\n\t\t\t\tphraseList = append(phraseList, phrase)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn phraseList\n}", "title": "" }, { "docid": "4372399895e17de8796c0a17890e5260", "score": "0.48224217", "text": "func (fa FakeAdapter) Paragraphs() string {\n\treturn fake.Paragraphs()\n}", "title": "" }, { "docid": "b34dcd6d254f9e68e8cb3ad323ff7f86", "score": "0.48128462", "text": "func Tokenize(Doc string) []string {\n\twordList := []string{}\n\n\t// The following regexp finds individual\n\t// words in a sentence\n\tr := regexp.MustCompile(\"[^\\\\s]+\")\n\twordList = r.FindAllString(Doc, -1)\n\n\twordList = Preprocessing(wordList)\n\twordList = RemoveDuplicates(wordList)\n\n\treturn wordList\n}", "title": "" }, { "docid": "8a89382fb35b563b55bd3dbd4fbf5fe2", "score": "0.47653386", "text": "func convArticleList(a *Article) []string {\n\tp := make([]string, 0)\n\tresult := make([]string, 0)\n\ttSplit := strings.Split(a.Title, \" \")\n\tsSplit := strings.Split(a.SubTitle, \" \")\n\tcSplit := strings.Split(a.Content, \" \")\n\tp = append(p, tSplit...)\n\tp = append(p, sSplit...)\n\tp = append(p, cSplit...)\n\tfor _, e := range p {\n\t\tresult = append(result, strings.ToLower(e))\n\t}\n\treturn result\n}", "title": "" }, { "docid": "ac99978d0d70b39d9f2b7156dd037f47", "score": "0.47019264", "text": "func GetSnippetsFromText(text string) []string {\n re := regexp.MustCompile(\"\\n|\\r\")\n snippets := re.Split(text, -1)\n\n return snippets\n}", "title": "" }, { "docid": 
"e1900c8848f01794be5a94f3c9286e6d", "score": "0.46969995", "text": "func (b *Article1) Tokenize() ([]WordGroupToken, int) {\n\n\tif b.Ps == nil || len(b.Ps) == 0 {\n\t\tpanic(\"Fill Article before tokenizing it\")\n\t}\n\n\tcntrTokens := 0\n\tsumSize := 0\n\tvar all []WordGroupToken\n\n\tfor i := 0; i < len(b.Ps); i++ {\n\n\t\t// Even longer headlines should never by broken up\n\t\tif len(b.H2s[i]) > 1 {\n\t\t\tall = append(all, make([]WordGroupToken, 1)...)\n\t\t\tall[cntrTokens].SemanticStart = \"h2\"\n\t\t\tall[cntrTokens].SemanticEnd = \"h2\"\n\t\t\tall[cntrTokens].Text = b.H2s[i]\n\t\t\tall[cntrTokens].Size = len(b.H2s[i])\n\t\t\tsumSize += len(b.H2s[i])\n\t\t\tcntrTokens++\n\t\t}\n\n\t\t// todo: This inefficient\n\t\t// we rather want to split in *one* go\n\t\t// and by multiple separators\n\t\t// and want to retain the separators too\n\t\tvar sentences []string\n\t\tsentences = strings.SplitAfter(*b.Ps[i], \".\")\n\t\tsentences = RecombineShortTokens(sentences, 15)\n\t\tsentences = SplitFurther(sentences, \",\")\n\t\tsentences = RecombineShortTokens(sentences, 15)\n\t\tsentences = SplitFurther(sentences, \";\")\n\t\tsentences = SplitFurther(sentences, \"!\")\n\t\tsentences = SplitFurther(sentences, \"?\")\n\t\tsentences = SplitFurther(sentences, \"<br/>\")\n\t\tsentences = RecombineShortTokens(sentences, 15)\n\n\t\t// dump := util.IndentedDump(sentences)\n\t\t// pf((*dump))\n\n\t\tnumRefinedSentences := len(sentences)\n\t\tall = append(all, make([]WordGroupToken, numRefinedSentences)...)\n\t\tfor j := 0; j < numRefinedSentences; j++ {\n\n\t\t\ts := strings.TrimSpace(sentences[j])\n\n\t\t\t//\n\t\t\t// overhung from last block\n\t\t\tif strings.HasPrefix(s, \"</p>\") {\n\t\t\t\tif cntrTokens > 0 {\n\t\t\t\t\tall[cntrTokens-1].SemanticEnd = \"p\"\n\t\t\t\t}\n\t\t\t\ts = strings.TrimPrefix(s, \"</p>\")\n\t\t\t}\n\n\t\t\t//\n\t\t\t// regular\n\t\t\tif strings.HasPrefix(s, \"<p>\") {\n\t\t\t\tall[cntrTokens].SemanticStart = \"p\"\n\t\t\t\ts = strings.TrimPrefix(s, \"<p>\")\n\t\t\t}\n\t\t\tif strings.HasSuffix(s, \"</p>\") {\n\t\t\t\tall[cntrTokens].SemanticEnd = \"p\"\n\t\t\t\ts = strings.TrimSuffix(s, \"</p>\")\n\t\t\t}\n\t\t\tif strings.HasSuffix(s, \"<br/>\") {\n\t\t\t\tall[cntrTokens].SemanticStart = \"br\"\n\t\t\t\tall[cntrTokens].SemanticEnd = \"br\"\n\t\t\t\ts = strings.TrimSuffix(s, \"<br/>\")\n\t\t\t}\n\n\t\t\tall[cntrTokens].Text = s\n\t\t\tall[cntrTokens].Size = len(s)\n\t\t\tsumSize += len(s)\n\n\t\t\tcntrTokens++\n\t\t}\n\t}\n\n\t// dump := util.IndentedDump(all)\n\t// pf((*dump))\n\n\treturn all, sumSize\n}", "title": "" }, { "docid": "070c208ec4c8f5ce48725ff492fcbe68", "score": "0.46881238", "text": "func isList(s prose.Sentence) bool {\n\tfor _, p := range listPrefixes {\n\t\tif strings.HasPrefix(s.Text, p) {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "f9f938eb516a6cf6d3e063c3f7c91f83", "score": "0.46821356", "text": "func searchList(lines []token.LineToken, index int) token.ListParagraph {\n\tlist := token.ListParagraph{}\n\n\tfor i := index; ; i++ {\n\t\tif i >= len(lines) { //EOF\n\t\t\tbreak\n\t\t}\n\n\t\tif line, ok := lines[i].(token.ListLine); ok {\n\t\t\tlist.Items = append(list.Items, line)\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn list\n}", "title": "" }, { "docid": "8041a328f70d769712649625735f2251", "score": "0.46808892", "text": "func (s HoldsText) NewLst(n int32) (capnp.TextList, error) {\n\tl, err := capnp.NewTextList(s.Struct.Segment(), n)\n\tif err != nil {\n\t\treturn capnp.TextList{}, err\n\t}\n\terr = 
s.Struct.SetPtr(1, l.List.ToPtr())\n\treturn l, err\n}", "title": "" }, { "docid": "8041a328f70d769712649625735f2251", "score": "0.46808892", "text": "func (s HoldsText) NewLst(n int32) (capnp.TextList, error) {\n\tl, err := capnp.NewTextList(s.Struct.Segment(), n)\n\tif err != nil {\n\t\treturn capnp.TextList{}, err\n\t}\n\terr = s.Struct.SetPtr(1, l.List.ToPtr())\n\treturn l, err\n}", "title": "" }, { "docid": "3433f31dbf9a14573f7eaf9fdfaf7163", "score": "0.467964", "text": "func (p PunktSentenceTokenizer) Tokenize(text string) []string {\n\tsents := []string{}\n\tfor _, s := range p.tokenizer.Tokenize(text) {\n\t\tsents = append(sents, s.Text)\n\t}\n\treturn sents\n}", "title": "" }, { "docid": "c269c451609784e8596eb77b02d76c6a", "score": "0.4678683", "text": "func (r *Response) Entities(name string) []*Entity {\n\tvar entities []*Entity\n\tfor _, sentence := range r.sentences {\n\t\tif sentenceEntities := sentence.Entities(name); len(sentenceEntities) > 0 {\n\t\t\tentities = append(entities, sentenceEntities...)\n\t\t}\n\t}\n\tif len(entities) > 0 {\n\t\treturn entities\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "cb821d99f47b8797e866a5de572fa278", "score": "0.46543923", "text": "func (v *TextIter) StartsSentence() bool {\n\treturn gobool(C.gtk_text_iter_starts_sentence(v.native()))\n}", "title": "" }, { "docid": "69b0df1b75edb20b5ccb52958dd74688", "score": "0.46448717", "text": "func (c *Client) SentenceDependencies(text string) (sentenceDependencies SentenceDependencies, err error) {\n\tdata := new(bytes.Buffer)\n\n\terr = json.NewEncoder(data).Encode(textInput{Text: text})\n\tif err != nil {\n\t\treturn\n\t}\n\n\tbody, err := c.apiPost(\"sentence-dependencies\", data)\n\tif err != nil {\n\t\treturn\n\t}\n\n\terr = json.Unmarshal(body, &sentenceDependencies)\n\tif err != nil {\n\t\treturn\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "f516921ceab84537a7005514e558f844", "score": "0.4637784", "text": "func (dao BaseDao) GetTextVersionList (entity model.Entity)([]model.Entity, error) {\n\tif entity.GetKey() == \"\" {\n\t\treturn nil, errors.New(\"GetText: Key is not defined\")\n\t}\n\n\tid := entity.GetCollection() + \"/\" + entity.GetKey()\n\tsql := `FOR v, e, p IN 1..1 OUTBOUND @startId @@edgeCollection FILTER e.label == 'text' SORT v.version DESC RETURN v`\n\n\tfilterMap := make(map[string]interface{})\n\tfilterMap[\"startId\"] = id\n\tfilterMap[\"@edgeCollection\"] = model.PRJ_EDGES\n\n\tvar query ara.Query\n\tquery.Aql = sql\n\tquery.BindVars = filterMap\n\n\tcursor, err := dao.Database().Execute(&query)\n\tmodel.CheckErr(err)\n\n\tviv := new(model.VersionedText)\n\tvar viList []model.Entity\n\n\tfor cursor.FetchOne(viv) {\n\t\tviList = append (viList, viv)\n\t\tviv = new(model.VersionedText)\n\t}\n\n\treturn viList, nil\n}", "title": "" }, { "docid": "cf970c535fa1419bf65f5f9741aa15ff", "score": "0.46111208", "text": "func transcriptTalkTranscript(doc *goquery.Document) []string {\n\ttexts := doc.Find(\".talk-transcript__para__text\").Contents().Text()\n\tvar para []string\n\tfor _, text := range strings.Split(texts, \" \") {\n\n\t\t//fmt.Println(text)\n\t\tpara = append(para, text)\n\t}\n\n\tvar lines []string\n\tfor _, para := range strings.Split(texts, \"\\n\\n\") {\n\n\t\t//fmt.Println(text)\n\t\tlines = append(lines, para)\n\t}\n\n\treturn para\n\t//return lines\n}", "title": "" }, { "docid": "0e52f9d4aa93d638b753733cdaa81ba2", "score": "0.46020168", "text": "func parseWordList(s *State, t *tokenizer) {\n\n}", "title": "" }, { "docid": 
"b3d2297fbd19385cb7f271dbb855e097", "score": "0.45882154", "text": "func (r *Response) Sentence() *Sentence {\n\treturn r.sentences[0]\n}", "title": "" }, { "docid": "1bdd4a1eaa120afc8ec6b579d5f4c0d4", "score": "0.45763832", "text": "func (li *LoremIpsum) Paragraph() string {\n\tfor {\n\t\tcount := int(li.gauss(5.8, 1.93))\n\t\tif count > 0 {\n\t\t\treturn li.Sentences(count)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "8f50e0dcc98b29cc81fcf5bed52b769f", "score": "0.4563081", "text": "func transcriptTalkTranscriptAndTimeStamps(doc *goquery.Document) []string {\n\n\ttexts := doc.Find(\".talk-transcript__para\").Contents().Text()\n\tvar para []string\n\tfor _, text := range strings.Split(texts, \" \") {\n\n\t\t//fmt.Println(text)\n\t\tpara = append(para, text)\n\t}\n\n\tvar lines []string\n\tfor _, para := range strings.Split(texts, \"\\n\\n\") {\n\n\t\t//fmt.Println(text)\n\t\tlines = append(lines, para)\n\t}\n\n\treturn para\n\t//return lines\n}", "title": "" }, { "docid": "9519f094ee8345e84b6fc4088081951c", "score": "0.45506483", "text": "func (v *TextIter) InsideSentence() bool {\n\treturn gobool(C.gtk_text_iter_inside_sentence(v.native()))\n}", "title": "" }, { "docid": "6495a16b0d1472bc7d47a07ecb5387eb", "score": "0.45156553", "text": "func loadWordList(corpusFile string) []string {\n\n\tcorpus, err := ioutil.ReadFile(corpusFile)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\twords := strings.Split(string(corpus), \"\\n\")\n\n\treturn words\n}", "title": "" }, { "docid": "fc54a14ffaed0e6d7a4c6ca7ca3111bb", "score": "0.4509827", "text": "func (project *Project) Texts() *Texts {\n\treturn project.texts\n}", "title": "" }, { "docid": "1e06a86c7e39152fe92b875858d57513", "score": "0.44619912", "text": "func Words(text ...string) (output []string) {\n\treturn WordsFunc(func(input string) string {\n\t\treturn Punctuation.ReplaceAllString(input, EMPTYCHAR)\n\t}, text...)\n}", "title": "" }, { "docid": "0a05be46081cacda042a68b8476a8828", "score": "0.4441419", "text": "func viewList(n int) (s string) {\n\tfor i := 0; i < n; i++ {\n\t\ts += fmt.Sprintf(\" %d) Some blog post\\n\", i)\n\t}\n\treturn\n}", "title": "" }, { "docid": "a15fc4fdfcd4cb06b166bb8bc4a0058e", "score": "0.44286734", "text": "func createWordList() []string {\n\twordList := []string{}\n\tpickList := []string{}\n\n\t// Read from a file line by line\n\tfile, err := os.Open(\"wordlist.txt\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tdefer file.Close()\n\tscanner := bufio.NewScanner(file)\n\tfor scanner.Scan() {\n\t\twordList = append(wordList, scanner.Text())\n\t}\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tfor i := 0; i < 25; i++ {\n\t\trandomIndex := rand.Intn(len(wordList))\n\t\tpickList = append(pickList, wordList[randomIndex])\n\t\tcopy(wordList[randomIndex:], wordList[randomIndex+1:])\n\t\twordList[len(wordList)-1] = \"\"\n\t\twordList = wordList[:len(wordList)-1]\n\t}\n\n\treturn pickList\n}", "title": "" }, { "docid": "2316ac0ad61fcb1eaad1b5300d293a09", "score": "0.44280308", "text": "func getMessageBlocksFromTexts(txtArray, separators []string) []shared.MessageBlock {\n\tblocks := make([]shared.MessageBlock, len(txtArray))\n\tfor i, line := range txtArray {\n\t\tblocks[i] = parseMessageBlock(line, separators)\n\t}\n\treturn blocks\n}", "title": "" }, { "docid": "89cfe6d40bbffdb643b7fbfa2c389686", "score": "0.44230846", "text": "func paragraphify(fileContent string) string {\n paragraphs := strings.Split(fileContent, \"\\n\")\n return fmt.Sprintf(\"<p>%s</p>\", strings.Join(paragraphs, \"</p><p>\"))\n}", "title": "" 
}, { "docid": "08af0b400f8de0f521a819aa33fb8058", "score": "0.44061825", "text": "func (c *Context) StringList(name string) ([]string, bool) {\n\tfor _, entry := range c.Grammar {\n\t\tif entry.OptionType == Subcommand && entry.Found {\n\t\t\tsubContext := entry.Value.(Context)\n\n\t\t\treturn subContext.StringList(name)\n\t\t}\n\n\t\tif entry.Found && entry.OptionType == StringListType && name == entry.LongName {\n\t\t\treturn entry.Value.([]string), true\n\t\t}\n\t}\n\n\treturn make([]string, 0), false\n}", "title": "" }, { "docid": "e85cb3907cf3a9c4f713c819adf491d1", "score": "0.4393574", "text": "func listWords(conns map[string]*types.Connection) []string {\n\twords := make([]string, 0, len(conns))\n\tfor k := range conns {\n\t\twords = append(words, k)\n\t}\n\treturn words\n}", "title": "" }, { "docid": "bc71bc3b293ea8546728bdac40b0fa63", "score": "0.43880612", "text": "func (t *Text) getTokens(s string) (r []string) {\n\tvar tr []string\n\tvar ts []string = gotokenizer.Words(s)\n\tif m, e := gopostagger.LoadModel(t.lang.model); e != nil {\n\t\ttr = append(r, ts...)\n\t} else {\n\t\ttagger := gopostagger.NewTagger(m)\n\n\t\tvar pts [][]string = tagger.Tag(ts)\n\t\tfor _, i := range pts {\n\t\t\tvar k, v string = i[1], strings.ToLower(i[0])\n\t\t\tif strings.Contains(k, nTag) || strings.Contains(k, aTag) {\n\t\t\t\ttr = append(r, v)\n\t\t\t}\n\t\t}\n\t}\n\n\tfor _, i := range tr {\n\t\tif !t.lang.isStopword(i) {\n\t\t\tr = append(r, i)\n\t\t}\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "1d805a937d34bad36f1cca4691cadd89", "score": "0.43819577", "text": "func (r *Reply) List() ([]string, error) {\n if r.Type == ReplyError {\n return nil, r.Err\n }\n if r.Type != ReplyMulti {\n return nil, errors.New(\"reply type is not ReplyMulti\")\n }\n\n strings := make([]string, len(r.Elems))\n for i, v := range r.Elems {\n if v.Type == ReplyString {\n strings[i] = v.Body\n } else if v.Type == ReplyNil {\n strings[i] = \"\"\n } else {\n return nil, errors.New(\"element type is not ReplyString or ReplyNil\")\n }\n }\n\n return strings, nil\n}", "title": "" }, { "docid": "1336223bbb9ff98b910538ddfd44b616", "score": "0.4375288", "text": "func FragmentaAgumentos(s string) []string {\n\tsec := strings.Split(s, \" \")\n\tretorno := []string{}\n\tfor _, valor := range sec {\n\t\tif len(valor) != 0 {\n\t\t\tretorno = append(retorno, valor)\n\t\t}\n\t}\n\n\treturn retorno\n}", "title": "" }, { "docid": "a8fb358c34661a137f2764cf894ca612", "score": "0.43724817", "text": "func buildWordsList() {\n\tr = rand.New(rand.NewSource(time.Now().UnixNano()))\n\tcontent, err := ioutil.ReadFile(\"/usr/share/dict/words\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tdictionary = strings.Split(string(content), \"\\n\")\n}", "title": "" }, { "docid": "e4ec0f9f9441ec3fa35701fbd22c6ac2", "score": "0.43712482", "text": "func (f *loremFaker) Words(rng *rand.Rand, num int) []string {\n\tw := make([]string, num)\n\tfor i := range w {\n\t\tw[i] = f.words.Rand(rng).(string)\n\t}\n\treturn w\n}", "title": "" }, { "docid": "1a5c8d0444f53a76db8ce99ac9ef3af4", "score": "0.4364703", "text": "func (m *Message) AddSentence(s Sentence) {\n\tif s.Number > 0 {\n\t\tm.Sentences = append(m.Sentences, s)\n\t}\n}", "title": "" }, { "docid": "5220aa4babc134cc30d14bc9dcd1a339", "score": "0.43621647", "text": "func (fa FakeAdapter) Words() string {\n\treturn fake.Words()\n}", "title": "" }, { "docid": "3007a3ed1e1a38459670c557161d52b0", "score": "0.43619716", "text": "func SimpleLines(text string) []Line {\n\tv := strings.Split(text, 
\"\\n\")\n\tlines := make([]Line, len(v))\n\tfor i, s := range v {\n\t\tlines[i] = []LineElement{LineElement{Text: s}}\n\t}\n\treturn lines\n}", "title": "" }, { "docid": "c68e5256ec23b5e36310ad4753f649ec", "score": "0.43505523", "text": "func (span TextSpan) Words() []string {\n\tvar words []string\n\ti := 0\n\tfor _, boundary := range span.boundaries {\n\t\tif boundary.kind != wordBoundary {\n\t\t\tcontinue\n\t\t}\n\t\tj := boundary.pos + boundary.size\n\t\twords = append(words, span.Text[i:j])\n\t\ti = j\n\t}\n\tif i < len(span.Text) {\n\t\twords = append(words, span.Text[i:])\n\t}\n\treturn words\n}", "title": "" }, { "docid": "4cdc27d0221ab757413d9ede00da1e94", "score": "0.43424228", "text": "func (list LayoutList) Strings() []string {\n\tout := make([]string, len(list))\n\tfor i, l := range list {\n\t\tout[i] = l.String()\n\t}\n\treturn out\n}", "title": "" }, { "docid": "ea227b6b8e896848603ccda1500fad7d", "score": "0.43391076", "text": "func (t *Text) Summarize() (summary []string) {\n\tvar scorer *scorer = newScorer(t.sentences)\n\tfor _, s := range scorer.getSummary() {\n\t\tsummary = append(summary, s.raw)\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "a88c7ce827a9e3539ceb492bf31bd153", "score": "0.4335039", "text": "func (gps *GPSDevice) NextSentence() (sentence string) {\n\tsentence = gps.readNextSentence()\n\tfor !validSentence(sentence) {\n\t\tsentence = gps.readNextSentence()\n\t}\n\treturn sentence\n}", "title": "" }, { "docid": "195b78d41bc900d5c9e13fcb7ee0886d", "score": "0.43333352", "text": "func (f *loremFaker) Paragraph(rng *rand.Rand) string {\n\treturn strings.Join(f.Sentences(rng, randInt(rng, 1, 5)), ` `)\n}", "title": "" }, { "docid": "6f9ad8c4ca762d2cfdccc3c4d35a85df", "score": "0.4325361", "text": "func (db *db) ListNotes(tags []string) string {\n\tresult := []string{}\n\tfor _, e := range db.repo {\n\t\tskip := false\n\t\tfor _, tag := range tags {\n\t\t\tfound := false\n\t\t\tfor _, t := range e.Tags {\n\t\t\t\tif t == tag {\n\t\t\t\t\tfound = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif !found {\n\t\t\t\tskip = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\tif skip {\n\t\t\tcontinue\n\t\t}\n\n\t\tresult = append(result, e.Text)\n\t}\n\n\treturn strings.Join(result, \"\\n\\n\")\n}", "title": "" }, { "docid": "1de458dca6fb3a05fd808c6c327f70bc", "score": "0.4324579", "text": "func GetWordList() []string {\n\tl := make([]string, 0, len(englishWords))\n\tfor k := range englishWords {\n\t\tl = append(l, k)\n\t}\n\treturn l\n}", "title": "" }, { "docid": "f1bb197dcad696aaa965fb9989cf0e2d", "score": "0.4321361", "text": "func List() ([]string, error) {\n\tfile, err := get(\"/api/list\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlangs := []string{}\n\tfor _, row := range strings.Split(file, \"\\n\") {\n\t\tlangs = append(langs, strings.Split(row, \",\")...)\n\t}\n\n\treturn langs, nil\n}", "title": "" }, { "docid": "a22ddb367720b0f0684f4c5ca5bd8826", "score": "0.43106663", "text": "func ListPhilosopherPhrases(philosopherName string, context fiber.Ctx) error{\n\n\tphilosophers,err := functions.LoadPhilosophers()\n\n\n\tif err != nil {\n\t\treturn context.Status(fiber.StatusInternalServerError).JSON(\n\t\t\tfiber.Map{\n\t\t\t\t\t\"error\":err.Error(),\n\t\t\t},\n\t\t)\n\t}\n\n\tphilosopher,err := functions.SearchPhilosopherByName(philosopherName,*philosophers)\n\n\tif err != nil {\n\t\treturn context.Status(fiber.StatusNotFound).JSON(\n\t\t\tfiber.Map{\n\t\t\t\t\"message\": \"philosophers not found\",\n\t\t\t},\n\t\t)\n\t}\n\n\treturn 
context.Status(fiber.StatusOK).JSON(\n\t\tfiber.Map{\n\t\t\t\"philosopher\":philosopher.Name,\n\t\t\t\"phrases\":philosopher.Phrases,\n\t\t},\n\t)\n\t\n}", "title": "" }, { "docid": "91a1416fc21a5dc7ab47852480b4accc", "score": "0.43098807", "text": "func (nlp NLPTokenizer) Tokenize(ctx context.Context, text string) ([]tokenize.Token, error) {\n\tres, err := nlp.req(ctx, text)\n\tif err != nil {\n\t\treturn []tokenize.Token{}, err\n\t}\n\n\ttokens := make([]tokenize.Token, 0)\n\tfor _, tok := range res.GetTokens() {\n\t\tif _, ok := poSMap[tok.PartOfSpeech.Tag]; !ok {\n\t\t\treturn tokens, errors.New(\"can't find pos match\")\n\t\t}\n\n\t\ttokens = append(tokens, tokenize.Token{\n\t\t\tPoS: poSMap[tok.PartOfSpeech.Tag],\n\t\t\tText: tok.GetText().GetContent(),\n\t\t})\n\t}\n\treturn tokens, nil\n}", "title": "" }, { "docid": "3d1822edb08a0ce62b4da26f56a343e5", "score": "0.43080956", "text": "func (s *Seg) SegmentText(t string) []string {\n\tts := s.groupText(t)\n\tout := []string{}\n\n\tfor _, ti := range ts {\n\t\tif s.isThai(ti) {\n\t\t\tif s.Dict.Depth() > 0 {\n\t\t\t\tres := s.segmentThai(ti)\n\t\t\t\tout = append(out, res...)\n\t\t\t} else {\n\t\t\t\tout = append(out, ti)\n\t\t\t}\n\t\t} else {\n\t\t\tbuf := strings.Trim(ti, \" \")\n\t\t\tif len(buf) > 0 {\n\t\t\t\tout = append(out, buf)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn out\n}", "title": "" }, { "docid": "9ed965f644ac7e48520478b0b6747271", "score": "0.43017688", "text": "func (d *Dict) Words() []string {\n\treturn d.list\n}", "title": "" }, { "docid": "7e175a80ef57fa3d1c18d150fa07450f", "score": "0.42989868", "text": "func (l *Lorem) Fake() {\n\tl.Wordʹ()\n\tl.Wordsʹ()\n\tl.Sentenceʹ()\n\tl.Sentencesʹ()\n\tl.Paragraphʹ()\n\tl.Paragraphsʹ()\n}", "title": "" }, { "docid": "a541f2dfb509585e8ae8d31dcf9c9532", "score": "0.42937073", "text": "func (t Tags) List(name string) []string {\n\tparts := strings.Split(reflect.StructTag(t).Get(name), \",\")\n\tout := make([]string, 0, len(parts))\n\tfor _, part := range parts {\n\t\tpart = strings.TrimSpace(part)\n\t\tif len(part) > 0 {\n\t\t\tout = append(out, part)\n\t\t}\n\t}\n\treturn out\n}", "title": "" }, { "docid": "7bf88041897431ec5f8e2a76fd75c24b", "score": "0.4289751", "text": "func (h *HTTPHandler) getTagsofNews(news *models.News) {\r\n\tretrievedTags, err := h.database.RetrieveData(fmt.Sprintf(\"SELECT tags FROM tags WHERE news_id = (SELECT id FROM news WHERE title = '%s');\", news.Title))\r\n\tif err != nil {\r\n\t\tlog.Println(\"Error retrieving tags : \", err.Error())\r\n\t}\r\n\tfor retrievedTags.Data.Next() {\r\n\t\tvar tags string\r\n\t\terr = retrievedTags.Data.Scan(&tags)\r\n\t\tif err != nil {\r\n\t\t\tlog.Println(\"Error scanning tags: \", err.Error())\r\n\t\t}\r\n\t\tnews.Tags = append(news.Tags, tags)\r\n\t}\r\n\r\n}", "title": "" }, { "docid": "d151571046e1cc58e006d350cf5d24fc", "score": "0.42779085", "text": "func main() {\n\tsentence1 := \"Eating right now\"\n\tsentence2 := \"Eating right\"\n\top := sentenceSimilarity.SentencesSimilar(sentence1, sentence2)\n\tfmt.Println(op)\n}", "title": "" }, { "docid": "b85e5ec4a619eb4caeed1d883e45be2b", "score": "0.42707905", "text": "func (p *PrefixTree) Words() []string {\n words := []string{}\n for _, n := range p.Root.childNodes() {\n words = append(words, p.wordsHelper(n, &bytes.Buffer{})...)\n }\n return words\n}", "title": "" }, { "docid": "9da4277435ce36fd779eca5c38a6846b", "score": "0.42681193", "text": "func Shingle(lines []string) []string {\n\tsh := newShingler()\n\n\tfor _, line := range lines {\n\n\t\twords := 
strings.Fields(line)\n\t\tfor _, word := range words {\n\t\t\tw := removePunctuationMarks(word)\n\t\t\tif stopwords.IsStopWord(strings.ToLower(w)) {\n\t\t\t\tsh.appendCandidate()\n\t\t\t}\n\t\t\tsh.appendWord(w)\n\t\t}\n\t}\n\n\treturn sh.shingles\n}", "title": "" }, { "docid": "88ee6f6d03c8afd29b86532b09416bb5", "score": "0.42607552", "text": "func (v *TextIter) ForwardSentenceEnds(v1 int) bool {\n\treturn gobool(C.gtk_text_iter_forward_sentence_ends(v.native(), C.gint(v1)))\n}", "title": "" }, { "docid": "aaa43331f5660432a849d450ec91b087", "score": "0.42569888", "text": "func (b *Buntstift) List(text string, indentLevel ...int) {\n\tindent := 0\n\tif len(indentLevel) > 0 {\n\t\tindent = indentLevel[0]\n\t}\n\tColor := b.colorize(color.FgWhite)\n\tb.printf(Color, \"%v%v %v\\n\", strings.Repeat(\" \", indent*2), b.icons[\"multiplicationDot\"], text)\n}", "title": "" }, { "docid": "9f89139c2c0769402493ec01fd325f91", "score": "0.4253848", "text": "func (c *Collection) List() ([]string, error) {\n\ts, err := c.ListAll()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\ts2 := make([]string, 0, len(s))\n\tfor _, x := range s {\n\t\tif strings.HasPrefix(x, \".\") {\n\t\t\tcontinue\n\t\t}\n\n\t\ts2 = append(s2, x)\n\t}\n\n\treturn s2, nil\n}", "title": "" }, { "docid": "09919a589d348ab75fa311ff49ca531d", "score": "0.42524123", "text": "func (s *DefaultSentenceTokenizer) SentencePositions(text string) []int {\n\tannotatedTokens := s.AnnotatedTokens(text)\n\n\tpositions := make([]int, 0, len(annotatedTokens))\n\tfor _, token := range annotatedTokens {\n\t\tif !token.SentBreak {\n\t\t\tcontinue\n\t\t}\n\n\t\tpositions = append(positions, token.Position)\n\t}\n\n\tlastChar := len(text)\n\tpositions = append(positions, lastChar)\n\n\treturn positions\n}", "title": "" }, { "docid": "e6a127e14cf5617743d82aeffdd7823b", "score": "0.4242732", "text": "func main() {\n\tfp, err := os.Open(\"hightemp.txt\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tdefer fp.Close()\n\n\t// get args\n\tn := flag.Int(\"n\", 0, \"input natural number\")\n\tflag.Parse()\n\n\tsentences := []string{}\n\tscanner := bufio.NewScanner(fp)\n\tfor scanner.Scan() {\n\t\tsentences = append(sentences, scanner.Text())\n\t}\n\tfor i := len(sentences) - *n; i < len(sentences); i++ {\n\t\tfmt.Println(sentences[i])\n\t}\n}", "title": "" }, { "docid": "1ea87f85370307c4d65e81d7b05a5b9c", "score": "0.42414245", "text": "func (v *TextIter) EndsSentence() bool {\n\treturn gobool(C.gtk_text_iter_ends_sentence(v.native()))\n}", "title": "" }, { "docid": "39e8a384c4fd358d7512727168ccb618", "score": "0.42366973", "text": "func GenerateNewSentence(sentenceModel string) string {\n\ttempFourWordArray := make([]string, len(fourWord))\n\tcopy(tempFourWordArray, fourWord)\n\ttempThreeWordArray := make([]string, len(threeWord))\n\tcopy(tempThreeWordArray, threeWord)\n\ttempTwoWordArray := make([]string, len(twoWord))\n\tcopy(tempTwoWordArray, twoWord)\n\ttempOneWordArray := make([]string, len(oneWord))\n\tcopy(tempOneWordArray, oneWord)\n\n\tnewSentence := sentenceModel\n\tsubstring := \"\"\n\n\tfor i := 1; i < 9; i++ {\n\t\tsubstring = strconv.Itoa(i * 1111)\n\t\tfor strings.Contains(newSentence, substring) {\n\t\t\tword := rand.Intn(len(tempFourWordArray))\n\t\t\tnewSentence = strings.Replace(newSentence, substring, tempFourWordArray[word], -1)\n\t\t\ttempFourWordArray[word] = tempFourWordArray[len(tempFourWordArray)-1]\n\t\t\ttempFourWordArray = tempFourWordArray[:len(tempFourWordArray)-1]\n\t\t}\n\n\t\tsubstring = strconv.Itoa(i * 111)\n\t\tfor 
strings.Contains(newSentence, substring) {\n\t\t\tword := rand.Intn(len(tempThreeWordArray))\n\t\t\tnewSentence = strings.Replace(newSentence, substring, tempThreeWordArray[word], -1)\n\t\t\ttempThreeWordArray[word] = tempThreeWordArray[len(tempThreeWordArray)-1]\n\t\t\ttempThreeWordArray = tempThreeWordArray[:len(tempThreeWordArray)-1]\n\t\t}\n\n\t\tsubstring = strconv.Itoa(i * 11)\n\t\tfor strings.Contains(newSentence, substring) {\n\t\t\tword := rand.Intn(len(tempTwoWordArray))\n\t\t\tnewSentence = strings.Replace(newSentence, substring, tempTwoWordArray[word], -1)\n\t\t\ttempTwoWordArray[word] = tempTwoWordArray[len(tempTwoWordArray)-1]\n\t\t\ttempTwoWordArray = tempTwoWordArray[:len(tempTwoWordArray)-1]\n\t\t}\n\t\tsubstring = strconv.Itoa(i)\n\t\tfor strings.Contains(newSentence, substring) {\n\t\t\tword := rand.Intn(len(tempOneWordArray))\n\t\t\tnewSentence = strings.Replace(newSentence, substring, tempOneWordArray[word], -1)\n\t\t\ttempOneWordArray[word] = tempOneWordArray[len(tempOneWordArray)-1]\n\t\t\ttempOneWordArray = tempOneWordArray[:len(tempOneWordArray)-1]\n\t\t}\n\n\t}\n\treturn newSentence\n}", "title": "" }, { "docid": "c6b5171492f0acde9f6f75568ca831b1", "score": "0.42325297", "text": "func newSentenceTokenizer(s *sentences.Storage) (*sentences.DefaultSentenceTokenizer, error) {\n\ttraining := s\n\n\tif training == nil {\n\t\tb, err := data.Asset(\"data/english.json\")\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\ttraining, err = sentences.LoadTraining(b)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// supervisor abbreviations\n\tabbrevs := []string{\"sgt\", \"gov\", \"no\", \"mt\"}\n\tfor _, abbr := range abbrevs {\n\t\ttraining.AbbrevTypes.Add(abbr)\n\t}\n\n\tlang := sentences.NewPunctStrings()\n\tword := newWordTokenizer(lang)\n\tannotations := sentences.NewAnnotations(training, lang, word)\n\n\tortho := &sentences.OrthoContext{\n\t\tStorage: training,\n\t\tPunctStrings: lang,\n\t\tTokenType: word,\n\t\tTokenFirst: word,\n\t}\n\n\tmultiPunct := &multiPunctWordAnnotation{\n\t\tStorage: training,\n\t\tTokenParser: word,\n\t\tTokenGrouper: &sentences.DefaultTokenGrouper{},\n\t\tOrtho: ortho,\n\t}\n\n\tannotations = append(annotations, multiPunct)\n\n\ttokenizer := &sentences.DefaultSentenceTokenizer{\n\t\tStorage: training,\n\t\tPunctStrings: lang,\n\t\tWordTokenizer: word,\n\t\tAnnotations: annotations,\n\t}\n\n\treturn tokenizer, nil\n}", "title": "" }, { "docid": "dcadca04f02df7c0607dc2c26e8dd282", "score": "0.42244673", "text": "func List(n int, do func(i int) string) []string {\n\tvar list []string\n\n\tfor i := 0; i < n; i++ {\n\t\tlist = append(list, do(i))\n\t}\n\n\treturn list\n}", "title": "" }, { "docid": "da1d0a3f2bb9139167c61f652c165b59", "score": "0.42239338", "text": "func (d *Dictionary) Passphrase(n int) ([]string, error) {\n\tif d.Entropy(n) < minEntropy {\n\t\treturn nil, fmt.Errorf(\"dictionary cannot support more than %0.0f bits of entropy\", minEntropy)\n\t}\n\tdictLengh := big.NewInt(int64(d.Length() - 1))\n\twords := make([]string, n)\n\tfor i := 0; i < n; i++ {\n\t\tidx, err := rand.Int(d.randReader, dictLengh)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"cannot generate random words: %s\", err)\n\t\t}\n\t\tword := d.words[d.start+int(idx.Int64())]\n\t\tswitch d.capitalize {\n\t\tcase \"all\":\n\t\t\tword = strings.ToUpper(word)\n\t\tcase \"first\":\n\t\t\tword = strings.ToUpper(word[0:1]) + word[1:]\n\t\tcase \"random\":\n\t\t\tfor j := range word {\n\t\t\t\tchoice, err := rand.Int(d.randReader, 
big.NewInt(int64(9)))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn []string{}, err\n\t\t\t\t}\n\t\t\t\tif int(choice.Int64()) < 5 {\n\t\t\t\t\tswitch j {\n\t\t\t\t\tcase 0:\n\t\t\t\t\t\tword = strings.ToUpper(word[0:1]) + word[1:]\n\t\t\t\t\tcase len(word) - 1:\n\t\t\t\t\t\tword = word[0:j] + strings.ToUpper(word[j:])\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tword = word[0:j] + strings.ToUpper(word[j:j+1]) + word[j+1:]\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\twords[i] = word\n\t}\n\treturn words, nil\n}", "title": "" }, { "docid": "bae0a0f651aa7661603ac998cefc30fa", "score": "0.42237976", "text": "func (s *Service) List(_ struct{}, reply *string) error {\n\ttableString := &strings.Builder{}\n\ttable := tablewriter.NewWriter(tableString)\n\ttable.SetHeader([]string{\"Service\"})\n\n\tnames := []string{}\n\tfor t := range s.serviceToken {\n\t\tif len(s.serviceToken[t]) > 0 {\n\t\t\tnames = append(names, t)\n\t\t}\n\t}\n\n\tsort.Strings(names)\n\n\tfor n := range names {\n\t\ttable.Append([]string{names[n]})\n\t}\n\n\ttable.Render()\n\t*reply = tableString.String()\n\treturn nil\n}", "title": "" }, { "docid": "2ca8af889fc52ff20ec1f2ba4554abb6", "score": "0.4220134", "text": "func stemmerFilter(tokens []string) []string {\n\tr := make([]string, len(tokens))\n\tfor i, token := range tokens {\n\t\tr[i] = snowballeng.Stem(token, false)\n\t}\n\treturn r\n}", "title": "" }, { "docid": "371e839a7c71a39c0129ee5be767e111", "score": "0.42184532", "text": "func (n BIP39ChineaseTraditional) GetList() []string {\n\treturn bip39ChineaseSimplified\n}", "title": "" }, { "docid": "aae42c64376ddc8cf7ee15f821e8e6a7", "score": "0.42099634", "text": "func WordSplicer(sentences string, res chan string) {\n\tdefer close(res)\n\n\tvar word string\n\tvar lastIndex int\n\tfor index, rune := range sentences {\n\t\tif unicode.IsSpace(rune) {\n\t\t\tword = cleanWord(sentences[lastIndex:index])\n\t\t\tlastIndex = index + 1\n\n\t\t\tif dotIndex := strings.IndexRune(word, dot); dotIndex != -1 && dotIndex+1 < len(word) {\n\t\t\t\tdotSplicer(word, res)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// Probably we dealing with many dot type or single dot, so\n\t\t\t// just treat has having single dot, check abbreviation if\n\t\t\t// its one, if one, get abbreviation else add without dot.\n\t\t\tif strings.HasSuffix(word, \".\") {\n\t\t\t\tif abbrvs.IsAbbreviated(word) {\n\t\t\t\t\tres <- word\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\tres <- strings.TrimSuffix(word, \".\")\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tres <- word\n\t\t\tcontinue\n\t\t}\n\t}\n\n\tif word = sentences[lastIndex:]; len(word) != 0 {\n\t\tword = cleanWord(word)\n\n\t\tif abbrvs.IsAbbreviated(word) {\n\t\t\tres <- word\n\t\t\treturn\n\t\t}\n\n\t\tif strings.HasSuffix(word, \".\") {\n\t\t\tres <- strings.TrimSuffix(word, \".\")\n\t\t\treturn\n\t\t}\n\n\t\tres <- word\n\t}\n}", "title": "" }, { "docid": "ba3ae35afca20d49c03514ebfd0424a1", "score": "0.4209765", "text": "func CollectKeywords(text string) []string {\n\tnaturalLanguageUnderstanding := VerifyNLU()\n\n\tsentiment := true\n\temotion := true\n\tlimit := int64(3)\n\n\tresponse, responseErr := naturalLanguageUnderstanding.Analyze(\n\t\t&naturallanguageunderstandingv1.AnalyzeOptions{\n\t\t\tText: &text,\n\t\t\tFeatures: &naturallanguageunderstandingv1.Features{\n\t\t\t\tKeywords: &naturallanguageunderstandingv1.KeywordsOptions{\n\t\t\t\t\tSentiment: &sentiment,\n\t\t\t\t\tEmotion: &emotion,\n\t\t\t\t\tLimit: &limit,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t)\n\tif responseErr != nil {\n\t\tpanic(responseErr)\n\t}\n\tresult := 
naturalLanguageUnderstanding.GetAnalyzeResult(response)\n\tb, _ := json.MarshalIndent(result, \"\", \" \")\n\tkeywordsList := createKeywordsList(b)\n\treturn (keywordsList)\n}", "title": "" }, { "docid": "1767a9c1de6bbde37f2695e60b543f86", "score": "0.42005414", "text": "func WordsFunc(funcToApply func(string) string, text ...string) (output []string) {\n\tfor _, txt := range text {\n\t\toutput = append(output, strings.Fields(funcToApply(txt))...)\n\t}\n\treturn output\n}", "title": "" } ]
f07e1c5b7b8b3bfec6df5e0c9ef9ec03
Specifies an HTTP proxy that the API client library will use to connect to the internet. host - The proxy hostname. port - The proxy port. userName - The username. password - The password.
[ { "docid": "d59ac766bbe45bf14abdad17d800a8c6", "score": "0.768328", "text": "func (client *HtmlToImageClient) SetProxy(host string, port int, userName string, password string) *HtmlToImageClient {\n client.helper.setProxy(host, port, userName, password)\n return client\n}", "title": "" } ]
[ { "docid": "5472aaaa6cf18b0860aaf322532d7aea", "score": "0.78388226", "text": "func (client *ImageToImageClient) SetProxy(host string, port int, userName string, password string) *ImageToImageClient {\n client.helper.setProxy(host, port, userName, password)\n return client\n}", "title": "" }, { "docid": "835ac1ad545948b8f2d2587ad97dc628", "score": "0.7625165", "text": "func (client *PdfToPdfClient) SetProxy(host string, port int, userName string, password string) *PdfToPdfClient {\n client.helper.setProxy(host, port, userName, password)\n return client\n}", "title": "" }, { "docid": "be7f267dc8ab9262eecf821b40efef75", "score": "0.7622708", "text": "func (client *ImageToPdfClient) SetProxy(host string, port int, userName string, password string) *ImageToPdfClient {\n client.helper.setProxy(host, port, userName, password)\n return client\n}", "title": "" }, { "docid": "795b7fdef29051c159ad41788d6b4444", "score": "0.7517148", "text": "func (client *HtmlToPdfClient) SetProxy(host string, port int, userName string, password string) *HtmlToPdfClient {\n client.helper.setProxy(host, port, userName, password)\n return client\n}", "title": "" }, { "docid": "b26e4242b906c05cdb65eb65bd116b91", "score": "0.74885833", "text": "func SetProxy(proxyServer string, proxyPort int, proxyUser string, proxyPassword string){\n _proxyServer = proxyServer\n _proxyPort = proxyPort\n _proxyUser = proxyUser\n _proxyPassword = proxyPassword\n _proxyServerEnabled = true\n}", "title": "" }, { "docid": "cbdfe925cbebca720f0d648038c1b943", "score": "0.74192035", "text": "func (client *PdfToTextClient) SetProxy(host string, port int, userName string, password string) *PdfToTextClient {\n client.helper.setProxy(host, port, userName, password)\n return client\n}", "title": "" }, { "docid": "0b9e8fa6b28df25c15a4b6e49e0acdab", "score": "0.74131215", "text": "func (client *PdfToHtmlClient) SetProxy(host string, port int, userName string, password string) *PdfToHtmlClient {\n client.helper.setProxy(host, port, userName, password)\n return client\n}", "title": "" }, { "docid": "4426bc506dd90485f4ef28cf19502ce1", "score": "0.6947236", "text": "func (client *ImageToImageClient) SetHttpProxy(proxy string) *ImageToImageClient {\n client.fields[\"http_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": "a32810058742c93669c9dc3b4af15847", "score": "0.69257945", "text": "func (client *HtmlToPdfClient) SetHttpProxy(proxy string) *HtmlToPdfClient {\n client.fields[\"http_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": "d90ae6f1606233319ee108f9982a12e8", "score": "0.6904352", "text": "func (client *PdfToTextClient) SetHttpProxy(proxy string) *PdfToTextClient {\n client.fields[\"http_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": "8cea6da3ca581ae517f2c8b7f360e539", "score": "0.6896689", "text": "func (client *HtmlToImageClient) SetHttpProxy(proxy string) *HtmlToImageClient {\n client.fields[\"http_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": "ac0559cfff623f84e753debdbe63826a", "score": "0.6889513", "text": "func (client *ImageToPdfClient) SetHttpProxy(proxy string) *ImageToPdfClient {\n client.fields[\"http_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": "c5728f752511bd76524f9f3d0d334a3c", "score": "0.68809485", "text": "func (c *client) SetProxy(u string) Client {\n\tt, ok := c.opts.httpClient.Transport.(*http.Transport)\n\tif !ok {\n\t\tc.opts.logger.Warn(\"proxy not set: client transport failed \" +\n\t\t\t\"assertion\")\n\t\treturn c\n\t}\n\n\tproxyURL, err 
:= url.Parse(u)\n\tif err != nil {\n\t\tc.opts.logger.WithField(\"url\", u).\n\t\t\tWarn(\"proxy url not set: failed to parse\")\n\t\treturn c\n\t}\n\n\tc.opts.proxyURL = proxyURL\n\tt.Proxy = http.ProxyURL(proxyURL)\n\tc.opts.logger.WithField(\"url\", u).Trace(\"proxy url set\")\n\n\treturn c\n}", "title": "" }, { "docid": "f364900b11fe583ae6fc5a6032e898f8", "score": "0.67799217", "text": "func (client *PdfToHtmlClient) SetHttpProxy(proxy string) *PdfToHtmlClient {\n client.fields[\"http_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": "43fd9548ce904d4f8ecc8f30787698af", "score": "0.65707535", "text": "func TeleportProxy(operator ops.Operator, proxyHost string) (*client.ProxyClient, error) {\n\tteleport, err := Teleport(operator, proxyHost)\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\treturn teleport.ConnectToProxy()\n}", "title": "" }, { "docid": "bd3355b92dc442ad367e27f679ec603e", "score": "0.65279996", "text": "func (c *HTTPClient) SetProxy(urlStr string) error {\n\tif !strings.Contains(urlStr, \"://\") {\n\t\turlStr = \"http://\" + urlStr\n\t}\n\n\tproxy, err := url.Parse(urlStr)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tc.client.Transport = &http.Transport{Proxy: http.ProxyURL(proxy)}\n\n\treturn nil\n}", "title": "" }, { "docid": "940472dded3c0d46e3e7cf28c549f8dc", "score": "0.64660716", "text": "func setupProxyClient() *http.Client {\n\tproxyStr := os.Getenv(\"PROXY_URL\")\n\tproxyUrl, err := url.Parse(proxyStr)\n\tif err != nil {\n\t\tlog.Fatalf(\"error parsing proxy %s: %v\", proxyStr, err)\n\t}\n\ttransport := &http.Transport{Proxy: http.ProxyURL(proxyUrl)}\n\n\treturn &http.Client{\n\t\tTransport: transport,\n\t}\n}", "title": "" }, { "docid": "32ea61c1a70e1670452739659bffc25b", "score": "0.64133006", "text": "func (s *Scraper) SetProxy(proxyAddr string) error {\n\tif proxyAddr == \"\" {\n\t\ts.client.Transport = &http.Transport{\n\t\t\tTLSNextProto: make(map[string]func(authority string, c *tls.Conn) http.RoundTripper),\n\t\t\tDialContext: (&net.Dialer{\n\t\t\t\tTimeout: s.client.Timeout,\n\t\t\t}).DialContext,\n\t\t}\n\t\ts.proxy = \"\"\n\t\treturn nil\n\t}\n\tif strings.HasPrefix(proxyAddr, \"http\") {\n\t\turlproxy, err := url.Parse(proxyAddr)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\ts.client.Transport = &http.Transport{\n\t\t\tProxy: http.ProxyURL(urlproxy),\n\t\t\tTLSNextProto: make(map[string]func(authority string, c *tls.Conn) http.RoundTripper),\n\t\t\tDialContext: (&net.Dialer{\n\t\t\t\tTimeout: s.client.Timeout,\n\t\t\t}).DialContext,\n\t\t}\n\t\ts.proxy = proxyAddr\n\t\treturn nil\n\t}\n\tif strings.HasPrefix(proxyAddr, \"socks5\") {\n\t\tbaseDialer := &net.Dialer{\n\t\t\tTimeout: s.client.Timeout,\n\t\t\tKeepAlive: s.client.Timeout,\n\t\t}\n\t\tsocksHostPort := strings.ReplaceAll(proxyAddr, \"socks5://\", \"\")\n\t\tdialSocksProxy, err := proxy.SOCKS5(\"tcp\", socksHostPort, nil, baseDialer)\n\t\tif err != nil {\n\t\t\treturn errors.New(\"error creating socks5 proxy :\" + err.Error())\n\t\t}\n\t\tif contextDialer, ok := dialSocksProxy.(proxy.ContextDialer); ok {\n\t\t\tdialContext := contextDialer.DialContext\n\t\t\ts.client.Transport = &http.Transport{\n\t\t\t\tDialContext: dialContext,\n\t\t\t}\n\t\t} else {\n\t\t\treturn errors.New(\"failed type assertion to DialContext\")\n\t\t}\n\t\ts.proxy = proxyAddr\n\t\treturn nil\n\t}\n\treturn errors.New(\"only support http(s) or socks5 protocol\")\n}", "title": "" }, { "docid": "380fc6b48d73223102fa838fa3cf349d", "score": "0.6409942", "text": "func (s *Scraper) 
SetProxy(proxyAddr string) error {\n\tif strings.HasPrefix(proxyAddr, \"http\") {\n\t\turlproxy, err := url.Parse(proxyAddr)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\ts.client = &http.Client{\n\t\t\tTransport: &http.Transport{\n\t\t\t\tProxy: http.ProxyURL(urlproxy),\n\t\t\t\tTLSNextProto: make(map[string]func(authority string, c *tls.Conn) http.RoundTripper),\n\t\t\t\tDialContext: (&net.Dialer{\n\t\t\t\t\tTimeout: s.clientTimeout,\n\t\t\t\t}).DialContext,\n\t\t\t},\n\t\t}\n\t\treturn nil\n\t}\n\tif strings.HasPrefix(proxyAddr, \"socks5\") {\n\t\tbaseDialer := &net.Dialer{\n\t\t\tTimeout: s.clientTimeout,\n\t\t\tKeepAlive: s.clientTimeout,\n\t\t}\n\t\tsocksHostPort := strings.ReplaceAll(proxyAddr, \"socks5://\", \"\")\n\t\tdialSocksProxy, err := proxy.SOCKS5(\"tcp\", socksHostPort, nil, baseDialer)\n\t\tif err != nil {\n\t\t\treturn errors.New(\"error creating socks5 proxy :\" + err.Error())\n\t\t}\n\t\tif contextDialer, ok := dialSocksProxy.(proxy.ContextDialer); ok {\n\t\t\tdialContext := contextDialer.DialContext\n\t\t\ts.client = &http.Client{\n\t\t\t\tTransport: &http.Transport{\n\t\t\t\t\tDialContext: dialContext,\n\t\t\t\t},\n\t\t\t}\n\t\t} else {\n\t\t\treturn errors.New(\"failed type assertion to DialContext\")\n\t\t}\n\t\treturn nil\n\t}\n\treturn errors.New(\"only support http(s) or socks5 protocol\")\n}", "title": "" }, { "docid": "f6a2d0b58362ae35fd5693fe49c74f11", "score": "0.64052683", "text": "func SetProxy(proxy, proxyAuth string, tr *http.Transport) (err error) {\n\treturn net.SetProxy(proxy, proxyAuth, tr)\n}", "title": "" }, { "docid": "2ca4440501136b53b9fb2d9e6a86b96c", "score": "0.6326179", "text": "func (c *Client) proxy(req *http.Request) (*url.URL, error) {\n\tif c.Proxy == \"\" {\n\t\treturn nil, nil\n\t}\n\treturn url.Parse(c.Proxy)\n}", "title": "" }, { "docid": "2b27f48b4d78fa9d6f08de9e6895b733", "score": "0.63116014", "text": "func (client *Client) Proxy(name string) (*Proxy, error) {\n\t// TODO url encode\n\tresp, err := http.Get(client.endpoint + \"/proxies/\" + name)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = checkError(resp, http.StatusOK, \"Proxy\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tproxy := new(Proxy)\n\terr = json.NewDecoder(resp.Body).Decode(proxy)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tproxy.client = client\n\tproxy.created = true\n\n\treturn proxy, nil\n}", "title": "" }, { "docid": "dbff5fcbe3eeb458556bfea30a649f31", "score": "0.62725854", "text": "func NewHTTPProxy(host string,\n\tport int,\n\tuser, password, keyFile, passphrase string,\n\tlogger *logprinter.Logger,\n) *HTTPProxy {\n\tp := &HTTPProxy{\n\t\tconfig: &easyssh.MakeConfig{\n\t\t\tServer: host,\n\t\t\tPort: strconv.Itoa(port),\n\t\t\tUser: user,\n\t\t\tTimeout: 10 * time.Second,\n\t\t},\n\t\tlogger: logger,\n\t}\n\n\tif len(keyFile) > 0 {\n\t\tp.config.KeyPath = keyFile\n\t\tp.config.Passphrase = passphrase\n\t} else if len(password) > 0 {\n\t\tp.config.Password = password\n\t}\n\n\tdial := func(ctx context.Context, network, addr string) (net.Conn, error) {\n\t\tp.l.RLock()\n\t\tcli := p.cli\n\t\tp.l.RUnlock()\n\n\t\t// reuse the old client if dial success\n\t\tif cli != nil {\n\t\t\tc, err := cli.Dial(network, addr)\n\t\t\tif err == nil {\n\t\t\t\treturn c, nil\n\t\t\t}\n\t\t}\n\n\t\t// create a new ssh client\n\t\t// timeout is implemented inside easyssh, don't need to repeat the implementation\n\t\t_, cli, err := p.config.Connect()\n\t\tif err != nil {\n\t\t\treturn nil, perrs.Annotate(err, \"connect to ssh 
proxy\")\n\t\t}\n\n\t\tp.l.Lock()\n\t\tp.cli = cli\n\t\tp.l.Unlock()\n\n\t\treturn cli.Dial(network, addr)\n\t}\n\n\tp.tr = &http.Transport{DialContext: dial}\n\treturn p\n}", "title": "" }, { "docid": "a6b5fc3e925e3e40f4281e0c42d3bb76", "score": "0.62436527", "text": "func NewProxy(host string, monitor *Monitor, labels map[string]string, dialer func(context.Context, string, string) (net.Conn, error)) (*Proxy, error) {\n\tport, err := monitor.findOpenPort()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &Proxy{\n\t\tclient: &http.Client{\n\t\t\tTransport: &http.Transport{\n\t\t\t\tDialContext: dialer,\n\t\t\t},\n\t\t},\n\t\tserver: nil,\n\t\thost: host,\n\t\tPort: port,\n\t\tLabels: labels,\n\t\tmonitor: monitor,\n\t}, nil\n}", "title": "" }, { "docid": "8824fd482f4ca65afa62b993f9a3e930", "score": "0.62397563", "text": "func setupProxy() {\r\n\t// By-pass proxy\r\n\tos.Setenv(\"HTTP_PROXY\", \"http://<username>:<password>@<proxy-info>:8080\")\r\n\tos.Setenv(\"HTTPS_PROXY\", \"http://<username>:<password>@<proxy-info>:8080\")\r\n\tos.Setenv(\"NO_PROXY\", \"<no-proxy-info>\")\r\n\tos.Setenv(\"http_proxy\", \"http://<username>:<password>@<proxy-info>:8080\")\r\n\tos.Setenv(\"https_proxy\", \"http://<username>:<password>@<proxy-info>:8080\")\r\n\tos.Setenv(\"no_proxy\", \"<no-proxy-info>\")\r\n}", "title": "" }, { "docid": "6059dadd63e5773cf4f33b60f6f7dae9", "score": "0.61673015", "text": "func WithProxy(proxy string) ClientOption {\n\treturn func(c Client) error {\n\t\tc.SetProxy(proxy)\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "e7ab1ae43d7090d4edaa5bf0a98cba24", "score": "0.6165205", "text": "func NewProxyClient(p *Proxy) *http.Client {\n\tproxyURL, _ := url.Parse(fmt.Sprintf(\"http://%s:%s\", p.IP, p.Port))\n\tif proxyURL == nil {\n\t\treturn nil\n\t}\n\treturn &http.Client{\n\t\tTransport: &http.Transport{\n\t\t\tProxy: func(_ *http.Request) (*url.URL, error) {\n\t\t\t\treturn proxyURL, nil\n\t\t\t},\n\t\t\tDialContext: (&net.Dialer{\n\t\t\t\tTimeout: 30 * time.Second,\n\t\t\t\tKeepAlive: 30 * time.Second,\n\t\t\t\tDualStack: true,\n\t\t\t}).DialContext,\n\t\t\tForceAttemptHTTP2: true,\n\t\t\tMaxIdleConns: 100,\n\t\t\tIdleConnTimeout: 10 * time.Second,\n\t\t\tTLSHandshakeTimeout: 10 * time.Second,\n\t\t\tExpectContinueTimeout: 1 * time.Second,\n\t\t},\n\t}\n}", "title": "" }, { "docid": "a12a43abcb7f0dab99b9a14c4d495397", "score": "0.61566323", "text": "func (c *Client) Proxy(proxyURL string) *Client {\n\tnewClient := c.Clone()\n\tnewClient.SetProxy(proxyURL)\n\treturn newClient\n}", "title": "" }, { "docid": "f55295719de7d1b34deb5c8a536b152d", "score": "0.6122532", "text": "func SetProxy(proxy func(*http.Request) (*url.URL, error)) error {\n\treturn std.SetProxy(proxy)\n}", "title": "" }, { "docid": "9a718daea6cecfc41694d6df2c8ba333", "score": "0.61072975", "text": "func (c *endpointClient) Proxy(ctx context.Context, opts *ProxyOpts) error {\n\tq := opts.Request.URL.Query()\n\tdelete(q, \"callback\")\n\tdelete(q, \":service\") // injected as named param by DelayedRouter\n\tdelete(q, \":instance\") // injected as named param by DelayedRouter\n\tdelete(q, \":mux-route-name\") // injected as named param by DelayedRouter\n\tdelete(q, \":mux-path-template\") // injected as named param by DelayedRouter\n\tqstring := q.Encode()\n\tif qstring != \"\" {\n\t\tqstring = fmt.Sprintf(\"?%s\", qstring)\n\t}\n\trawurl := strings.TrimRight(c.endpoint, \"/\") + \"/\" + strings.Trim(opts.Path, \"/\") + qstring\n\turl, err := url.Parse(rawurl)\n\tif err != nil {\n\t\tlog.Errorf(\"Got error 
while creating service proxy url %s: %s\", rawurl, err)\n\t\treturn err\n\t}\n\theader, err := baseHeader(ctx, opts.Event, opts.Instance, opts.RequestID)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor k, v := range header {\n\t\topts.Request.Header[k] = v\n\t}\n\tdirector := func(req *http.Request) {\n\t\treq.Header = opts.Request.Header\n\t\treq.SetBasicAuth(c.username, c.password)\n\t\treq.Host = url.Host\n\t\treq.URL = url\n\t\t*req = *req.WithContext(ctx)\n\t}\n\tproxy := &httputil.ReverseProxy{\n\t\tTransport: net.Dial15Full300ClientWithPool.Transport,\n\t\tDirector: director,\n\t}\n\tproxy.ServeHTTP(opts.Writer, opts.Request)\n\treturn nil\n}", "title": "" }, { "docid": "5ea391319057e9bc40cefa2e4616cd3e", "score": "0.60747653", "text": "func (c *Client) SetProxy() (err error) {\n\tproxy, err := formatProxy(c.Proxy)\n\tif err != nil {\n\t\tproxy = \"\"\n\t}\n\tif len(proxy) == 0 {\n\t\tcolor.Green(\"Current proxy is based on environment\")\n\t} else {\n\t\tcolor.Green(\"Current proxy is %v\", proxy)\n\t}\n\tcolor.Cyan(`Set a new proxy (e.g. \"http://127.0.0.1:80\", \"socks5://127.0.0.1:1080\"`)\n\tcolor.Cyan(`Enter empty line if you want to use default proxy from environment`)\n\tcolor.Cyan(`Note: Proxy URL should match \"proxyProtocol://proxyIp:proxyPort\"`)\n\tfor {\n\t\tproxy, err = formatProxy(util.ScanlineTrim())\n\t\tif err == nil {\n\t\t\tbreak\n\t\t}\n\t\tcolor.Red(err.Error())\n\t}\n\tc.Proxy = proxy\n\tif len(proxy) == 0 {\n\t\tcolor.Green(\"Current proxy is based on environment\")\n\t} else {\n\t\tcolor.Green(\"Current proxy is %v\", proxy)\n\t}\n\treturn c.save()\n}", "title": "" }, { "docid": "86a9bb3a8e5fe7360be1315558a3db9d", "score": "0.6006448", "text": "func (client *PdfToTextClient) SetHttpsProxy(proxy string) *PdfToTextClient {\n client.fields[\"https_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": "b6d8969d528ce1cffb893e0481052965", "score": "0.6006247", "text": "func (client *HtmlToPdfClient) SetHttpsProxy(proxy string) *HtmlToPdfClient {\n client.fields[\"https_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": "3f426f5c4ea415783c23d7dbacb50c30", "score": "0.5964575", "text": "func (client *ImageToPdfClient) SetHttpsProxy(proxy string) *ImageToPdfClient {\n client.fields[\"https_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": "14c6ce0e6121ece35d89182aa796097d", "score": "0.59614635", "text": "func (s *ProxyServer) Proxy(ctx context.Context, proxyCtx *common.ProxyContext, clientConn, serviceConn net.Conn) error {\n\tctx, cancel := context.WithCancel(ctx)\n\tdefer cancel()\n\n\t// Wrap a client connection with a monitor that auto-terminates\n\t// idle connection and connection with expired cert.\n\tvar err error\n\tctx, clientConn, err = s.cfg.ConnectionMonitor.MonitorConn(ctx, proxyCtx.AuthContext, clientConn)\n\tif err != nil {\n\t\tclientConn.Close()\n\t\tserviceConn.Close()\n\t\treturn trace.Wrap(err)\n\t}\n\n\tvar labels prometheus.Labels\n\tif len(proxyCtx.Servers) > 0 {\n\t\tlabels = getLabelsFromDB(proxyCtx.Servers[0].GetDatabase())\n\t} else {\n\t\tlabels = getLabelsFromDB(nil)\n\t}\n\n\tactiveConnections.With(labels).Inc()\n\tdefer activeConnections.With(labels).Dec()\n\n\treturn trace.Wrap(utils.ProxyConn(ctx, clientConn, serviceConn))\n}", "title": "" }, { "docid": "7612f44537ef9900ce2e6bc1c12e1dba", "score": "0.5908655", "text": "func (client *PdfToHtmlClient) SetHttpsProxy(proxy string) *PdfToHtmlClient {\n client.fields[\"https_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": 
"078cde0b9bad58a196e5c6b27cd9d3a5", "score": "0.58984995", "text": "func getProxyClient() *http.Client {\n\tresp, err := http.Get(fmt.Sprintf(\"%s/get/\", proxyPoolUrl))\n\tif err != nil {\n\t\tfmt.Println(errors.Wrap(err, \"get proxy url err\"))\n\t\treturn nil\n\t}\n\tif resp == nil {\n\t\tfmt.Println(\"proxy url is nil\")\n\t\treturn nil\n\t}\n\tdefer resp.Body.Close()\n\n\tdata, _ := ioutil.ReadAll(resp.Body)\n\t//var builder strings.Builder\n\t//builder.WriteString(\"http://\")\n\t//builder.Write(data)\n\tvar buf bytes.Buffer\n\tbuf.WriteString(\"http://\")\n\tbuf.Write(data)\n\tfmt.Printf(\"proxy url: %s\\n\", buf.String())\n\n\treturn &http.Client{\n\t\tTimeout: timeout, // 15s\n\t\tTransport: &http.Transport{\n\t\t\tDialContext: (&net.Dialer{\n\t\t\t\tTimeout: timeout,\n\t\t\t\tKeepAlive: timeout,\n\t\t\t}).DialContext,\n\t\t\tProxy: func(request *http.Request) (i *url.URL, e error) {\n\t\t\t\treturn url.Parse(buf.String())\n\t\t\t},\n\t\t\tTLSClientConfig: &tls.Config{InsecureSkipVerify: true},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "ec54fd76c40f0c63d59ae56b96b1cf35", "score": "0.5875644", "text": "func Proxy(w http.ResponseWriter, req *http.Request) {\n\tvar accountID string\n\n\tif req.Method == \"OPTIONS\" {\n\t\tw.WriteHeader(http.StatusOK)\n\t\treturn\n\t}\n\tif req.Body == nil {\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\tw.Write([]byte(\"empty request body\"))\n\t\treturn\n\t}\n\n\tbody, err := ioutil.ReadAll(req.Body)\n\tif err != nil {\n\t\tlog.Panicf(\"error: %v\", err.Error())\n\t}\n\n\tur, err := proxy.UnmarshalRequest(body)\n\tif err != nil {\n\t\tresponse, _ := json.Marshal(proxy.NewErrorResponse(err.Error(), proxy.ErrProxy))\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\tw.Write(response)\n\t\treturn\n\t}\n\n\tif config.IsAccountV1Enabled() {\n\t\taccountID, err = users.GetAccountIDFromRequest(req)\n\t\tif err != nil {\n\t\t\tresponse, _ := json.Marshal(proxy.NewErrorResponse(err.Error(), proxy.ErrProxyAuthFailed))\n\t\t\tw.WriteHeader(http.StatusForbidden)\n\t\t\tw.Write(response)\n\t\t\treturn\n\t\t}\n\t} else {\n\t\taccountID = \"\"\n\t}\n\n\tlbrynetResponse, err := proxy.Proxy(ur, accountID)\n\tif err != nil {\n\t\tlogger.LogF(monitor.F{\"query\": ur, \"error\": err}).Error(\"proxy errored\")\n\t\tresponse, _ := json.Marshal(proxy.NewErrorResponse(err.Error(), proxy.ErrProxy))\n\t\tw.WriteHeader(http.StatusServiceUnavailable)\n\t\tw.Write(response)\n\t\treturn\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\tw.WriteHeader(http.StatusOK)\n\tw.Write(lbrynetResponse)\n}", "title": "" }, { "docid": "9ac0b6364e5c4faba5c6b83a885839bc", "score": "0.5872867", "text": "func (_m *mockMNSClient) SetProxy(url string) {\n\t_m.Called(url)\n}", "title": "" }, { "docid": "cd450ab3fef8f1ad369d118423a1864f", "score": "0.5866828", "text": "func ExampleProxy() {\n\tlog.Println(\"\\n***** ExampleProxy *****\\n\")\n\n\t// Real\n\tvar realServer = offers.MustNewServer(configure(\"Real Server\"), offers.Offers{Configuration: configure(\"Server\")}, \":4030\") // Use s as the service.\n\tgo func() {\n\t\tif err := realServer.Start(); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\tvar realClient = offers.MustNewClient(configure(\"Real Client\"), \":4030\")\n\n\t// Proxy\n\tvar proxyServer = offers.MustNewServer(configure(\"Proxy Server\"), realClient, \":4031\") // Use realClient as the service.\n\tgo func() {\n\t\tif err := proxyServer.Start(); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\tvar proxyClient = 
offers.MustNewClient(configure(\"Proxy Client\"), \":4031\").ForContext(offers.Context{ID: tigertonic.RandomBase62String(8)}).(offers.Client)\n\n\t// Use\n\tm, err := proxyClient.New(offer.Offer{Name: \"nerddomo\"})\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tlog.Printf(\"New: Got: %# v\\n\\n\", pretty.Formatter(simplify(m)))\n\tm, err = proxyClient.Get(m.ID)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tlog.Printf(\"Get: Got: %# v\\n\\n\", pretty.Formatter(simplify(m)))\n\tm.Name = \"test\"\n\tm, err = proxyClient.Set(m)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tlog.Printf(\"Set: Got: %# v\\n\\n\", pretty.Formatter(simplify(m)))\n\tm, err = proxyClient.Delete(m)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tlog.Printf(\"Delete: Got: %# v\\n\\n\", pretty.Formatter(simplify(m)))\n\n\t// Proxy\n\tif err := proxyClient.Close(); err != nil {\n\t\tpanic(err)\n\t}\n\tif err := proxyServer.Stop(); err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Real\n\tif err := realClient.Close(); err != nil {\n\t\tpanic(err)\n\t}\n\tif err := realServer.Stop(); err != nil {\n\t\tpanic(err)\n\t}\n}", "title": "" }, { "docid": "dc62824a884c074f5a92799047141445", "score": "0.58287233", "text": "func (r *Req) SetProxy(proxy func(*http.Request) (*url.URL, error)) error {\n\ttrans := r.getTransport()\n\tif trans == nil {\n\t\treturn errors.New(\"req: no transport\")\n\t}\n\ttrans.Proxy = proxy\n\treturn nil\n}", "title": "" }, { "docid": "a2ae6158944b1bf9b694d6a169d2933a", "score": "0.5820467", "text": "func SetProxy(proxyDialer proxy.Dialer) {\n\tdialerOnce.Do(func() {\n\t\tdialer = proxyDialer\n\t})\n}", "title": "" }, { "docid": "eff1c7f17b77100cb97dd38d0229a864", "score": "0.58092874", "text": "func (client *HtmlToImageClient) SetHttpsProxy(proxy string) *HtmlToImageClient {\n client.fields[\"https_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": "d75ae25c23e9d2fab20fe7ed9d2b5fb0", "score": "0.57888865", "text": "func (options *Options) setGitProxy() error {\n\thttpClient, err := options.newHttpClient()\n\tif err != nil {\n\t\treturn err\n\t}\n\tclient.InstallProtocol(\"https\", ghttp.NewClient(httpClient))\n\tclient.InstallProtocol(\"http\", ghttp.NewClient(httpClient))\n\treturn nil\n}", "title": "" }, { "docid": "78cc65fed7e08b42850aa11ed364911b", "score": "0.5767144", "text": "func (d *Dialer) DialProxy() (net.Conn, error) {\n\treturn d.DialProxyContext(context.Background())\n}", "title": "" }, { "docid": "1a1c326fa4d162f02d4bd01f558de3e7", "score": "0.57510376", "text": "func InitClientProxy(host string, port, id int, nameType string) ClientProxy {\n\tvar clientProxy ClientProxy\n\tclientProxy.Host = host\n\tclientProxy.Port = port\n\tclientProxy.ID = id\n\tclientProxy.TypeName = nameType\n\treturn clientProxy\n}", "title": "" }, { "docid": "93fcb7e1c265bb0794cb9cf389344fa2", "score": "0.57509303", "text": "func (b *Base) SetClientProxyAddress(addr string) error {\n\tif addr == \"\" {\n\t\treturn nil\n\t}\n\tproxy, err := url.Parse(addr)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"setting proxy address error %s\",\n\t\t\terr)\n\t}\n\n\terr = b.Requester.SetProxy(proxy)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif b.Websocket != nil {\n\t\terr = b.Websocket.SetProxyAddress(addr)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "af24200762e0cf67e6e87072bc583cd8", "score": "0.57382286", "text": "func (o *EmailRequestSmtp) SetProxy(v string) {\n\to.Proxy = &v\n}", "title": "" }, { "docid": "372e261e280bac674354504152eec998", "score": "0.573611", "text": "func 
WithProxy(request *AddProxyRequest) Option {\n\treturn func(client *Client) {\n\t\tclient.AddProxy(request)\n\t}\n}", "title": "" }, { "docid": "10fb58d8b234261ad2b94118cd3bf870", "score": "0.573589", "text": "func (p *ProxyConfig) HttpProxy() string {\n\treturn p.httpProxy\n}", "title": "" }, { "docid": "1ea6edc7ad11d0f6213a429f8a761c2f", "score": "0.57339925", "text": "func proxyCmd(proxy string) (string, error) {\n\tu, err := url.Parse(proxy)\n\tif err != nil {\n\t\terr = fmt.Errorf(\"not valid proxy address: %s\", err.Error())\n\t\treturn \"\", err\n\t}\n\tif u.Port() == \"\" {\n\t\tif proxy[len(proxy)-1] != ':' {\n\t\t\tproxy += \":\"\n\t\t}\n\t\tproxy += defaultPort\n\t}\n\treturn proxy, nil\n}", "title": "" }, { "docid": "f5943c704bc750bb9da1e310e5fa3613", "score": "0.5722847", "text": "func (client *ImageToImageClient) SetHttpsProxy(proxy string) *ImageToImageClient {\n client.fields[\"https_proxy\"] = proxy\n return client\n}", "title": "" }, { "docid": "0e430b0abdbea087586ce03907fb6fbf", "score": "0.57123125", "text": "func NewProxyWithHostConverter(hostConverter func(string) string) *Proxy {\n\treturn &Proxy{\n\t\tRequestConverter: func(originalRequest, proxy *http.Request) {\n\t\t\tproxy.URL.Host = hostConverter(originalRequest.Host)\n\t\t},\n\t\tTransport: http.DefaultTransport,\n\t}\n}", "title": "" }, { "docid": "7c2b326c0a5973bbf59ce047ef4b2983", "score": "0.5681594", "text": "func (d *Dice) setupProxy() error {\n\tport := d.config.GetString(\"proxy-port\")\n\taddress := fmt.Sprintf(\":%v\", port)\n\n\tlogfile := d.config.GetString(\"proxy-logfile\")\n\n\tproxyConfig := proxy.Config{\n\t\tAddress: address,\n\t\tLogfile: logfile,\n\t}\n\n\td.proxy = proxy.New(proxyConfig, d.registry)\n\n\treturn nil\n}", "title": "" }, { "docid": "67bbc608c387f6aaecf8aa1bf95ed80b", "score": "0.56638145", "text": "func NewProxy(opts ...ProxyOption) *Proxy {\n\topt := newProxyOptions(opts...)\n\n\tp := &Proxy{\n\t\topts: *opt,\n\t}\n\n\tif h, ok := p.opts.tun.Handler().(*tunClientHandler); ok {\n\t\th.crypt = opt.tunCrypt\n\t\th.proxyAddr = opt.listenAddr\n\t\th.connectAddr = opt.connectAddr\n\t}\n\n\treturn p\n}", "title": "" }, { "docid": "163da26a8f0ba78c8d7ad185bcbf4eef", "score": "0.56636804", "text": "func NewProxyDialer(timeout time.Duration, proxy, connect string) *ProxyDialer {\n\treturn &ProxyDialer{timeout: timeout, proxy: proxy, connect: connect}\n}", "title": "" }, { "docid": "9682673fe3d395d0ed5d6c798d43c2b5", "score": "0.5662168", "text": "func (k *Kf) Proxy(ctx context.Context, appName string, port int) {\n\tk.t.Helper()\n\tLogf(k.t, \"running proxy for %q...\", appName)\n\tdefer Logf(k.t, \"done running proxy for %q.\", appName)\n\toutput, errs := k.kf(ctx, k.t, KfTestConfig{\n\t\tArgs: []string{\n\t\t\t\"proxy\",\n\t\t\t\"--namespace\", SpaceFromContext(ctx),\n\t\t\tappName,\n\t\t\tfmt.Sprintf(\"--port=%d\", port),\n\t\t},\n\t})\n\tPanicOnError(ctx, k.t, fmt.Sprintf(\"proxy %q\", appName), errs)\n\tStreamOutput(ctx, k.t, output)\n}", "title": "" }, { "docid": "100a12c71729257bcc2ce55ddef23879", "score": "0.56342566", "text": "func (sess *Session) SetProxy(addr string) error {\n\tif addr == \"\" {\n\t\treturn ErrEmptyProxy\n\t}\n\tproxyURL, err := url.Parse(addr)\n\tif err != nil {\n\t\treturn err\n\t}\n\tswitch proxyURL.Scheme {\n\tcase \"socks5\", \"socks4\":\n\t\tdialer, err := proxy.SOCKS5(\"tcp\", proxyURL.Host, nil, proxy.Direct)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tsess.Transport.Dial = dialer.Dial\n\tdefault:\n\t\tsess.Transport.Proxy = 
http.ProxyURL(proxyURL)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "79ed41a7fc18ff38d63e0dfddc546b29", "score": "0.56177205", "text": "func DialViaProxy(url *url.URL, farendAddress string) (net.Conn, error) {\n\tproxyAddress := net.JoinHostPort(url.Hostname(), url.Port())\n proxyAuth := \"\"\n\tif url.User != nil {\n\t\tproxyAuth = \"Basic \" + base64.StdEncoding.EncodeToString([]byte(url.User.String()))\n\t}\n\n\tswitch url.Scheme {\n\tcase \"http\":\n\t\tconn, err := net.Dial(\"tcp\", proxyAddress)\n\t\tif err != nil {\n\t\t\treturn nil, errors.WithMessage(err, \"Failed to connect to proxy\")\n\t\t}\n\n\t\treturn establishProxyConnect(conn, farendAddress, proxyAuth)\n\n\tcase \"https\":\n\t\tconn, err := tls.Dial(\"tcp\", proxyAddress, nil)\n\t\tif err != nil {\n\t\t\treturn nil, errors.WithMessage(err, \"Failed to connect via TLS to proxy\")\n\t\t}\n\n\t\treturn establishProxyConnect(conn, farendAddress, proxyAuth)\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"URL scheme '%v' not supported for proxy\", url.Scheme)\n\t}\n}", "title": "" }, { "docid": "af2263cc8f2746fc781ed94d77b80457", "score": "0.5613747", "text": "func (b *RunnerBuilder) Proxy(value string) *RunnerBuilder {\n\tb.proxy = value\n\treturn b\n}", "title": "" }, { "docid": "98e280fe9d547b06fa18057b357d5cde", "score": "0.5585275", "text": "func ProxyReq(req string, proxy string) (res *http.Response, err error) {\n\ttimeout := time.Duration(2 * time.Second)\n\tproxyURL, err := url.Parse(\"http://\" + proxy)\n\treqURL, err := url.Parse(req)\n\n\ttransport := &http.Transport{Proxy: http.ProxyURL(proxyURL)}\n\tclient := &http.Client{\n\t\tTimeout: timeout,\n\t\tTransport: transport,\n\t}\n\n\tres, err = client.Get(reqURL.String())\n\treturn res, err\n}", "title": "" }, { "docid": "1368bfdae4771dcf1d57e4986516c7d8", "score": "0.5565801", "text": "func (c *ConfigArg) SetProxyAddress(address string) {\n\tif c.args == nil {\n\t\tc.args = make(map[string]interface{})\n\t}\n\n\tif len(address) < 1 {\n\t\tc.args[\"proxyaddress\"] = \"none\"\n\t} else {\n\t\tc.args[\"proxyaddress\"] = address\n\t}\n}", "title": "" }, { "docid": "14034d0e72236a026237aa01f90bb0a3", "score": "0.5561715", "text": "func ProxyClient(ctx context.Context, namespaces []string) (v1.ProxyClient, error) {\n\tcustomFactory := getConfigClientFactory()\n\tif customFactory != nil {\n\t\treturn v1.NewProxyClient(ctx, customFactory)\n\t}\n\tkubecontext, err := contextoptions.KubecontextFrom(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tcfg, err := kubeutils.GetConfigWithContext(\"\", \"\", kubecontext)\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"getting kube config\")\n\t}\n\tcache := kube.NewKubeCache(ctx)\n\tproxyClient, err := v1.NewProxyClient(ctx, &factory.KubeResourceClientFactory{\n\t\tCrd: v1.ProxyCrd,\n\t\tCfg: cfg,\n\t\tSharedCache: cache,\n\t\tNamespaceWhitelist: namespaces,\n\t})\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"creating proxys client\")\n\t}\n\tif err := proxyClient.Register(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn proxyClient, nil\n}", "title": "" }, { "docid": "9ba9886e2bc5dde95099513bff1896b6", "score": "0.5552887", "text": "func NewProxy(options ...Option) (*Proxy, error) {\n\treturn NewProxyWithOptions(options...)\n}", "title": "" }, { "docid": "5c07db9e6fd5c4d123a4c5f172e5393e", "score": "0.5550971", "text": "func (r *Request) SetProxy(proxy func(*http.Request) (*url.URL, error)) *Request {\n\tif r == nil {\n\t\treturn nil\n\t}\n\tif r.setting == nil {\n\t\tr.setting = 
&Setting{}\n\t}\n\tr.setting.Proxy = proxy\n\treturn r\n}", "title": "" }, { "docid": "58dc77a2fbd4e4565505a08809cd70d4", "score": "0.5510989", "text": "func SetProxy(proxy string) error {\n\treturn defaultScraper.SetProxy(proxy)\n}", "title": "" }, { "docid": "ef04106ad54887025a84ca885f4348ff", "score": "0.55064666", "text": "func (p *Proxy) Proxy() func(*http.Request) (*url.URL, error) {\n\tvar u *url.URL\n\tvar ustr string\n\tvar err error\n\n\tswitch p.Type {\n\tcase \"DIRECT\":\n\t\tbreak\n\tcase \"PROXY\":\n\t\tif p.Username != \"\" && p.Password != \"\" {\n\t\t\tustr = fmt.Sprintf(\"http://%s:%s@%s\", p.Username, p.Password, p.Address)\n\t\t} else {\n\t\t\tustr = fmt.Sprintf(\"http://%s\", p.Address)\n\t\t}\n\tdefault:\n\t\tif p.Username != \"\" && p.Password != \"\" {\n\t\t\tustr = fmt.Sprintf(\"%s:%s@%s://%s\", p.Username, p.Password, strings.ToLower(p.Type), p.Address)\n\t\t} else {\n\t\t\tustr = fmt.Sprintf(\"%s://%s\", strings.ToLower(p.Type), p.Address)\n\t\t}\n\t}\n\n\tif ustr != \"\" {\n\t\tu, err = url.Parse(ustr)\n\t}\n\n\treturn func(*http.Request) (*url.URL, error) {\n\t\treturn u, err\n\t}\n}", "title": "" }, { "docid": "552880fc98bdec8a13262e9a6e400946", "score": "0.5505536", "text": "func (s *httpProxy) Dial(network, addr string) (net.Conn, error) {\n\tc, err := s.forward.Dial(\"tcp\", s.host)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treqURL, err := url.Parse(\"http://\" + addr)\n\tif err != nil {\n\t\tc.Close()\n\t\treturn nil, err\n\t}\n\treqURL.Scheme = \"\"\n\n\treq, err := http.NewRequest(\"CONNECT\", reqURL.String(), nil)\n\tif err != nil {\n\t\tc.Close()\n\t\treturn nil, err\n\t}\n\treq.Close = false\n\tif s.haveAuth {\n\t\treq.SetBasicAuth(s.username, s.password)\n\t}\n\treq.Header.Set(\"User-Agent\", \"Poweredby Golang\")\n\n\terr = req.Write(c)\n\tif err != nil {\n\t\tc.Close()\n\t\treturn nil, err\n\t}\n\n\tresp, err := http.ReadResponse(bufio.NewReader(c), req)\n\tif err != nil {\n\t\tresp.Body.Close()\n\t\tc.Close()\n\t\treturn nil, err\n\t}\n\tresp.Body.Close()\n\tif resp.StatusCode != 200 {\n\t\tc.Close()\n\t\terr = fmt.Errorf(\"Connect server using proxy error, StatusCode [%d]\", resp.StatusCode)\n\t\treturn nil, err\n\t}\n\n\treturn c, nil\n}", "title": "" }, { "docid": "2bd4dc12c707db28137f59e8b9720fc0", "score": "0.54954773", "text": "func ProxyParams() cke.ServiceParams {\n\targs := []string{\n\t\t\"kube-proxy\",\n\t\tfmt.Sprintf(\"--config=%s\", proxyConfigPath),\n\t}\n\treturn cke.ServiceParams{\n\t\tExtraArguments: args,\n\t\tExtraBinds: []cke.Mount{\n\t\t\t{\n\t\t\t\tSource: \"/etc/machine-id\",\n\t\t\t\tDestination: \"/etc/machine-id\",\n\t\t\t\tReadOnly: true,\n\t\t\t\tPropagation: \"\",\n\t\t\t\tLabel: \"\",\n\t\t\t},\n\t\t\t{\n\t\t\t\tSource: \"/etc/kubernetes\",\n\t\t\t\tDestination: \"/etc/kubernetes\",\n\t\t\t\tReadOnly: true,\n\t\t\t\tPropagation: \"\",\n\t\t\t\tLabel: cke.LabelShared,\n\t\t\t},\n\t\t\t{\n\t\t\t\tSource: \"/lib/modules\",\n\t\t\t\tDestination: \"/lib/modules\",\n\t\t\t\tReadOnly: true,\n\t\t\t\tPropagation: \"\",\n\t\t\t\tLabel: \"\",\n\t\t\t},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "3b7e6f82fe3b4b7473473f92c4bd6a3a", "score": "0.549299", "text": "func NewProxy() *Proxy {\n\tp := new(Proxy)\n\tconn, err := grpc.Dial(address, grpc.WithInsecure())\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"did not connect: %v\", err))\n\t}\n\tp.connection = conn\n\treturn p\n}", "title": "" }, { "docid": "e2081fc022b186f61b8e3244d4b8add1", "score": "0.54913336", "text": "func WithProxy(p *proxy.Proxy) Option {\n\treturn 
func(s *Server) error {\n\t\tif p == nil {\n\t\t\treturn errors.Errorf(\"nil proxy\")\n\t\t}\n\t\ts.proxy = p\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "36d78e9c6a18fdcb5b8ef09926db16f0", "score": "0.5468159", "text": "func httpProxy() {\n\tserver := &http.Server{\n\t\tAddr: proxyPath,\n\t\tHandler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\thandler(w, r)\n\t\t}),\n\t}\n\tlog.Fatal(server.ListenAndServe())\n\n}", "title": "" }, { "docid": "97e865851106e28245568ee4c569f0ec", "score": "0.5464265", "text": "func (ba BasicAuth) Client() (*http.Client, error) {\n\n\tbasicAuth := func(req *http.Request) (*url.URL, error) {\n\t\treq.SetBasicAuth(ba.Username, ba.Password)\n\t\treturn http.ProxyFromEnvironment(req)\n\t}\n\n\treturn &http.Client{\n\t\tTransport: &http.Transport{\n\t\t\tProxy: basicAuth,\n\t\t},\n\t}, nil\n}", "title": "" }, { "docid": "93f9e34e4611cb2263b2a35180de66aa", "score": "0.54461354", "text": "func NewProxy(name string,\n\taddr string,\n\trequestTimeout time.Duration,\n\tloadBalancer LoadBalancer) *Proxy {\n\tproxy := &Proxy{name: name,\n\t\taddr: addr,\n\t\trequestTimeout: requestTimeout,\n\t\tseqIdAllocator: NewSeqIdAllocator(),\n\t\tloadBalancer: loadBalancer,\n\t\tclients: make([]*Client, 0)}\n\n\treturn proxy\n}", "title": "" }, { "docid": "d1cfbe1036ef0177ebfc1ea19b591c81", "score": "0.5443536", "text": "func HTTPProxy(w http.ResponseWriter, r *http.Request) error {\n\trwc, buf, err := w.(http.Hijacker).Hijack()\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn err\n\t}\n\tdefer rwc.Close()\n\thostPortURL, err := url.Parse(r.RequestURI)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn err\n\t}\n\taddress := hostPortURL.Host\n\tif strings.Index(address, \":\") == -1 {\n\t\taddress = address + \":80\"\n\t}\n\tremote, err := net.DialTimeout(\"tcp\", address, time.Minute)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer remote.Close()\n\tr.Write(remote)\n\tvar wg sync.WaitGroup\n\twg.Add(2)\n\tgo func() { io.Copy(remote, buf); wg.Done() }()\n\tgo func() { io.Copy(buf, remote); wg.Done() }()\n\twg.Wait()\n\treturn nil\n}", "title": "" }, { "docid": "b641fa06dbea5d5deceae0daa67923f7", "score": "0.5433693", "text": "func SetCollyProxy(c *colly.Collector, pry string, timeout int) {\n\ttrp := &http.Transport{\n\t\tDial: (&net.Dialer{\n\t\t\tTimeout: time.Duration(timeout) * time.Second,\n\t\t}).Dial,\n\t\tTLSClientConfig: &tls.Config{InsecureSkipVerify: true},\n\t}\n\tproxyPool := []string{}\n\tif pry == \"\" {\n\t\treturn\n\t}\n\tif strings.Contains(pry, \";\") {\n\t\tproxyPool = stringo.StrSplit(pry, \";\", 1000000)\n\t} else if pry != \"\" {\n\t\tproxyPool = append(proxyPool, pry)\n\t}\n\turlproxy, _ := RandProxy(pry)\n\ttrp.Proxy = http.ProxyURL(urlproxy)\n\n\tif len(proxyPool) > 1 {\n\t\turlproxy, _ := RandProxy(pry)\n\t\ttrp.Proxy = http.ProxyURL(urlproxy)\n\t}\n\tc.WithTransport(trp)\n}", "title": "" }, { "docid": "61688dc4817c189ec692a7976f9417c0", "score": "0.54278517", "text": "func NewProxy(kubeConfig, kubeContext string, namespaces, portNames, labels []string, timeout time.Duration) *Proxy {\n\tmapPortNames := make(map[string]bool)\n\tfor _, s := range portNames {\n\t\tmapPortNames[s] = true\n\t}\n\n\treturn &Proxy{\n\t\tkubeConfig: kubeConfig,\n\t\tkubeContext: kubeContext,\n\t\tnamespaces: namespaces,\n\t\tportNames: mapPortNames,\n\t\tlabels: labels,\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": 
"e6f3fd94bbed0dc3aad6302563b16799", "score": "0.5425701", "text": "func WithClient(c client.Client) options.Option {\n\treturn options.WithValue(\"proxy.client\", c)\n}", "title": "" }, { "docid": "179108c36816cff314a6803ab2c4c8f7", "score": "0.5423504", "text": "func (o *RemoteProcessGroupDTO) SetProxyHost(v string) {\n\to.ProxyHost = &v\n}", "title": "" }, { "docid": "904a951041e78ad3c1b21d93fd243a96", "score": "0.5417863", "text": "func NewProxy(upstream *upstream.Upstream, opts *opts.Cmd, logger *zap.Logger) *Proxy {\n\t// transport := makeTransport(keepaliveConns, maxConnsPerHost, proxyConnectTimeout, proxyReadTimeout)\n\tccache := ccache.New(ccache.Configure().MaxSize(maxCacheSize).ItemsToPrune(cachePruneSize))\n\n\tproxy := &Proxy{\n\t\tupstream: upstream,\n\t\topts: opts,\n\t\tcache: ccache,\n\t\tlogger: logger,\n\t}\n\n\tif opts.Mode == \"http\" || opts.OverrideHost != \"\" {\n\t\ttransport := proxy.makeTransport(opts.OverrideHost)\n\t\tproxy.defaultTransport = transport\n\t}\n\n\treturn proxy\n}", "title": "" }, { "docid": "8dbdd5ecb878f0d6331ef3b0757e2c06", "score": "0.5404643", "text": "func Proxy(c cluster.TestCluster) error {\n\tmasterconf.CoreOS.Etcd2.Discovery, _ = c.GetDiscoveryURL(1)\n\tmaster, err := c.NewMachine(masterconf.String())\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Cluster.NewMachine: %s\", err)\n\t}\n\tdefer master.Destroy()\n\n\tproxyconf.CoreOS.Etcd2.Discovery = masterconf.CoreOS.Etcd2.Discovery\n\tproxy, err := c.NewMachine(proxyconf.String())\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Cluster.NewMachine: %s\", err)\n\t}\n\tdefer proxy.Destroy()\n\n\terr = platform.InstallFile(strings.NewReader(fleetunit), proxy, \"/home/core/hello.service\")\n\tif err != nil {\n\t\treturn fmt.Errorf(\"InstallFile: %s\", err)\n\t}\n\n\t// settling...\n\tfleetStart := func() error {\n\t\t_, err = proxy.SSH(\"fleetctl start /home/core/hello.service\")\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"fleetctl start: %s\", err)\n\t\t}\n\t\treturn nil\n\t}\n\tif err := util.Retry(5, 5*time.Second, fleetStart); err != nil {\n\t\treturn fmt.Errorf(\"fleetctl start failed: %v\", err)\n\t}\n\n\tvar status []byte\n\n\tfleetList := func() error {\n\t\tstatus, err = proxy.SSH(\"fleetctl list-units -l -fields active -no-legend\")\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"fleetctl list-units: %s\", err)\n\t\t}\n\n\t\tif !bytes.Equal(status, []byte(\"active\")) {\n\t\t\treturn fmt.Errorf(\"unit not active\")\n\t\t}\n\n\t\treturn nil\n\t}\n\n\tif err := util.Retry(5, 1*time.Second, fleetList); err != nil {\n\t\treturn fmt.Errorf(\"fleetctl list-units failed: %v\", err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "160dc875b7ef40fbf462ed5a4e086021", "score": "0.5401412", "text": "func buildProxy(host, bind *url.URL, tokenSource oauth2.TokenSource) *httputil.ReverseProxy {\n\t// Build and configure the proxy.\n\tproxy := httputil.NewSingleHostReverseProxy(host)\n\n\t// Configure the director.\n\toriginalDirector := proxy.Director\n\tproxy.Director = func(r *http.Request) {\n\t\t// Call the original director, which configures most of the URL bits for us.\n\t\toriginalDirector(r)\n\n\t\t// Override host - this is not done by the default director, but Cloud Run\n\t\t// requires it.\n\t\tr.Header.Set(\"Host\", host.Host)\n\t\tr.Host = host.Host\n\n\t\tctx := r.Context()\n\n\t\t// Get the oauth token.\n\t\ttoken, err := tokenSource.Token()\n\t\tif err != nil {\n\t\t\t*r = *r.WithContext(context.WithValue(ctx, contextKeyError,\n\t\t\t\tfmt.Errorf(\"failed to get token: %w\", 
err)))\n\t\t\treturn\n\t\t}\n\n\t\t// Get the id_token from the oauth token.\n\t\tidTokenRaw := token.Extra(\"id_token\")\n\t\tif idTokenRaw == nil {\n\t\t\t*r = *r.WithContext(context.WithValue(ctx, contextKeyError,\n\t\t\t\tfmt.Errorf(\"missing id_token\")))\n\t\t\treturn\n\t\t}\n\t\tidToken, ok := idTokenRaw.(string)\n\t\tif !ok {\n\t\t\t*r = *r.WithContext(context.WithValue(ctx, contextKeyError,\n\t\t\t\tfmt.Errorf(\"id_token is not a string: %T\", idTokenRaw)))\n\t\t\treturn\n\t\t}\n\n\t\t// Set the bearer token to be the id token\n\t\tr.Header.Set(\"Authorization\", \"Bearer \"+idToken)\n\t}\n\n\t// Configure error handling.\n\tproxy.ModifyResponse = func(r *http.Response) error {\n\t\t// In case of redirection, make sure the local address is still used for\n\t\t// host. If it has location header && the location url host is the proxied\n\t\t// host, change it to local address with http.\n\t\tlocation := r.Header.Get(\"Location\")\n\t\tif location != \"\" {\n\t\t\tlocationURL, err := url.Parse(location)\n\t\t\tif err == nil && locationURL.Host == host.Host {\n\t\t\t\tlocationURL.Scheme = bind.Scheme\n\t\t\t\tlocationURL.Host = bind.Host\n\t\t\t\tr.Header.Set(\"Location\", locationURL.String())\n\t\t\t}\n\t\t}\n\n\t\tctx := r.Request.Context()\n\t\tif err, ok := ctx.Value(contextKeyError).(error); ok && err != nil {\n\t\t\treturn fmt.Errorf(\"[PROXY ERROR] %w\", err)\n\t\t}\n\n\t\treturn nil\n\t}\n\tproxy.ErrorHandler = func(w http.ResponseWriter, r *http.Request, err error) {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t}\n\n\treturn proxy\n}", "title": "" }, { "docid": "df82e5e381d73556c148e1821d5fa06f", "score": "0.54013455", "text": "func TestClientSettingsProxy(t *testing.T) {\n\tservers, teardown := startServers(t)\n\tdefer teardown()\n\n\t// Start a client with ClientSettings.Proxy set to the proxy listener.\n\texecClient(t,\n\t\t\"TEST_SERVER_URL=\"+servers.serverURL,\n\t\t\"TEST_PROXY_URL=\"+servers.proxyURL)\n\t// We expect one proxy request and 0 server requests\n\tassert.Equal(t, 0, servers.serverRequestCount())\n\tassert.Equal(t, 1, servers.proxyRequestCount())\n}", "title": "" }, { "docid": "f52b1b0cdb4ff4b2bf0d2be59cc3775c", "score": "0.54006624", "text": "func NewProxy() *Proxy {\n\treturn &Proxy{\n\t\tclients: make(map[clientKey]*client),\n\t}\n}", "title": "" }, { "docid": "22a74f3f9a26871a0c41a8d67e356a95", "score": "0.5383759", "text": "func httpProxyService(name, namespace, host string) *servingv1.Service {\n\tproxy := test.Service(name, namespace, httpProxyImage, nil)\n\tproxy.Spec.Template.Spec.Containers[0].Env = append(proxy.Spec.Template.Spec.Containers[0].Env, core.EnvVar{\n\t\tName: \"TARGET_HOST\",\n\t\tValue: host,\n\t})\n\n\treturn proxy\n}", "title": "" }, { "docid": "1232b5cd204b376319a92e5d1a6dfd13", "score": "0.53818595", "text": "func CreateProxyClient() *http.Client {\n\tclient := &http.Client{\n\t\tTransport: &http.Transport{\n\t\t\t// TODO make configurable, should be fine for now for most but extreme -c values\n\t\t\tMaxIdleConnsPerHost: 128, // must be more than incoming parallelization; divided by number of fan out if using parallel mode\n\t\t\tMaxIdleConns: 256,\n\t\t\t// This avoids Accept-Encoding: gzip being added to outgoing requests when no encoding accept is specified\n\t\t\t// yet if passed by request, it will do gzip end to end. 
Issue #624.\n\t\t\tDisableCompression: true,\n\t\t},\n\t}\n\treturn client\n}", "title": "" }, { "docid": "48aa8c9a8022b41012d6fee101f38b30", "score": "0.5379733", "text": "func NewClientProxy(rpcURL, rpcToken, grpcURL, grpcToken string, insecure bool) (*ClientProxy, error) {\n\tctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)\n\tdefer cancel()\n\n\topts := []grpc.DialOption{grpc.WithBlock()}\n\n\t// handle TLS/SSL connection\n\tif insecure {\n\t\topts = append(opts, grpc.WithInsecure())\n\t} else {\n\t\topts = append(opts, grpc.WithTransportCredentials(credentials.NewTLS(nil)))\n\t}\n\n\tgrpcConn, err := grpc.DialContext(ctx, grpcURL, opts...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create gRPC client connection : %w\", err)\n\t}\n\n\thttpClient := &http.Client{\n\t\tCheckRedirect: nil,\n\t\tJar: nil,\n\t\tTimeout: 0,\n\t}\n\n\tif rpcToken != \"\" {\n\t\thttpClient.Transport = AddTokenRoundTripper{\n\t\t\trt: http.DefaultTransport,\n\t\t\ttoken: rpcToken,\n\t\t}\n\t}\n\n\trpcClient, err := rpc.NewWithClient(rpcURL, \"/websocket\", httpClient)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create RPC client connection: %w\", err)\n\t}\n\n\treturn &ClientProxy{\n\t\trpcClient: rpcClient,\n\t\tgrpcConn: grpcConn,\n\t}, nil\n}", "title": "" }, { "docid": "4a916a3318fbfb25a893f71c8bf3226e", "score": "0.5376651", "text": "func ProxyRequest(w http.ResponseWriter, r *http.Request) {\n\ttoxiproxyURL := url.URL{Scheme: k8s.Config.Scheme, Host: fmt.Sprintf(\"%s.%s:%d\", k8s.Config.Name, k8s.Config.Namespace, k8s.Config.Port)}\n\tlog.Logger().Info(\"Proxy request to: \", toxiproxyURL.String())\n\tlog.Logger().Info(\"CONFIG NAME %s . CONFIG NAMESPACE %s . CONFIG PORT %s .\",k8s.Config.Name,k8s.Config.Namespace,k8s.Config.Port)\n\tserveReverseProxy(&toxiproxyURL, w, r)\n}", "title": "" }, { "docid": "72bee47fbb01f7dec91bfc30cf74a295", "score": "0.53728765", "text": "func proxyFromEnvironment(req *http.Request) (*url.URL, error) {\n\tproxy := getenvEitherCase(\"HTTP_PROXY\")\n\tif proxy == \"\" {\n\t\treturn nil, nil\n\t}\n\tproxyURL, err := url.Parse(proxy)\n\tif err != nil || proxyURL.Scheme == \"\" {\n\t\tif u, err := url.Parse(\"http://\" + proxy); err == nil {\n\t\t\tproxyURL = u\n\t\t\terr = nil\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"invalid proxy address %q: %v\", proxy, err)\n\t}\n\treturn proxyURL, nil\n}", "title": "" }, { "docid": "17decbec36096ef9368f5854e1701872", "score": "0.53727806", "text": "func (p *Pool) Proxy(r *http.Request) (*url.URL, error) {\n\tproxy, err := p.getNextProxy(r)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"connect: %v\", err)\n\t}\n\n\treturn proxy.URL, nil\n}", "title": "" }, { "docid": "a5329a2693038cdd394a663b25c90a7d", "score": "0.537275", "text": "func serveProxyConfig(addr string) string {\n\t// if (url.split(\":\")[0] == \"http\") {\n\t// return \"HTTP \"+host\n\t// }\n\n\treturn `function FindProxyForURL(url, host) {\n\t\tif (shExpMatch(url, \"*://\" + host + \":*\")) {\n\t\t\treturn \"PROXY ` + addr + `\";\n\t\t}\n\t\treturn \"DIRECT\";\n\t}`\n}", "title": "" }, { "docid": "3933df08a7a3238175b9e6f171d69728", "score": "0.53630745", "text": "func NewProxy(opts ...ProxyOption) *Proxy {\n\tp := Proxy{\n\t\tlogger: nil,\n\t\tallowedHosts: []string{},\n\t\terrorServerHeader: []string{\"httpproxy\"},\n\t\terrorBody: []byte(\"error communicating with matched service\"),\n\n\t\tVisitsPerServiceAndPath: make(map[string]map[string]int),\n\t\tStatusPerServiceAndPath: 
make(map[string]map[string]map[int]int),\n\t}\n\n\tfor _, opt := range opts {\n\t\topt(&p)\n\t}\n\n\tp.handler.logger = p.logger\n\tp.handler.ruleMutex = &p.ruleMutex\n\tp.handler.proxy = &p\n\tp.certMan = &autocert.Manager{\n\t\tCache: autocert.DirCache(\"certs\"),\n\t\tPrompt: autocert.AcceptTOS,\n\t\tHostPolicy: autocert.HostWhitelist(p.allowedHosts...),\n\t}\n\n\treturn &p\n}", "title": "" }, { "docid": "c3337c395c3599e581e304fb7d7c85b6", "score": "0.53617364", "text": "func HTTPClient(certificate string, proxy proxyFunc) (*http.Client, error) {\n\tvar err error\n\tvar cert *x509.Certificate\n\n\tif certificate != \"\" {\n\t\tcertBlock, _ := pem.Decode([]byte(certificate))\n\t\tif certBlock == nil {\n\t\t\treturn nil, fmt.Errorf(\"Invalid certificate\")\n\t\t}\n\n\t\tcert, err = x509.ParseCertificate(certBlock.Bytes)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\ttlsConfig, err := shared.GetTLSConfig(\"\", \"\", \"\", cert)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\ttr := &http.Transport{\n\t\tTLSClientConfig: tlsConfig,\n\t\tDialContext: shared.RFC3493Dialer,\n\t\tProxy: proxy,\n\t\tDisableKeepAlives: true,\n\t\tExpectContinueTimeout: time.Second * 30,\n\t\tResponseHeaderTimeout: time.Second * 3600,\n\t\tTLSHandshakeTimeout: time.Second * 5,\n\t}\n\n\tmyhttp := http.Client{\n\t\tTransport: tr,\n\t}\n\n\t// Setup redirect policy\n\tmyhttp.CheckRedirect = func(req *http.Request, via []*http.Request) error {\n\t\t// Replicate the headers\n\t\treq.Header = via[len(via)-1].Header\n\n\t\treturn nil\n\t}\n\n\treturn &myhttp, nil\n}", "title": "" }, { "docid": "9381cd183822819188c35a3e09085af6", "score": "0.53615695", "text": "func (self *Surf) SetProxy(proxy string) Surfer {\n\tself.proxy = proxy\n\treturn self\n}", "title": "" }, { "docid": "b6b2b5a57dcb3ac292f1a684210f8b09", "score": "0.53530765", "text": "func (pm *MockPackageManager) SetProxy(proxy.Settings) error {\n\treturn nil\n}", "title": "" }, { "docid": "52cc29d6669bb933c29e212a7130e255", "score": "0.53517854", "text": "func NewProxy(director func(*http.Request) (Options, int), pool *Pool, maxActive int, client *http.Client) *Proxy {\n\tif director == nil || pool == nil || client == nil || maxActive <= 0 {\n\t\treturn nil\n\t}\n\n\tp := &Proxy{\n\t\tDirector: director,\n\t\tClient: client,\n\t\tAccept: DefaultAccept,\n\t\tServer: DefaultServer,\n\t\tUserAgent: DefaultUserAgent,\n\t\tpool: pool,\n\t\tactive: make(chan bool, maxActive),\n\t}\n\n\tfor i := 0; i < maxActive; i++ {\n\t\tp.active <- true\n\t}\n\n\treturn p\n}", "title": "" }, { "docid": "51bdcde48953dd2648d25876d4c2ae47", "score": "0.5333299", "text": "func (c *ConfigArg) ProxyAddress() string {\n\tif ret, ok := c.args[\"proxyaddress\"].(string); ok {\n\t\treturn ret\n\t}\n\treturn \"\"\n}", "title": "" }, { "docid": "b86e1020b229147ae63bcefe2751b09e", "score": "0.53183365", "text": "func NewProxy(config *configmanager.Configuration) ProxyManager {\n\treturn ProxyManager{\n\t\tconfig: config,\n\t\tclient: &http.Client{}}\n}", "title": "" }, { "docid": "57c4537c13af72819cc087c6001e6f48", "score": "0.5318173", "text": "func (o *NetworkConfig) SetHttpproxy(v string) {\n\to.Httpproxy = &v\n}", "title": "" }, { "docid": "4b2cafa8465b95d6ab52371ff8c1e70d", "score": "0.5303314", "text": "func NewProxy(options *CallerOptions) *httputil.ReverseProxy {\n\treturn &httputil.ReverseProxy{\n\t\tDirector: func(*http.Request) {},\n\t\tFlushInterval: options.FlushInterval,\n\t\tTransport: &http.Transport{\n\t\t\tProxy: http.ProxyFromEnvironment,\n\t\t\tDialContext: 
(&net.Dialer{\n\t\t\t\tTimeout: options.Timeout,\n\t\t\t\tKeepAlive: options.KeepAlive,\n\t\t\t\tDualStack: true,\n\t\t\t}).DialContext,\n\t\t\tMaxIdleConns: options.MaxIdleConns,\n\t\t\tIdleConnTimeout: 90 * time.Second,\n\t\t\tTLSHandshakeTimeout: 10 * time.Second,\n\t\t\tExpectContinueTimeout: 1 * time.Second,\n\t\t\tTLSClientConfig: &tls.Config{\n\t\t\t\tInsecureSkipVerify: options.Insecure,\n\t\t\t},\n\t\t},\n\t}\n}", "title": "" } ]
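The SetProxy(host, port, userName, password) helpers quoted in the passages above all feed their arguments into an HTTP proxy setting on the underlying client. A minimal, self-contained Go sketch of the same idea, assuming a plain CONNECT-style HTTP proxy with basic credentials (the helper name newProxiedClient and the example host are illustrative assumptions, not taken from any of the libraries quoted above):

package main

import (
	"fmt"
	"net/http"
	"net/url"
)

// newProxiedClient returns an *http.Client that sends its traffic through
// the HTTP proxy at host:port, authenticating with userName and password.
func newProxiedClient(host string, port int, userName, password string) *http.Client {
	proxyURL := &url.URL{
		Scheme: "http",
		Host:   fmt.Sprintf("%s:%d", host, port),
	}
	if userName != "" {
		// Credentials are carried in the proxy URL; net/http derives the
		// Proxy-Authorization (basic auth) header from them automatically.
		proxyURL.User = url.UserPassword(userName, password)
	}
	return &http.Client{
		Transport: &http.Transport{Proxy: http.ProxyURL(proxyURL)},
	}
}

func main() {
	client := newProxiedClient("proxy.example.com", 8080, "user", "secret")
	resp, err := client.Get("https://example.com/")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}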
a99fd79b651c12d68d62713e1d6dbd19
Controller is a free data retrieval call binding the contract method 0xf77c4791. Solidity: function controller() constant returns(address)
[ { "docid": "6080c76980ff26b4ce03e36246b2cbbc", "score": "0.69834864", "text": "func (_RoundsManager *RoundsManagerCaller) Controller(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _RoundsManager.contract.Call(opts, out, \"controller\")\n\treturn *ret0, err\n}", "title": "" } ]
[ { "docid": "19dd0435981bff4b56e5cdfbc4cfc047", "score": "0.70125777", "text": "func (_HarvestAutoStake *HarvestAutoStakeCaller) Controller(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _HarvestAutoStake.contract.Call(opts, out, \"controller\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "1ef613fe741d2ba36925fe5ecbf1694d", "score": "0.61950046", "text": "func bindController(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor) (*bind.BoundContract, error) {\n\tparsed, err := abi.JSON(strings.NewReader(ControllerABI))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn bind.NewBoundContract(address, parsed, caller, transactor), nil\n}", "title": "" }, { "docid": "c1d9f581d88994627c3b9de43dec63cc", "score": "0.61095846", "text": "func (_RoundsManager *RoundsManagerSession) Controller() (common.Address, error) {\n\treturn _RoundsManager.Contract.Controller(&_RoundsManager.CallOpts)\n}", "title": "" }, { "docid": "2fd31d1c0b085199076a5e548521ede3", "score": "0.60930777", "text": "func (_IControlled *IControlledCaller) GetController(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _IControlled.contract.Call(opts, out, \"getController\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "5c38b53889abff8fe942ba6dcd5cd4e3", "score": "0.60730714", "text": "func (n *Name) Controller() (common.Address, error) {\n\treturn n.registry.Owner(n.Name)\n}", "title": "" }, { "docid": "9a35c5647741270cc8f53122ab9f0b02", "score": "0.6069669", "text": "func (_RoundsManager *RoundsManagerCallerSession) Controller() (common.Address, error) {\n\treturn _RoundsManager.Contract.Controller(&_RoundsManager.CallOpts)\n}", "title": "" }, { "docid": "cf8cb5b94b7db09899d3b316f32d748d", "score": "0.604927", "text": "func (_ReputationToken *ReputationTokenCaller) GetController(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _ReputationToken.contract.Call(opts, out, \"getController\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "cc99f45e0ac0029ca38b347016047832", "score": "0.60390514", "text": "func (_Controlled *ControlledCaller) GetController(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _Controlled.contract.Call(opts, out, \"getController\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "be1418bc277bd28e7654392e1bc98f95", "score": "0.59933746", "text": "func (_CashAutoConverter *CashAutoConverterCaller) GetController(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _CashAutoConverter.contract.Call(opts, out, \"getController\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "7e209c791f740df200674953eaa52646", "score": "0.5947966", "text": "func (_IControlled *IControlledSession) GetController() (common.Address, error) {\n\treturn _IControlled.Contract.GetController(&_IControlled.CallOpts)\n}", "title": "" }, { "docid": "235b65fc21af2036a9d68ec5a1a56e45", "score": "0.5917071", "text": "func (_HarvestAutoStake *HarvestAutoStakeSession) Controller() (common.Address, error) {\n\treturn _HarvestAutoStake.Contract.Controller(&_HarvestAutoStake.CallOpts)\n}", "title": "" }, { "docid": "d7a666b26476645cead3e7982544d6fa", "score": "0.5878817", "text": "func (_Controlled *ControlledSession) GetController() 
(common.Address, error) {\n\treturn _Controlled.Contract.GetController(&_Controlled.CallOpts)\n}", "title": "" }, { "docid": "a2eed3f6491a217174bad82a7a007740", "score": "0.5867448", "text": "func (_CashAutoConverter *CashAutoConverterSession) GetController() (common.Address, error) {\n\treturn _CashAutoConverter.Contract.GetController(&_CashAutoConverter.CallOpts)\n}", "title": "" }, { "docid": "c9f2e0806f9fad43377f0cd219c20b07", "score": "0.5854829", "text": "func controllerHandler(s *state.State, w http.ResponseWriter, r *http.Request) {\n\t// query all microcontrollers and connected peripherals\n\tlist, err := db.GetRegistrationPeripherals(s)\n\tif err != nil {\n\t\ts.Log.Errorln(err)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tw.Write([]byte(\"internal server error\"))\n\t\treturn\n\t}\n\t// convert data to JSON\n\tbuff, err := json.Marshal(list)\n\tif err != nil {\n\t\ts.Log.Errorln(err)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tw.Write([]byte(\"internal server error\"))\n\t\treturn\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Write(buff)\n}", "title": "" }, { "docid": "f1d67a12f48940d0a5495d4202eaf371", "score": "0.58303756", "text": "func (_HarvestAutoStake *HarvestAutoStakeCallerSession) Controller() (common.Address, error) {\n\treturn _HarvestAutoStake.Contract.Controller(&_HarvestAutoStake.CallOpts)\n}", "title": "" }, { "docid": "5a97a929c451f97f9667df62b16addb6", "score": "0.5773845", "text": "func (_IControlled *IControlledCallerSession) GetController() (common.Address, error) {\n\treturn _IControlled.Contract.GetController(&_IControlled.CallOpts)\n}", "title": "" }, { "docid": "077d46dc68999d19cfcfa4aac0144f68", "score": "0.5758772", "text": "func (_ReputationToken *ReputationTokenSession) GetController() (common.Address, error) {\n\treturn _ReputationToken.Contract.GetController(&_ReputationToken.CallOpts)\n}", "title": "" }, { "docid": "ec3374db3fd33c712f499e95df9bd8c8", "score": "0.5756295", "text": "func GetUserController(c echo.Context) error", "title": "" }, { "docid": "86efd2265fd96ca871c577d40e6d74ec", "score": "0.5756248", "text": "func (_Controlled *ControlledCallerSession) GetController() (common.Address, error) {\n\treturn _Controlled.Contract.GetController(&_Controlled.CallOpts)\n}", "title": "" }, { "docid": "a0c34fe9b4f276b909612cdddf733508", "score": "0.5700895", "text": "func (_CashAutoConverter *CashAutoConverterCallerSession) GetController() (common.Address, error) {\n\treturn _CashAutoConverter.Contract.GetController(&_CashAutoConverter.CallOpts)\n}", "title": "" }, { "docid": "8df30896f5633e8e5b9e2182fd9c9d13", "score": "0.5669974", "text": "func (_ReputationToken *ReputationTokenCallerSession) GetController() (common.Address, error) {\n\treturn _ReputationToken.Contract.GetController(&_ReputationToken.CallOpts)\n}", "title": "" }, { "docid": "71193d5f56b452daacba33c3c0f0e712", "score": "0.5656943", "text": "func (_Delegator *DelegatorCaller) GetController(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _Delegator.contract.Call(opts, out, \"getController\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "8e820cf899333c1ca46a495aa984f336", "score": "0.56419754", "text": "func (cl *Client) controller(ctx context.Context) (*broker, error) {\n\tget := func() int32 {\n\t\tcl.controllerIDMu.Lock()\n\t\tdefer cl.controllerIDMu.Unlock()\n\t\treturn cl.controllerID\n\t}\n\n\tvar id int32\n\tif id = get(); id < 0 {\n\t\tif err := 
cl.fetchBrokerMetadata(ctx); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif id = get(); id < 0 {\n\t\t\treturn nil, &errUnknownController{id}\n\t\t}\n\t}\n\n\treturn cl.brokerOrErr(nil, id, &errUnknownController{id})\n}", "title": "" }, { "docid": "1844427173508ec805f815834a52ab16", "score": "0.555035", "text": "func GetUserController(c echo.Context) error {\n\t\n}", "title": "" }, { "docid": "dd7187429848492923ac5d208949a84d", "score": "0.55113244", "text": "func (_Delegator *DelegatorSession) GetController() (common.Address, error) {\n\treturn _Delegator.Contract.GetController(&_Delegator.CallOpts)\n}", "title": "" }, { "docid": "cf6ce96612640e6ffc5cdfbbc1a53153", "score": "0.543402", "text": "func (_Controller *ControllerRaw) Call(opts *bind.CallOpts, result interface{}, method string, params ...interface{}) error {\n\treturn _Controller.Contract.ControllerCaller.contract.Call(opts, result, method, params...)\n}", "title": "" }, { "docid": "4d625c6ad4e617dc99206a2ef2ce8e4c", "score": "0.5410332", "text": "func (_DelegationTarget *DelegationTargetCaller) GetController(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _DelegationTarget.contract.Call(opts, out, \"getController\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "d1c78ca0203bf300d62a1005914f79fa", "score": "0.5390515", "text": "func (m *crdManager) Controller(resource CustomResource) *CRDController {\n\t//controller, present = m.clientMap[resource]\n\treturn m.clientMap[resource]\n}", "title": "" }, { "docid": "359d0315d798aaf06197672ced6bb473", "score": "0.5389084", "text": "func (stack Stack) Controller(c Controller) http.Handler {\n\treturn stack.Handler(c)\n}", "title": "" }, { "docid": "259b075b8d34cd6e073b35d067b0c7aa", "score": "0.536942", "text": "func newGRPCController(Type string, Address string) (*GRPCController, error) {\n\tc := GRPCController{\n\t\tType: Type,\n\t\tVersion: \"1.0.0\",\n\t\tAddress: Address,\n\t\tConnection: nil,\n\t\tClient: nil,\n\t}\n\n\t// define the communication options\n\tvar opts []grpc.DialOption\n\topts = append(opts, grpc.WithInsecure())\n\n\t// create the connection\n\tconnection, err := grpc.Dial(Address, opts...)\n\tif err != nil {\n\t\tutil.LogError(\"main\", \"CTRL\", \"unable to connect to the controller: \" + Type + \"\\n\" + err.Error())\n\t\treturn nil, err\n\t}\n\tc.Connection = connection\n\n\t// create the client\n\tc.Client = NewControllerClient(c.Connection)\n\n\t// check availability of client\n\tif !c.Check() {\n\t\tutil.LogError(\"main\", \"CTRL\", \"controller: \" + Type + \" unavailable\")\n\t\treturn nil, errors.New(\"controller: \" + Type + \" unavailable\")\n\t}\n\n\t// success\n\tutil.LogInfo(\"main\", \"CTRL\", \"controller: \" + Type + \" available\")\n\treturn &c, nil\n}", "title": "" }, { "docid": "7c8b610b7ba92a9ae2989c4c286a299a", "score": "0.53059906", "text": "func (_Controller *ControllerCaller) Owner(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _Controller.contract.Call(opts, out, \"owner\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "500f8eda914e2c88f7ced2b1f2ea14d1", "score": "0.53045636", "text": "func (_Delegator *DelegatorCallerSession) GetController() (common.Address, error) {\n\treturn _Delegator.Contract.GetController(&_Delegator.CallOpts)\n}", "title": "" }, { "docid": "d51df92309d04cbac23ff8e1c10e20a3", "score": "0.5287392", "text": "func (kc *KClient) Controller() (*sarama.Broker, error) 
{\n\treturn kc.cl.Controller()\n}", "title": "" }, { "docid": "03417694eab7d4382ae0ab2cc90dfb67", "score": "0.5275194", "text": "func (pk *PublicKey) Controller() string {\n\treturn stringEntry((*pk)[jsonldController])\n}", "title": "" }, { "docid": "905ba643aadf75b17e8e424792c745db", "score": "0.5268038", "text": "func (controller *MainController) Get() {\n\tcontroller.TplName = \"index.html\"\n\tbeego.ReadFromRequest(&controller.Controller)\n}", "title": "" }, { "docid": "d1e12ea3127245b04405256fba538b3b", "score": "0.5231478", "text": "func Index(w http.ResponseWriter, r *http.Request, channel *models.Channel) {\n fmt.Fprintln(w, \"Request Service for Ocean Attestations!\")\n}", "title": "" }, { "docid": "d8bf926e7e65df369e164f9c83f5e5d8", "score": "0.51941115", "text": "func (_DelegationTarget *DelegationTargetSession) GetController() (common.Address, error) {\n\treturn _DelegationTarget.Contract.GetController(&_DelegationTarget.CallOpts)\n}", "title": "" }, { "docid": "fe17003c2cafd1c4745807d25aaa0447", "score": "0.51588506", "text": "func (c *MainController) Get() {\n c.Ctx.WriteString(\"Hello World!\\n\")\n}", "title": "" }, { "docid": "a7f7abbcfffc5d4ca4c3a5b62aba68bd", "score": "0.5151396", "text": "func (_IControlled *IControlledTransactor) SetController(opts *bind.TransactOpts, _controller common.Address) (*types.Transaction, error) {\n\treturn _IControlled.contract.Transact(opts, \"setController\", _controller)\n}", "title": "" }, { "docid": "c76c85cf6a78e383f669dfc32d6f26e0", "score": "0.51364297", "text": "func (_Controller *ControllerCallerRaw) Call(opts *bind.CallOpts, result interface{}, method string, params ...interface{}) error {\n\treturn _Controller.Contract.contract.Call(opts, result, method, params...)\n}", "title": "" }, { "docid": "430c2acd1cd3ca6e6de6268f51755c4f", "score": "0.511137", "text": "func (_DelegationTarget *DelegationTargetCallerSession) GetController() (common.Address, error) {\n\treturn _DelegationTarget.Contract.GetController(&_DelegationTarget.CallOpts)\n}", "title": "" }, { "docid": "f5f364feeea73ea24b2c6872037ed317", "score": "0.5107374", "text": "func (_Controller *ControllerSession) Owner() (common.Address, error) {\n\treturn _Controller.Contract.Owner(&_Controller.CallOpts)\n}", "title": "" }, { "docid": "e90636ab79a7ad3f1ea4035f5ac5965d", "score": "0.51038224", "text": "func (e *executor) Controller(t *api.Task) (exec.Controller, error) {\n\t// Get the node description from the executor field\n\te.mutex.Lock()\n\tnodeDescription := e.node\n\te.mutex.Unlock()\n\tctlr, err := newController(e.client, nodeDescription, t, secrets.Restrict(e.secrets, t))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn ctlr, nil\n}", "title": "" }, { "docid": "fc12c63473214d1c103ba2388e58cb70", "score": "0.50965804", "text": "func FooController(w http.ResponseWriter, r *http.Request) {\n\tapiResponse.APIVersion = \"v1\"\n\tapiResponse.APIName = \"foo\"\n\tapiResponse.AppVersion = \"v0.0.1\"\n\tapiResponse.ResourcePath = \"/\"\n\n\t// get remote\n\tremote, err := net.GetRemote(context.GetRemoteHost())\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tapiResponse.Remote = remote\n\n\tjson.NewEncoder(w).Encode(apiResponse)\n}", "title": "" }, { "docid": "8da58883b669db92f94ad152320be571", "score": "0.5091202", "text": "func (nm *NetworkManager) Controller() libnetwork.NetworkController {\n\treturn nm.controller\n}", "title": "" }, { "docid": "0eafb696bad75397a005b370139655f7", "score": "0.508852", "text": "func (_CashAutoConverter *CashAutoConverterTransactor) 
SetController(opts *bind.TransactOpts, _controller common.Address) (*types.Transaction, error) {\n\treturn _CashAutoConverter.contract.Transact(opts, \"setController\", _controller)\n}", "title": "" }, { "docid": "e93c40bb2531aa7a79bdbf58676b3fb7", "score": "0.5084873", "text": "func (self *OvsDriver) AddController(ipAddr string, portNo uint16) error {\n\t// Format target string\n\ttarget := fmt.Sprintf(\"tcp:%s:%d\", ipAddr, portNo)\n\tctrlerUuidStr := fmt.Sprintf(\"local\")\n\tctrlerUuid := []libovsdb.UUID{{ctrlerUuidStr}}\n\n\t// If controller already exists, nothing to do\n\tif self.IsControllerPresent(ipAddr, portNo) {\n\t\treturn nil\n\t}\n\n\t// insert a row in Controller table\n\tcontroller := make(map[string]interface{})\n\tcontroller[\"target\"] = target\n\n\t// Add an entry in Controller table\n\tctrlerOp := libovsdb.Operation{\n\t\tOp: \"insert\",\n\t\tTable: \"Controller\",\n\t\tRow: controller,\n\t\tUUIDName: ctrlerUuidStr,\n\t}\n\n\t// mutate the Controller column of the row in the Bridge table\n\tmutateSet, _ := libovsdb.NewOvsSet(ctrlerUuid)\n\tmutation := libovsdb.NewMutation(\"controller\", \"insert\", mutateSet)\n\tcondition := libovsdb.NewCondition(\"name\", \"==\", self.OvsBridgeName)\n\tmutateOp := libovsdb.Operation{\n\t\tOp: \"mutate\",\n\t\tTable: \"Bridge\",\n\t\tMutations: []interface{}{mutation},\n\t\tWhere: []interface{}{condition},\n\t}\n\n\t// Perform OVS transaction\n\toperations := []libovsdb.Operation{ctrlerOp, mutateOp}\n\treturn self.ovsdbTransact(operations)\n}", "title": "" }, { "docid": "1b4065479e2ddca7b326ad3aacfadd07", "score": "0.5045847", "text": "func (u *LogStruct) Controller(context echo.Context) *logrus.Entry {\n\trequestID := context.Response().Header().Get(echo.HeaderXRequestID)\n\tuserAgent := context.Request().Header.Get(\"User-Agent\")\n\treturn u.Log.WithFields(logrus.Fields{\n\t\t\"time\": time.Now(),\n\t\t\"method\": context.Request().Method,\n\t\t\"requestID\": requestID,\n\t\t\"User-Agent\": userAgent,\n\t\t\"path\": context.Path(),\n\t})\n}", "title": "" }, { "docid": "6658b4915ac00e764d0e8f2189565186", "score": "0.5039322", "text": "func (_Controlled *ControlledTransactor) SetController(opts *bind.TransactOpts, _controller common.Address) (*types.Transaction, error) {\n\treturn _Controlled.contract.Transact(opts, \"setController\", _controller)\n}", "title": "" }, { "docid": "501e56485957e006cf99b404569c5e38", "score": "0.50215197", "text": "func (_IControlled *IControlledTransactorSession) SetController(_controller common.Address) (*types.Transaction, error) {\n\treturn _IControlled.Contract.SetController(&_IControlled.TransactOpts, _controller)\n}", "title": "" }, { "docid": "3670dd77c79ca4eb1fc1f5903264da08", "score": "0.4992869", "text": "func NewController(address common.Address, backend bind.ContractBackend) (*Controller, error) {\n\tcontract, err := bindController(address, backend, backend)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &Controller{ControllerCaller: ControllerCaller{contract: contract}, ControllerTransactor: ControllerTransactor{contract: contract}}, nil\n}", "title": "" }, { "docid": "89c3fc116765a76c0494aeb01b5cca24", "score": "0.4988015", "text": "func NewController() *Controller {\n\treturn &Controller{\n\t\toutputWriter: os.Stdout,\n\t\tstatusStore: make(map[int64]goRoutineStatus),\n\t\tbreakpointHints: make(map[uint64]breakpointHint),\n\t\tcallInstAddrCache: make(map[uint64][]uint64),\n\t\tinterruptCh: make(chan bool, chanBufferSize),\n\t\tpendingStartTracePoint: make(chan uint64, 
chanBufferSize),\n\t\tpendingEndTracePoint: make(chan uint64, chanBufferSize),\n\t}\n}", "title": "" }, { "docid": "1ca1819c856879b22f8f57b97d3b63f3", "score": "0.49798644", "text": "func (_Controller *ControllerTransactorSession) UpdateController(_id [32]byte, _controller common.Address) (*types.Transaction, error) {\n\treturn _Controller.Contract.UpdateController(&_Controller.TransactOpts, _id, _controller)\n}", "title": "" }, { "docid": "081bdc58646cf166f7ef193ba1df17db", "score": "0.497446", "text": "func (_IControlled *IControlledSession) SetController(_controller common.Address) (*types.Transaction, error) {\n\treturn _IControlled.Contract.SetController(&_IControlled.TransactOpts, _controller)\n}", "title": "" }, { "docid": "6c9faa13f582287f0188414e3b98b1ef", "score": "0.49717316", "text": "func (_CashAutoConverter *CashAutoConverterSession) SetController(_controller common.Address) (*types.Transaction, error) {\n\treturn _CashAutoConverter.Contract.SetController(&_CashAutoConverter.TransactOpts, _controller)\n}", "title": "" }, { "docid": "c961a1764f6a9284f63918570569c4c0", "score": "0.49648926", "text": "func (_Controlled *ControlledTransactorSession) SetController(_controller common.Address) (*types.Transaction, error) {\n\treturn _Controlled.Contract.SetController(&_Controlled.TransactOpts, _controller)\n}", "title": "" }, { "docid": "5de52e1baa0fd52d291fe4c82c6fc8af", "score": "0.49491808", "text": "func (_CashAutoConverter *CashAutoConverterTransactorSession) SetController(_controller common.Address) (*types.Transaction, error) {\n\treturn _CashAutoConverter.Contract.SetController(&_CashAutoConverter.TransactOpts, _controller)\n}", "title": "" }, { "docid": "ff401e7941e9d5d4a77c0d34974ee1b5", "score": "0.4937192", "text": "func ClientTransactionRequestController(r *pb.TransactionRequest, db *sql.DB) error {\n\texists, err := local.GetTransactionExists(r.Name, db)\n\tif err != nil {\n\t\treturn err\n\t}\n\tcurrentTime := time.Now().UTC()\n\n\tif exists {\n\t\tif err := saveTransactionToRiak(r, currentTime); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := local.UpdateTransactionStatus(r.Name, currentTime, db); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := MulticastTransactionClient(r, currentTime); err != nil {\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\tif err := transferTransaction(r); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn err\n}", "title": "" }, { "docid": "55220472d096968b443f64f46ef45fbc", "score": "0.4925659", "text": "func (_Controller *ControllerCallerSession) Owner() (common.Address, error) {\n\treturn _Controller.Contract.Owner(&_Controller.CallOpts)\n}", "title": "" }, { "docid": "7a64b368d31ab29cc72dd5b66113c919", "score": "0.49057922", "text": "func (_Delegator *DelegatorCaller) ControllerLookupName(opts *bind.CallOpts) ([32]byte, error) {\n\tvar (\n\t\tret0 = new([32]byte)\n\t)\n\tout := ret0\n\terr := _Delegator.contract.Call(opts, out, \"controllerLookupName\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "7d79bfa2787cbe41401aed94ee7c594b", "score": "0.49027497", "text": "func GetWeatherController(c echo.Context) error {\n\t\tkey := \"97a0db62b9ec2607eaf18e0dc038ea42\"\n\t\tcity := c.Param(\"city\")\n\t\tresponse, _ := http.Get(\"https://api.openweathermap.org/data/2.5/weather?appid=\"+key+\"&q=\"+city)\n\t\tresponseData, _ := ioutil.ReadAll(response.Body)\n\t defer response.Body.Close()\n\t\n\t var TempCity WeatherCity\n\t json.Unmarshal(responseData, &TempCity)\n \n\t TemperaturCelcius := (math.Round((TempCity.Main.Temp - 
273.00)*100)/100)\n \n\t\treturn c.JSON(http.StatusOK, map[string]interface{}{\n\t \"city\": city,\n\t \"temperatur\": TemperaturCelcius,\n\t})\n}", "title": "" }, { "docid": "b9449a68e00c2dc021c6f17bd49668a1", "score": "0.49022496", "text": "func (_Controlled *ControlledSession) SetController(_controller common.Address) (*types.Transaction, error) {\n\treturn _Controlled.Contract.SetController(&_Controlled.TransactOpts, _controller)\n}", "title": "" }, { "docid": "89b17d14d8296dc024d102f44c259abc", "score": "0.49011013", "text": "func (_CSAI *CSAICaller) Comptroller(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _CSAI.contract.Call(opts, out, \"comptroller\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "f312a253c4e579d914a10fd9db423185", "score": "0.48912856", "text": "func NewController(sockAddr string) *Controller {\n\treturn &Controller{socket.New(sockAddr)}\n}", "title": "" }, { "docid": "d10ba3c4db1bdd83e5b915ee99b88336", "score": "0.48843196", "text": "func (_Controller *ControllerSession) UpdateController(_id [32]byte, _controller common.Address) (*types.Transaction, error) {\n\treturn _Controller.Contract.UpdateController(&_Controller.TransactOpts, _id, _controller)\n}", "title": "" }, { "docid": "3b7bee28b99442dfcc6490cf81d8f1d6", "score": "0.4870461", "text": "func (_Delegator *DelegatorTransactor) SetController(opts *bind.TransactOpts, _controller common.Address) (*types.Transaction, error) {\n\treturn _Delegator.contract.Transact(opts, \"setController\", _controller)\n}", "title": "" }, { "docid": "d1345a175fdd9b3fe4702364afdf4496", "score": "0.48697522", "text": "func (_Vendingmachine *VendingmachineCaller) Backend(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _Vendingmachine.contract.Call(opts, out, \"backend\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "4a4df91f33e6368feaae699d97c45caf", "score": "0.48416206", "text": "func (_Controller *ControllerTransactor) UpdateController(opts *bind.TransactOpts, _id [32]byte, _controller common.Address) (*types.Transaction, error) {\n\treturn _Controller.contract.Transact(opts, \"updateController\", _id, _controller)\n}", "title": "" }, { "docid": "cc7342bbc0cca559af7c50dbf74d3d6f", "score": "0.48344868", "text": "func newController(width, height int, bg, fg color.RGBA) *controller {\n\timg := image.NewRGBA(image.Rect(0, 0, width, height))\n\tdraw.Draw(img, img.Bounds(), &image.Uniform{bg}, image.ZP, draw.Src)\n\trs := rand.NewSource(time.Now().UTC().UnixNano())\n\twp := perlin.NewPerlinRandSource(2., 2., 3, rs)\n\thp := perlin.NewPerlinRandSource(2., 2., 3, rs)\n\tclients := make(map[string]*client)\n\tnewClients := make(chan *client)\n\tleavingClients := make(chan *client)\n\treturn &controller{img, wp, hp, bg, fg, clients, newClients, leavingClients, 0}\n}", "title": "" }, { "docid": "fbc76aa0ed7e37dd310ba0d4130a7a3c", "score": "0.48156664", "text": "func (m Message) Controller() int8 {\n\tif m.MsgType.Is(ControlChangeMsg) {\n\t\tc, _ := utils.ParseTwoUint7(m.Data[1], m.Data[2])\n\t\treturn int8(c)\n\t}\n\n\treturn -1\n}", "title": "" }, { "docid": "5dd64a308650bf700a2baa866cfb5cc6", "score": "0.48109224", "text": "func newController() *myController {\n\tc := &myController{}\n\n\t// create the view\n\tc.a = newApp(c)\n\n\t// create the turtle to control\n\tc.mt = newMiniTurtle()\n\n\treturn c\n}", "title": "" }, { "docid": "f45a0c773d25f8c0aea4de7b84cfa30f", "score": 
"0.48013613", "text": "func CondorMainCtrl(configs models.Configuration) gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\t// We need to provide models.DataTableSearchBuilderRequest to the controller initializer and use same type in casting\n\t\tctx, cancel, req := InitializeCtxAndBindRequestBody(c, models.DataTableRequest{})\n\t\tdefer cancel()\n\n\t\tc.JSON(http.StatusOK,\n\t\t\tgetCondorWfCpuEffResults(\n\t\t\t\tctx,\n\t\t\t\tc,\n\t\t\t\treq.(models.DataTableRequest),\n\t\t\t\tconfigs,\n\t\t\t))\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "aedd3c8f25ab9e274c42dd5e9bf3b812", "score": "0.47995663", "text": "func AccountGetController(c *fiber.Ctx) error {\n\tadminLevel := index.ParsePrivileges(index.GetSession(c).Get(\"User\"))\n\n\tif !userModels.IsAllowedAccess(adminLevel, 0) { // level >= required\n\t\treturn c.Redirect(\"/users/sign-out?err=You have been signed out\")\n\t}\n\n\tvar err error\n\tuser := index.GetSession(c).Get(\"User\")\n\tx, ok := user.(index.SessionData)\n\n\tif !ok {\n\t\treturn c.Redirect(\"/?err=an error has occured\")\n\t}\n\n\ttempUser := userModels.User{Username: x.Username}\n\n\tuser, err = tempUser.GetUserByUsername()\n\n\tif err != nil {\n\t\treturn c.Redirect(\"/?err=session invalid\")\n\t}\n\n\tif user != nil {\n\t\treturn c.Render(\"account/account\", fiber.Map{\n\t\t\t\"Title\": \"Account\",\n\t\t\t\"Subtitle\": \"In publishing and graphic design, Lorem ipsum is a placeholder text commonly used to demonstrate the visual form of a document or a typeface without relying on meaningful content. Lorem ipsum may\",\n\t\t\t\"User\": user,\n\t\t\t\"Breadcrumbs\": []map[string]string{\n\t\t\t\t{\"text\": \"Home\", \"linkTo\": \"/\"},\n\t\t\t\t{\"text\": \"Account\", \"linkTo\": \"/users/account\"},\n\t\t\t},\n\t\t\t\"Error\": c.Query(\"err\"),\n\t\t\t\"Success\": c.Query(\"s\"),\n\t\t}, \"layouts/main\")\n\t}\n\n\treturn c.Redirect(\"/?err=please sign in\")\n}", "title": "" }, { "docid": "ef4f0a4d7569284217cc328970b1f3bc", "score": "0.47868416", "text": "func (v *execControllerValue) GetController() controller.Controller {\n\treturn v.ctrl\n}", "title": "" }, { "docid": "9d144acca0cf7392564201e6b906aeab", "score": "0.47818872", "text": "func HealthzController(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\ttraceKey := r.Context().Value(config.TraceKey)\n\tpack := runtime.FuncForPC(reflect.ValueOf(HealthzController).Pointer()).Name()\n\n\tlog.Info(log.Msg(\"Request healthz\"), log.O(\"version\", config.Version), log.O(\"package\", pack),\n\t\tlog.O(\"project\", config.ProjectName), log.O(config.TraceKey, traceKey))\n\n\tdb, err := provider.ConnectSQL()\n\tif err != nil {\n\t\tlog.Error(log.Msg(\"Failed connect to database\", err.Error()), log.O(\"version\", config.Version),\n\t\t\tlog.O(\"project\", config.ProjectName), log.O(config.TraceKey, traceKey), log.O(\"package\", pack))\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tfmt.Fprintf(w, helper.FailResponse(err.Error()).Stringify())\n\t\treturn\n\t}\n\n\tif err := provider.SQLPing(db); err != nil {\n\t\tlog.Error(log.Msg(\"Failed ping database\", err.Error()), log.O(\"version\", config.Version),\n\t\t\tlog.O(\"project\", config.ProjectName), log.O(config.TraceKey, traceKey), log.O(\"package\", pack))\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tfmt.Fprintf(w, helper.FailResponse(err.Error()).Stringify())\n\t\treturn\n\t}\n\n\tw.WriteHeader(http.StatusOK)\n\tfmt.Fprintf(w, helper.SuccessResponse().Stringify())\n}", "title": "" 
}, { "docid": "1d752cbf5c0dd7e6cb27ee4a2e35fbbb", "score": "0.4778056", "text": "func GetWeatherCtrl(c *gin.Context) {\n\tf, err := GetHQWeatherForecast(\"now\")\n\tif err != nil {\n\t\tlog.Printf(\"ERROR %v\", err)\n\t\tc.JSON(http.StatusInternalServerError, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\tc.JSON(http.StatusOK, f)\n}", "title": "" }, { "docid": "d68dd05fd539cc029d749a8d2d191981", "score": "0.4777274", "text": "func (c *ProductController) Index(w http.ResponseWriter, r *http.Request) {\n\n}", "title": "" }, { "docid": "a4460bff1303091ab48bc28e91e0802a", "score": "0.47726706", "text": "func (*ipReApi) Index(r *ghttp.Request) {\n\t// 返回ip\n\tr.Response.Write(gstr.StrTillEx(r.RemoteAddr, string(':')))\n\tsaveIP(r.RemoteAddr)\n}", "title": "" }, { "docid": "3f02db117de70b2a66a2f72abc7d25df", "score": "0.4753962", "text": "func (_ReputationToken *ReputationTokenCaller) ControllerLookupName(opts *bind.CallOpts) ([32]byte, error) {\n\tvar (\n\t\tret0 = new([32]byte)\n\t)\n\tout := ret0\n\terr := _ReputationToken.contract.Call(opts, out, \"controllerLookupName\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "6d64064ec6c7c6644359a50113fa45e0", "score": "0.47512087", "text": "func NewController(app AppInterface) *Controller {\n\tc := new(Controller)\n\tc.connectMode = ServerMode\n\tc.id = uint16(rand.Uint32())\n\n\t// for debug logs\n\t// log.SetLevel(log.DebugLevel)\n\n\t// Save the handler\n\tc.app = app\n\tc.exitCh = make(chan struct{})\n\treturn c\n}", "title": "" }, { "docid": "fa99bff2e46dc3f7ca6154ee32e9e491", "score": "0.4745399", "text": "func (bunny *Bunny) Controller(controller interface{}) {\n\tclassType := reflect.TypeOf(controller)\n\tclsName := classType.Name()\n\tclsName = strings.ToLower(strings.Replace(clsName, \"Controller\", \"\", 1))\n\tbunny.controllers[clsName] = controller\n}", "title": "" }, { "docid": "da7bf61d1a07598e4edc96e0255f55da", "score": "0.47269446", "text": "func Get() internal.Presenter {\n\treturn controller.Get()\n}", "title": "" }, { "docid": "fc48d9ecadb2d97bca04b06c8d71c577", "score": "0.4719087", "text": "func (cs controllers) Controller(name string) *controller {\n\tfor i := 0; i < len(cs.list); i++ {\n\t\tif cs.list[i].Name == name {\n\t\t\treturn cs.list[i]\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6f56a1497941523b1a9116176a732f7b", "score": "0.47181946", "text": "func getControllerRPCHandler(anonymous bool) http.Handler {\n\tvar mwHandlers = []MiddlewareHandler{\n\t\tTimeValidityHandler,\n\t}\n\tif !anonymous {\n\t\tmwHandlers = append(mwHandlers, RPCSignatureHandler)\n\t}\n\n\ts := jsonrpc.NewServer()\n\tcodec := json.NewCodec()\n\ts.RegisterCodec(codec, \"application/json\")\n\ts.RegisterCodec(codec, \"application/json; charset=UTF-8\")\n\ts.RegisterService(new(controllerRPCService), \"Controller\")\n\tmux := router.NewRouter()\n\t// Add new RPC services here\n\tmux.Handle(\"/rpc\", s)\n\tmux.Handle(\"/{file:.*}\", http.FileServer(assetFS()))\n\n\trpcHandler := registerCustomMiddleware(mux, mwHandlers...)\n\treturn rpcHandler\n}", "title": "" }, { "docid": "dc444f41f920ea8df6a6014010096710", "score": "0.47177002", "text": "func (_RoundsManager *RoundsManagerTransactor) SetController(opts *bind.TransactOpts, _controller common.Address) (*types.Transaction, error) {\n\treturn _RoundsManager.contract.Transact(opts, \"setController\", _controller)\n}", "title": "" }, { "docid": "4761c488d89caad1be84c0e6736eaa89", "score": "0.47130984", "text": "func IndexHandler(w http.ResponseWriter, r *http.Request) 
{\r\n\tw.Write([]byte(\"Wellcome to Gempa Line Bot - Get data from client\"))\r\n\r\n}", "title": "" }, { "docid": "46a11448646af811501dd694cdf2ab9d", "score": "0.4709169", "text": "func BuildController() {\n\tmg.SerialDeps(EnsureControllerGen, GenerateController)\n\n\tLoadMetadatda()\n\n\tmust.RunV(\"go\", \"build\", \"-o\", \"bin/manager\", \"main.go\")\n}", "title": "" }, { "docid": "deecaf8b7b99eb17c213b80417c09b92", "score": "0.47050655", "text": "func (_Controller *ControllerRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) {\n\treturn _Controller.Contract.ControllerTransactor.contract.Transfer(opts)\n}", "title": "" }, { "docid": "eeb0c6e6db47aa5726f0c6fff4795725", "score": "0.47025913", "text": "func (this *MainController) Code() {\n\tjsonData := &models.SessionResult{}\n\tdefer this.jsonResult(jsonData)\n\n\t// Get form value.\n\tjscode := this.GetString(\"js_code\")\n\n\t// Check valid.\n\tif len(jscode) == 0 {\n\t\tjsonData.WxErr.ErrCode = int(pb.Failed)\n\t\tjsonData.WxErr.ErrMsg = \"parameter error\"\n\t\treturn\n\t}\n\tbeego.Info(\"get session by jscode: \" + jscode)\n\n\tif models.RunMode() {\n\t\tjsonData.WxErr.ErrCode = int(pb.Failed)\n\t\tjsonData.WxErr.ErrMsg = \"mode closed\"\n\t\treturn\n\t}\n\n\t//if !this.isPost() {\n\t//\tjsonData.WxErr.ErrCode = int(pb.Failed)\n\t//\tjsonData.WxErr.ErrMsg = \"method error\"\n\t//\treturn\n\t//}\n\n\tif !this.token {\n\t\tjsonData.WxErr.ErrCode = int(pb.Failed)\n\t\tjsonData.WxErr.ErrMsg = \"token error\"\n\t\treturn\n\t}\n\n\t//test TODO 控制频率\n\tip := this.getClientIp()\n\tsession, err := models.GetSessionByCode(jscode, ip)\n\tif err != nil {\n\t\tjsonData.WxErr.ErrCode = int(pb.Failed)\n\t\tjsonData.WxErr.ErrMsg = err.Error()\n\t\treturn\n\t}\n\n\twsaddr := beego.AppConfig.String(\"ws.addr\")\n\tjsonData.Session = session\n\tjsonData.WsAddr = wsaddr + session\n}", "title": "" }, { "docid": "3219442cf432283867c8c09a0febf007", "score": "0.46954975", "text": "func (self *OvsDriver) SetController(target string) error {\n\tif target == \"\" {\n\t\treturn fmt.Errorf(\"target cannot be empty\")\n\t}\n\tctrlerUuidStr := fmt.Sprintf(\"local\")\n\tctrlerUuid := []libovsdb.UUID{{GoUUID: ctrlerUuidStr}}\n\n\t// If controller already exists, nothing to do\n\tif self.IsControllerPresent(target) {\n\t\treturn fmt.Errorf(\"Controller %s already exist\", target)\n\t}\n\n\t// insert a row in Controller table\n\tcontroller := make(map[string]interface{})\n\tcontroller[\"target\"] = target\n\n\t// Add an entry in Controller table\n\tctrlerOp := libovsdb.Operation{\n\t\tOp: insertOpr,\n\t\tTable: \"Controller\",\n\t\tRow: controller,\n\t\tUUIDName: ctrlerUuidStr,\n\t}\n\n\t// mutate the Controller column of Bridge table\n\tmutateSet, _ := libovsdb.NewOvsSet(ctrlerUuid)\n\tmutation := libovsdb.NewMutation(\"controller\", insertOpr, mutateSet)\n\tcondition := libovsdb.NewCondition(\"name\", \"==\", self.OvsBridgeName)\n\tmutateOp := libovsdb.Operation{\n\t\tOp: mutateOpr,\n\t\tTable: \"Bridge\",\n\t\tMutations: []interface{}{mutation},\n\t\tWhere: []interface{}{condition},\n\t}\n\n\toperations := []libovsdb.Operation{ctrlerOp, mutateOp}\n\treturn self.OvsdbTransact(operations)\n}", "title": "" }, { "docid": "4db32ea055bd0119c0cca2d5d03944f7", "score": "0.46939108", "text": "func GetUserController(c echo.Context) error {\n\tid, _ := strconv.Atoi(c.Param(\"id\"))\n\tuser, err := models.GetUser(id)\n\n\tif err != nil {\n\t\treturn echo.NewHTTPError(http.StatusBadRequest, err.Error())\n\t}\n\treturn c.JSON(http.StatusOK, user)\n}", "title": 
"" }, { "docid": "250356b81dcfe9de8d135a9af696feb1", "score": "0.46862608", "text": "func newPersonController(person r.PersonRepository) *Person {\n\tlog.Println(\"Person controller started\")\n\treturn &Person{Repository: person}\n}", "title": "" }, { "docid": "4c87015ad427490beab8433ee7a6c61f", "score": "0.46783933", "text": "func (_Delegator *DelegatorSession) SetController(_controller common.Address) (*types.Transaction, error) {\n\treturn _Delegator.Contract.SetController(&_Delegator.TransactOpts, _controller)\n}", "title": "" }, { "docid": "3011f75c749c1dc8d53729a76f908db3", "score": "0.46761355", "text": "func (_DelegationTarget *DelegationTargetCaller) ControllerLookupName(opts *bind.CallOpts) ([32]byte, error) {\n\tvar (\n\t\tret0 = new([32]byte)\n\t)\n\tout := ret0\n\terr := _DelegationTarget.contract.Call(opts, out, \"controllerLookupName\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "bf9b65007161839c7df4a9169690b983", "score": "0.46700984", "text": "func NewController(safraService contract.SafraService) *Controller {\n\tonce.Do(func() {\n\t\tinstance = &Controller{\n\t\t\tsafraService: safraService,\n\t\t}\n\t})\n\treturn instance\n}", "title": "" }, { "docid": "fffea5a65533d93715d099b0d022cd8d", "score": "0.46664503", "text": "func (_Delegator *DelegatorTransactorSession) SetController(_controller common.Address) (*types.Transaction, error) {\n\treturn _Delegator.Contract.SetController(&_Delegator.TransactOpts, _controller)\n}", "title": "" }, { "docid": "827341351d89e622c903d9caa0189c5e", "score": "0.4656699", "text": "func (e *LoadDataWorker) GetController() *importer.LoadDataController {\n\treturn e.controller\n}", "title": "" }, { "docid": "f5fa0fb9f7c9a910d5d1f3e8cfc111d0", "score": "0.46535298", "text": "func Device(as authorizationserver.AuthorizationServer) http.Handler {\n\t// Display user code form\n\tdisplayForm := func(w http.ResponseWriter, r *http.Request, sub string) {\n\t\t// Only POST verb\n\t\tif r.Method != http.MethodGet {\n\t\t\twithError(w, r, http.StatusMethodNotAllowed, rfcerrors.InvalidRequest().Build())\n\t\t\treturn\n\t\t}\n\n\t\t// Prepare template\n\t\tform := template.Must(template.New(\"user-code-inupt\").Parse(`<!DOCTYPE html>\n<html>\n <head>\n </head>\n <body>\n\t<form action=\"\" method=\"post\">\n\t <label for=\"user_code\">Enter user code:\n\t\t <input type=\"text\" name=\"user_code\">\n\t </label>\n\t</form>\n </body>\n</html>`))\n\n\t\t// Write template to output\n\t\tif err := form.Execute(w, nil); err != nil {\n\t\t\twithError(w, r, http.StatusInternalServerError, rfcerrors.ServerError().Build())\n\t\t\treturn\n\t\t}\n\t}\n\n\t// Validate user code\n\tvalidateUserCode := func(w http.ResponseWriter, r *http.Request, sub string) {\n\t\tr.ParseForm()\n\n\t\t// Only POST verb\n\t\tif r.Method != http.MethodPost {\n\t\t\twithError(w, r, http.StatusMethodNotAllowed, rfcerrors.InvalidRequest().Build())\n\t\t\treturn\n\t\t}\n\n\t\t// Send request to reactor\n\t\tres, err := as.Do(r.Context(), &corev1.DeviceCodeValidationRequest{\n\t\t\tSubject: sub,\n\t\t\tUserCode: r.PostFormValue(\"user_code\"),\n\t\t})\n\t\tauthRes, ok := res.(*corev1.DeviceCodeValidationResponse)\n\t\tif !ok {\n\t\t\twithError(w, r, http.StatusInternalServerError, rfcerrors.ServerError().Build())\n\t\t\treturn\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Println(\"unable to process authorization request:\", err)\n\t\t\twithError(w, r, http.StatusBadRequest, authRes.Error)\n\t\t\treturn\n\t\t}\n\t}\n\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) 
{\n\t\tctx := r.Context()\n\n\t\t// Retrieve subject form context\n\t\tsub, ok := middleware.Subject(ctx)\n\t\tif !ok || sub == \"\" {\n\t\t\twithError(w, r, http.StatusUnauthorized, rfcerrors.InvalidRequest().Build())\n\t\t\treturn\n\t\t}\n\n\t\tswitch r.Method {\n\t\tcase http.MethodGet:\n\t\t\tdisplayForm(w, r, sub)\n\t\tcase http.MethodPost:\n\t\t\tvalidateUserCode(w, r, sub)\n\t\tdefault:\n\t\t\twithError(w, r, http.StatusMethodNotAllowed, rfcerrors.InvalidRequest().Build())\n\t\t\treturn\n\t\t}\n\t})\n}", "title": "" }, { "docid": "61d9bcd06d7cfcc62d25aac47c9afdf0", "score": "0.46511096", "text": "func NewControllerCaller(address common.Address, caller bind.ContractCaller) (*ControllerCaller, error) {\n\tcontract, err := bindController(address, caller, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &ControllerCaller{contract: contract}, nil\n}", "title": "" }, { "docid": "6f668fd057cdb8b3732f9b7ea517842d", "score": "0.46497688", "text": "func (_ReputationToken *ReputationTokenTransactor) SetController(opts *bind.TransactOpts, _controller common.Address) (*types.Transaction, error) {\n\treturn _ReputationToken.contract.Transact(opts, \"setController\", _controller)\n}", "title": "" } ]
58b57c3f6ef417748750a27c69dcaa0a
providerPluginSet returns the set of valid providers that were discovered in the defined search paths.
[ { "docid": "c373b72964f1f9f49a4023c1f7a57d19", "score": "0.7769505", "text": "func (m *Meta) providerPluginSet() discovery.PluginMetaSet {\n\tplugins := discovery.FindPlugins(\"provider\", m.pluginDirs(true))\n\n\t// Add providers defined in the legacy .terraformrc,\n\tif m.PluginOverrides != nil {\n\t\tfor k, v := range m.PluginOverrides.Providers {\n\t\t\tlog.Printf(\"[DEBUG] found plugin override in .terraformrc: %q, %q\", k, v)\n\t\t}\n\t\tplugins = plugins.OverridePaths(m.PluginOverrides.Providers)\n\t}\n\n\tplugins, _ = plugins.ValidateVersions()\n\n\tfor p := range plugins {\n\t\tlog.Printf(\"[DEBUG] found valid plugin: %q, %q, %q\", p.Name, p.Version, p.Path)\n\t}\n\n\treturn plugins\n}", "title": "" } ]
[ { "docid": "8342904aaeb4154547b638dc74470da5", "score": "0.68214214", "text": "func (m *Meta) providerPluginManuallyInstalledSet() discovery.PluginMetaSet {\n\tplugins := discovery.FindPlugins(\"provider\", m.pluginDirs(false))\n\n\t// Add providers defined in the legacy .terraformrc,\n\tif m.PluginOverrides != nil {\n\t\tfor k, v := range m.PluginOverrides.Providers {\n\t\t\tlog.Printf(\"[DEBUG] found plugin override in .terraformrc: %q, %q\", k, v)\n\t\t}\n\n\t\tplugins = plugins.OverridePaths(m.PluginOverrides.Providers)\n\t}\n\n\tplugins, _ = plugins.ValidateVersions()\n\n\tfor p := range plugins {\n\t\tlog.Printf(\"[DEBUG] found valid plugin: %q, %q, %q\", p.Name, p.Version, p.Path)\n\t}\n\n\treturn plugins\n}", "title": "" }, { "docid": "b5308c6ef21e168ffde42ee8ccd7b7b6", "score": "0.64888525", "text": "func (m *Meta) providerPluginAutoInstalledSet() discovery.PluginMetaSet {\n\tplugins := discovery.FindPlugins(\"provider\", []string{m.pluginDir()})\n\tplugins, _ = plugins.ValidateVersions()\n\n\tfor p := range plugins {\n\t\tlog.Printf(\"[DEBUG] found valid plugin: %q\", p.Name)\n\t}\n\n\treturn plugins\n}", "title": "" }, { "docid": "bd209f01a873fd1f7a8322992c4a8da5", "score": "0.60465956", "text": "func FindProviderName(filePath string) ([]string, error) {\n\tfset := token.NewFileSet()\n\tf, err := parser.ParseFile(fset, filePath, nil, parser.Mode(0))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar genDecl *ast.GenDecl\n\tfor _, decl := range f.Decls {\n\t\td, ok := decl.(*ast.GenDecl)\n\t\tif ok && d.Tok == token.VAR {\n\t\t\tgenDecl = d\n\t\t}\n\t}\n\n\tif genDecl == nil {\n\t\treturn nil, err\n\t}\n\n\tvar valueSpec *ast.ValueSpec\n\tfor _, spec := range genDecl.Specs {\n\t\ts, ok := spec.(*ast.ValueSpec)\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\n\t\tfor _, name := range s.Names {\n\t\t\tif name.Name != \"Set\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\n\t\tvalueSpec = s\n\t}\n\n\tif valueSpec == nil {\n\t\treturn nil, err\n\t}\n\n\tvar callExpr *ast.CallExpr\n\tfor _, value := range valueSpec.Values {\n\t\texpr, ok := value.(*ast.CallExpr)\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\n\t\tfun, ok := expr.Fun.(*ast.SelectorExpr)\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\n\t\txIdent, ok := fun.X.(*ast.Ident)\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\n\t\tif xIdent.Name != \"wire\" || fun.Sel.Name != \"NewSet\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tcallExpr = expr\n\t}\n\n\tif callExpr == nil {\n\t\treturn nil, err\n\t}\n\n\tproviderNameList := []string{}\n\tfor _, arg := range callExpr.Args {\n\t\tident, ok := arg.(*ast.Ident)\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\t\tproviderNameList = append(providerNameList, ident.Name)\n\t}\n\n\treturn providerNameList, nil\n}", "title": "" }, { "docid": "6d37d900388482722ffdfd85ec8b0684", "score": "0.5821486", "text": "func newPluginSet(plugins ...workspace.PluginSpec) pluginSet {\n\tvar s pluginSet = make(map[string]workspace.PluginSpec, len(plugins))\n\tfor _, p := range plugins {\n\t\ts.Add(p)\n\t}\n\treturn s\n}", "title": "" }, { "docid": "65de70ace4704810dd501d8719b10383", "score": "0.5625546", "text": "func storageProviders() []string {\n\tspMu.RLock()\n\tdefer spMu.RUnlock()\n\n\tr := []string{}\n\tfor k := range spByName {\n\t\tr = append(r, k)\n\t}\n\n\treturn r\n}", "title": "" }, { "docid": "0b3661628d4f8e56697e44c005c2c31b", "score": "0.5547711", "text": "func (s *Service) Providers() []provider.Service {\n\treturn s.providers\n}", "title": "" }, { "docid": "dd9007f5a8c2a418f9728f33b80621aa", "score": "0.54638726", "text": "func 
SupportedProviders() []string {\n\treturn []string{Scaleway.String()}\n}", "title": "" }, { "docid": "35411a0eece201cd57ef7c63e1d9d2d9", "score": "0.5456592", "text": "func Engines() []string {\n\tvar engines []string\n\tfor engine := range providers {\n\t\tengines = append(engines, engine)\n\t}\n\treturn engines\n}", "title": "" }, { "docid": "ecee42b2692edf8736f37816811faad5", "score": "0.54346687", "text": "func (s *Server) providersGetMatching(\n\tsess *pb.Session,\n\tany []*pb.ProviderFilter,\n) ([]*pb.Provider, error) {\n\tres := make([]*pb.Provider, 0)\n\tmfils := make([]*pb.ObjectFilter, 0)\n\t// If the user specified one or more UUIDs or names in the incoming API\n\t// provider filters, the metadata service will have already handled the\n\t// translation/lookup to UUIDs, so we can pass the returned object's UUIDs\n\t// to the resource service's ProviderFilter.UuidFilter below.\n\tprimaryFiltered := false\n\t// If we get, for example, a filter on a non-existent partition, we\n\t// increment this variable. If the number of invalid conditions is equal to\n\t// the number of filters, we return an empty stream and don't bother\n\t// calling to the resource service.\n\tinvalidConds := 0\n\t// We keep a cache of partition UUIDs that were normalized during filter\n\t// expansion/solving with the metadata service so that when we pass filters\n\t// to the resource service, we have those partition UUIDs handy\n\tpartUuidsReqMap := make(map[int][]string, len(any))\n\tif len(any) > 0 {\n\t\t// Transform the supplied generic filters into the more specific\n\t\t// UuidFilter or NameFilter objects accepted by the metadata service\n\t\tfor x, filter := range any {\n\t\t\tmfil := &pb.ObjectFilter{\n\t\t\t\tObjectTypeFilter: &pb.ObjectTypeFilter{\n\t\t\t\t\tCodeFilter: &pb.CodeFilter{\n\t\t\t\t\t\tCode: \"runm.provider\",\n\t\t\t\t\t\tUsePrefix: false,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}\n\t\t\tif filter.PrimaryFilter != nil {\n\t\t\t\tif util.IsUuidLike(filter.PrimaryFilter.Search) {\n\t\t\t\t\tmfil.UuidFilter = &pb.UuidFilter{\n\t\t\t\t\t\tUuid: filter.PrimaryFilter.Search,\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tmfil.NameFilter = &pb.NameFilter{\n\t\t\t\t\t\tName: filter.PrimaryFilter.Search,\n\t\t\t\t\t\tUsePrefix: filter.PrimaryFilter.UsePrefix,\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tprimaryFiltered = true\n\t\t\t}\n\t\t\tif filter.PartitionFilter != nil {\n\t\t\t\t// The user may have specified a partition UUID or a partition\n\t\t\t\t// name with an optional prefix. 
We \"expand\" this by asking the\n\t\t\t\t// metadata service for the partitions matching this\n\t\t\t\t// name-or-UUID filter and then we pass those partition UUIDs\n\t\t\t\t// in the object filter.\n\t\t\t\tpartObjs, err := s.partitionsGetMatchingFilter(\n\t\t\t\t\tsess, filter.PartitionFilter,\n\t\t\t\t)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tif len(partObjs) == 0 {\n\t\t\t\t\t// This filter will never return any objects since the\n\t\t\t\t\t// searched-for partition term didn't match any partitions\n\t\t\t\t\tinvalidConds += 1\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tpartUuids := make([]string, len(partObjs))\n\t\t\t\tfor x, partObj := range partObjs {\n\t\t\t\t\tpartUuids[x] = partObj.Uuid\n\t\t\t\t}\n\t\t\t\tmfil.PartitionFilter = &pb.UuidsFilter{\n\t\t\t\t\tUuids: partUuids,\n\t\t\t\t}\n\t\t\t\t// Save in our cache so that the request service filters can\n\t\t\t\t// use the normalized partition UUIDs\n\t\t\t\tpartUuidsReqMap[x] = partUuids\n\t\t\t}\n\t\t\tif filter.PropertyFilter != nil {\n\t\t\t\tmfil.PropertyFilter = filter.PropertyFilter\n\t\t\t\tprimaryFiltered = true\n\t\t\t}\n\t\t\tmfils = append(mfils, mfil)\n\t\t}\n\t} else {\n\t\t// Just get all provider objects from the metadata service\n\t\tmfils = append(mfils, &pb.ObjectFilter{\n\t\t\tObjectTypeFilter: &pb.ObjectTypeFilter{\n\t\t\t\tCodeFilter: &pb.CodeFilter{\n\t\t\t\t\tCode: \"runm.provider\",\n\t\t\t\t\tUsePrefix: false,\n\t\t\t\t},\n\t\t\t},\n\t\t})\n\n\t}\n\n\tif len(any) > 0 && len(any) == invalidConds {\n\t\t// No point going further, since all filters will return 0 results\n\t\ts.log.L3(\n\t\t\t\"ProviderList: returning nil since all filters evaluated to \" +\n\t\t\t\t\"impossible conditions\",\n\t\t)\n\t\treturn res, nil\n\t}\n\n\t// Grab the basic object information from the metadata service first\n\tobjs, err := s.objectsGetMatching(sess, mfils)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif len(objs) == 0 {\n\t\treturn res, nil\n\t}\n\n\tobjMap := make(map[string]*pb.Object, len(objs))\n\tfor _, obj := range objs {\n\t\tobjMap[obj.Uuid] = obj\n\t}\n\n\tvar uuids []string\n\tif primaryFiltered {\n\t\tuuids = make([]string, len(objMap))\n\t\tx := 0\n\t\tfor uuid, _ := range objMap {\n\t\t\tuuids[x] = uuid\n\t\t\tx += 1\n\t\t}\n\t}\n\n\t// Create a set of pb.ProviderFilter objects and grab provider-specific\n\t// information from the runm-resource service. For now, we only supply\n\t// filters to the resource service's ProviderList API call if there were\n\t// filters passed to the API service's ProviderList API call.\n\trfils := make([]*pb.ProviderFindFilter, 0)\n\tif len(any) > 0 {\n\t\tfor x, f := range any {\n\t\t\trfil := &pb.ProviderFindFilter{}\n\t\t\tif f.PartitionFilter != nil {\n\t\t\t\trfil.PartitionFilter = &pb.UuidsFilter{\n\t\t\t\t\tUuids: partUuidsReqMap[x],\n\t\t\t\t}\n\t\t\t}\n\t\t\tif f.ProviderTypeFilter != nil {\n\t\t\t\t// TODO(jaypipes): Expand the API SearchFilter for provider\n\t\t\t\t// types into a []string{} of provider type codes by calling\n\t\t\t\t// the ProviderTypeList metadata service API. 
For now, just\n\t\t\t\t// pass in the Search term as an exact match...\n\t\t\t\trfil.ProviderTypeFilter = &pb.CodesFilter{\n\t\t\t\t\tCodes: []string{f.ProviderTypeFilter.Search},\n\t\t\t\t}\n\t\t\t}\n\t\t\tif primaryFiltered {\n\t\t\t\trfil.UuidFilter = &pb.UuidsFilter{\n\t\t\t\t\tUuids: uuids,\n\t\t\t\t}\n\t\t\t}\n\t\t\trfils = append(rfils, rfil)\n\t\t}\n\t}\n\n\t// OK, now we grab the provider-specific information from the resource\n\t// service and mash the generic object information into the returned API\n\t// Provider structs\n\trc, err := s.resClient()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treq := &pb.ProviderFindRequest{\n\t\tSession: sess,\n\t\tAny: rfils,\n\t}\n\tstream, err := rc.ProviderFind(context.Background(), req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor {\n\t\tp, err := stream.Recv()\n\t\tif err == io.EOF {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tobj, exists := objMap[p.Uuid]\n\t\tif !exists {\n\t\t\ts.log.ERR(\n\t\t\t\t\"DATA CORRUPTION! provider with UUID %s returned from \"+\n\t\t\t\t\t\"resource service but no matching object exists in \"+\n\t\t\t\t\t\"metadata service!\",\n\t\t\t\tp.Uuid,\n\t\t\t)\n\t\t\tcontinue\n\t\t}\n\t\tproviderMergeObject(p, obj)\n\t\tres = append(res, p)\n\t}\n\treturn res, nil\n}", "title": "" }, { "docid": "7bbb6b2924b4bd57c5029e08c6dc51dd", "score": "0.5284975", "text": "func buildProviderMap(fset *token.FileSet, hasher typeutil.Hasher, set *ProviderSet) (*typeutil.Map, *typeutil.Map, []error) {\n\tproviderMap := new(typeutil.Map)\n\tproviderMap.SetHasher(hasher)\n\tsrcMap := new(typeutil.Map) // to *providerSetSrc\n\tsrcMap.SetHasher(hasher)\n\n\tec := new(errorCollector)\n\t// Process injector arguments.\n\tif set.InjectorArgs != nil {\n\t\tgivens := set.InjectorArgs.Tuple\n\t\tfor i := 0; i < givens.Len(); i++ {\n\t\t\ttyp := givens.At(i).Type()\n\t\t\targ := &InjectorArg{Args: set.InjectorArgs, Index: i}\n\t\t\tsrc := &providerSetSrc{InjectorArg: arg}\n\t\t\tif prevSrc := srcMap.At(typ); prevSrc != nil {\n\t\t\t\tec.add(bindingConflictError(fset, typ, set, src, prevSrc.(*providerSetSrc)))\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tproviderMap.Set(typ, &ProvidedType{t: typ, a: arg})\n\t\t\tsrcMap.Set(typ, src)\n\t\t}\n\t}\n\t// Process imports, verifying that there are no conflicts between sets.\n\tfor _, imp := range set.Imports {\n\t\tsrc := &providerSetSrc{Import: imp}\n\t\timp.providerMap.Iterate(func(k types.Type, v interface{}) {\n\t\t\tif prevSrc := srcMap.At(k); prevSrc != nil {\n\t\t\t\tec.add(bindingConflictError(fset, k, set, src, prevSrc.(*providerSetSrc)))\n\t\t\t\treturn\n\t\t\t}\n\t\t\tproviderMap.Set(k, v)\n\t\t\tsrcMap.Set(k, src)\n\t\t})\n\t}\n\tif len(ec.errors) > 0 {\n\t\treturn nil, nil, ec.errors\n\t}\n\n\t// Process non-binding providers in new set.\n\tfor _, p := range set.Providers {\n\t\tsrc := &providerSetSrc{Provider: p}\n\t\tfor _, typ := range p.Out {\n\t\t\tif prevSrc := srcMap.At(typ); prevSrc != nil {\n\t\t\t\tec.add(bindingConflictError(fset, typ, set, src, prevSrc.(*providerSetSrc)))\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tproviderMap.Set(typ, &ProvidedType{t: typ, p: p})\n\t\t\tsrcMap.Set(typ, src)\n\t\t}\n\t}\n\tfor _, v := range set.Values {\n\t\tsrc := &providerSetSrc{Value: v}\n\t\tif prevSrc := srcMap.At(v.Out); prevSrc != nil {\n\t\t\tec.add(bindingConflictError(fset, v.Out, set, src, prevSrc.(*providerSetSrc)))\n\t\t\tcontinue\n\t\t}\n\t\tproviderMap.Set(v.Out, &ProvidedType{t: v.Out, v: v})\n\t\tsrcMap.Set(v.Out, src)\n\t}\n\tfor _, f := range 
set.Fields {\n\t\tsrc := &providerSetSrc{Field: f}\n\t\tfor _, typ := range f.Out {\n\t\t\tif prevSrc := srcMap.At(typ); prevSrc != nil {\n\t\t\t\tec.add(bindingConflictError(fset, typ, set, src, prevSrc.(*providerSetSrc)))\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tproviderMap.Set(typ, &ProvidedType{t: typ, f: f})\n\t\t\tsrcMap.Set(typ, src)\n\t\t}\n\t}\n\tif len(ec.errors) > 0 {\n\t\treturn nil, nil, ec.errors\n\t}\n\n\t// Process bindings in set. Must happen after the other providers to\n\t// ensure the concrete type is being provided.\n\tfor _, b := range set.Bindings {\n\t\tsrc := &providerSetSrc{Binding: b}\n\t\tif prevSrc := srcMap.At(b.Iface); prevSrc != nil {\n\t\t\tec.add(bindingConflictError(fset, b.Iface, set, src, prevSrc.(*providerSetSrc)))\n\t\t\tcontinue\n\t\t}\n\t\tconcrete := providerMap.At(b.Provided)\n\t\tif concrete == nil {\n\t\t\tsetName := set.VarName\n\t\t\tif setName == \"\" {\n\t\t\t\tsetName = \"provider set\"\n\t\t\t}\n\t\t\tec.add(notePosition(fset.Position(b.Pos), fmt.Errorf(\"wire.Bind of concrete type %q to interface %q, but %s does not include a provider for %q\", b.Provided, b.Iface, setName, b.Provided)))\n\t\t\tcontinue\n\t\t}\n\t\tproviderMap.Set(b.Iface, concrete)\n\t\tsrcMap.Set(b.Iface, src)\n\t}\n\tif len(ec.errors) > 0 {\n\t\treturn nil, nil, ec.errors\n\t}\n\treturn providerMap, srcMap, nil\n}", "title": "" }, { "docid": "85d8880af4104664b7a4f0c44d9ca35b", "score": "0.5282387", "text": "func (a *API) Providers() *auth.Providers {\n\treturn a.ext\n}", "title": "" }, { "docid": "01686e2d4b2158450a9f1174cca009a4", "score": "0.52645135", "text": "func Providers() (providers []Provider, err error) {\n\tstatement := \"select id, first_name, last_name, phone_number, vidyo_room, credential from provider\"\n\n\trows, err := Db.Query(statement)\n\tfmt.Println(rows)\n\n\tvar p Provider\n\n\tfor rows.Next() {\n\t\terr = rows.Scan(&p.Id, &p.FirstName, &p.LastName, &p.PhoneNumber, &p.VidyoRoom, pq.Array(&p.Credential))\n\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t\tproviders = append(providers, p)\n\t}\n\treturn\n}", "title": "" }, { "docid": "2d5cdbbb19fcd44b9dad4fcc35789667", "score": "0.52199894", "text": "func Providers() []Provider {\n\tprovidersMu.RLock()\n\tdefer providersMu.RUnlock()\n\tlist := make([]Provider, 0, len(providers))\n\tfor name := range providers {\n\t\tlist = append(list, providers[name])\n\t}\n\n\treturn list\n}", "title": "" }, { "docid": "1936e57f55f8f44462240369f79d54a7", "score": "0.51920944", "text": "func (s *Store) ProvidersGetMatching(\n\tany []*pb.ProviderFindFilter,\n) ([]*ProviderRecord, error) {\n\t// TODO(jaypipes): Validate that the slice of supplied ProviderFilters is\n\t// valid (for example, that the filter contains at least one UUID,\n\t// partition, or provider type filter...\n\tqargs := make([]interface{}, 0)\n\tqs := `SELECT\n p.id\n, p.uuid AS provider_uuid\n, part.uuid AS partition_uuid\n, pt.code AS provider_type\n, p.generation\nFROM providers AS p\nJOIN provider_types AS pt\n ON p.provider_type_id = pt.id\nJOIN partitions AS part\n ON p.partition_id = part.id`\n\tif len(any) > 0 {\n\t\tqs += `\nWHERE `\n\t}\n\tfor x, filter := range any {\n\t\tif x > 0 {\n\t\t\tqs += `\nOR\n`\n\t\t}\n\t\tqs += \"(\"\n\t\texprAnd := false\n\t\tif filter.UuidFilter != nil {\n\t\t\tqs += \"p.uuid \" + InParamString(len(filter.UuidFilter.Uuids))\n\t\t\tfor _, uuid := range filter.UuidFilter.Uuids {\n\t\t\t\tqargs = append(qargs, uuid)\n\t\t\t}\n\t\t\texprAnd = true\n\t\t}\n\t\tif filter.PartitionFilter != nil {\n\t\t\tif exprAnd 
{\n\t\t\t\tqs += \" AND \"\n\t\t\t}\n\t\t\tqs += \"part.uuid \" + InParamString(len(filter.PartitionFilter.Uuids))\n\t\t\tfor _, uuid := range filter.PartitionFilter.Uuids {\n\t\t\t\tqargs = append(qargs, uuid)\n\t\t\t}\n\t\t\texprAnd = true\n\t\t}\n\t\tif filter.ProviderTypeFilter != nil {\n\t\t\tif exprAnd {\n\t\t\t\tqs += \" AND \"\n\t\t\t}\n\t\t\tqs += \"pt.code \" + InParamString(len(filter.ProviderTypeFilter.Codes))\n\t\t\tfor _, code := range filter.ProviderTypeFilter.Codes {\n\t\t\t\tqargs = append(qargs, code)\n\t\t\t}\n\t\t\texprAnd = true\n\t\t}\n\t\tqs += \")\"\n\t}\n\trows, err := s.DB().Query(qs, qargs...)\n\tif err != nil {\n\t\ts.log.ERR(\"failed to get providers: %s.\\nSQL: %s\", err, qs)\n\t\treturn nil, err\n\t}\n\trecs := make([]*ProviderRecord, 0)\n\tfor rows.Next() {\n\t\trec := &ProviderRecord{\n\t\t\tProvider: &pb.Provider{\n\t\t\t\tPartition: &pb.Partition{},\n\t\t\t\tProviderType: &pb.ProviderType{},\n\t\t\t},\n\t\t}\n\t\tif err := rows.Scan(\n\t\t\t&rec.ID,\n\t\t\t&rec.Provider.Uuid,\n\t\t\t&rec.Provider.Partition.Uuid,\n\t\t\t&rec.Provider.ProviderType.Code,\n\t\t\t&rec.Provider.Generation,\n\t\t); err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\trecs = append(recs, rec)\n\t}\n\treturn recs, nil\n}", "title": "" }, { "docid": "3f78bc6594fd82acba52fa9a0d61fdd3", "score": "0.516714", "text": "func (*PostSetupManager) Providers() ([]PostSetupProvider, error) {\n\tproviders, err := initialization.OpenCLProviders()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tprovidersAlias := make([]PostSetupProvider, len(providers))\n\tfor i, p := range providers {\n\t\tprovidersAlias[i] = PostSetupProvider(p)\n\t}\n\n\treturn providersAlias, nil\n}", "title": "" }, { "docid": "3bab3a538c1c2e6cc5828269cd37529c", "score": "0.5164937", "text": "func gatherPluginsFromSnapshot(plugctx *plugin.Context, target *deploy.Target) (pluginSet, error) {\n\tlogging.V(preparePluginLog).Infof(\"gatherPluginsFromSnapshot(): gathering plugins from snapshot\")\n\tset := newPluginSet()\n\tif target == nil || target.Snapshot == nil {\n\t\tlogging.V(preparePluginLog).Infof(\"gatherPluginsFromSnapshot(): no snapshot available, skipping\")\n\t\treturn set, nil\n\t}\n\tfor _, res := range target.Snapshot.Resources {\n\t\turn := res.URN\n\t\tif !providers.IsProviderType(urn.Type()) {\n\t\t\tlogging.V(preparePluginVerboseLog).Infof(\"gatherPluginsFromSnapshot(): skipping %q, not a provider\", urn)\n\t\t\tcontinue\n\t\t}\n\t\tpkg := providers.GetProviderPackage(urn.Type())\n\t\tversion, err := providers.GetProviderVersion(res.Inputs)\n\t\tif err != nil {\n\t\t\treturn set, err\n\t\t}\n\t\tdownloadURL, err := providers.GetProviderDownloadURL(res.Inputs)\n\t\tif err != nil {\n\t\t\treturn set, err\n\t\t}\n\t\tlogging.V(preparePluginLog).Infof(\n\t\t\t\"gatherPluginsFromSnapshot(): plugin %s %s is required by first-class provider %q\", pkg, version, urn)\n\t\tset.Add(workspace.PluginSpec{\n\t\t\tName: pkg.String(),\n\t\t\tKind: workspace.ResourcePlugin,\n\t\t\tVersion: version,\n\t\t\tPluginDownloadURL: downloadURL,\n\t\t})\n\t}\n\treturn set, nil\n}", "title": "" }, { "docid": "e63dea1b6ff06acdf3b08b20e947dce6", "score": "0.51604825", "text": "func ListProviders() ([]*Metadata, error) {\n\tresults := make([]*Metadata, 0)\n\n\tfor _, f := range knownDrivers {\n\t\tdrv, err := f(nil)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, drv.Self())\n\t}\n\n\t// Sort results\n\tif len(results) > 1 {\n\t\tsort.SliceStable(results, func(i, j int) bool {\n\t\t\treturn 
strings.Compare(results[i].ID, results[j].ID) < 0\n\t\t})\n\t}\n\n\treturn results, nil\n}", "title": "" }, { "docid": "572d4378caf4fc3e9c1bbb460e637eac", "score": "0.5143006", "text": "func All(settings environment.EnvSettings) Providers {\n\tresult := Providers{\n\t\t{\n\t\t\tSchemes: []string{\"http\", \"https\"},\n\t\t\tNew: newHTTPGetter,\n\t\t},\n\t}\n\tpluginDownloaders, _ := collectPlugins(settings)\n\tresult = append(result, pluginDownloaders...)\n\treturn result\n}", "title": "" }, { "docid": "724d0d8acf66198cbf237afbee65f165", "score": "0.5125772", "text": "func GenValidStorageProviders() []v1alpha1.StorageProvider {\n\treturn []v1alpha1.StorageProvider{\n\t\t{\n\t\t\tS3: &v1alpha1.S3StorageProvider{\n\t\t\t\tBucket: \"s3\",\n\t\t\t\tPrefix: \"prefix-\",\n\t\t\t\tEndpoint: \"s3://localhost:80\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tS3: &v1alpha1.S3StorageProvider{\n\t\t\t\tBucket: \"s3\",\n\t\t\t\tPrefix: \"prefix-\",\n\t\t\t\tEndpoint: \"s3://localhost:80\",\n\t\t\t\tSecretName: \"s3\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tGcs: &v1alpha1.GcsStorageProvider{\n\t\t\t\tProjectId: \"gcs\",\n\t\t\t\tBucket: \"gcs\",\n\t\t\t\tPrefix: \"prefix-\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tGcs: &v1alpha1.GcsStorageProvider{\n\t\t\t\tProjectId: \"gcs\",\n\t\t\t\tBucket: \"gcs\",\n\t\t\t\tPrefix: \"prefix-\",\n\t\t\t\tSecretName: \"gcs\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tLocal: &v1alpha1.LocalStorageProvider{\n\t\t\t\tPrefix: \"prefix-\",\n\t\t\t\tVolume: corev1.Volume{\n\t\t\t\t\tName: \"nfs\",\n\t\t\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\t\t\tNFS: &corev1.NFSVolumeSource{\n\t\t\t\t\t\t\tServer: \"fake-server\",\n\t\t\t\t\t\t\tPath: \"/some/path\",\n\t\t\t\t\t\t\tReadOnly: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tVolumeMount: corev1.VolumeMount{\n\t\t\t\t\tName: \"nfs\",\n\t\t\t\t\tMountPath: \"/some/path\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "39397e1e56b01785e8a04773dadbf6bb", "score": "0.4972069", "text": "func (m *ProviderManager) Providers() map[string]Provider {\n\tm.mu.RLock()\n\tdefer m.mu.RUnlock()\n\n\t// Create a new map and copy the secrets providers into it in order to\n\t// prevent race conditions\n\tproviders := make(map[string]Provider, len(m.providers))\n\tfor k, v := range m.providers {\n\t\tproviders[k] = v\n\t}\n\n\treturn providers\n}", "title": "" }, { "docid": "2a2ce8e31b1a1b399d2cfc9c913b1f3d", "score": "0.49395236", "text": "func computeDefaultProviderPlugins(languagePlugins, allPlugins pluginSet) map[tokens.Package]workspace.PluginSpec {\n\t// Language hosts are not required to specify the full set of plugins they depend on. If the set of plugins received\n\t// from the language host does not include any resource providers, fall back to the full set of plugins.\n\tlanguageReportedProviderPlugins := false\n\tfor _, plug := range languagePlugins.Values() {\n\t\tif plug.Kind == workspace.ResourcePlugin {\n\t\t\tlanguageReportedProviderPlugins = true\n\t\t}\n\t}\n\n\tsourceSet := languagePlugins\n\tif !languageReportedProviderPlugins {\n\t\tlogging.V(preparePluginLog).Infoln(\n\t\t\t\"computeDefaultProviderPlugins(): language host reported empty set of provider plugins, using all plugins\")\n\t\tsourceSet = allPlugins\n\t}\n\n\tdefaultProviderPlugins := make(map[tokens.Package]workspace.PluginSpec)\n\n\t// Sort the set of source plugins by version, so that we iterate over the set of plugins in a deterministic order.\n\t// Sorting by version gets us two properties:\n\t// 1. 
The below loop will never see a nil-versioned plugin after a non-nil versioned plugin, since the sort always\n\t// considers nil-versioned plugins to be less than non-nil versioned plugins.\n\t// 2. The below loop will never see a plugin with a version that is older than a plugin that has already been\n\t// seen. The sort will always have placed the older plugin before the newer plugin.\n\t//\n\t// Despite these properties, the below loop explicitly handles those cases to preserve correct behavior even if the\n\t// sort is not functioning properly.\n\tsourcePlugins := sourceSet.Values()\n\tsort.Sort(workspace.SortedPluginSpec(sourcePlugins))\n\tfor _, p := range sourcePlugins {\n\t\tlogging.V(preparePluginLog).Infof(\"computeDefaultProviderPlugins(): considering %s\", p)\n\t\tif p.Kind != workspace.ResourcePlugin {\n\t\t\t// Default providers are only relevant for resource plugins.\n\t\t\tlogging.V(preparePluginVerboseLog).Infof(\n\t\t\t\t\"computeDefaultProviderPlugins(): skipping %s, not a resource provider\", p)\n\t\t\tcontinue\n\t\t}\n\n\t\tif seenPlugin, has := defaultProviderPlugins[tokens.Package(p.Name)]; has {\n\t\t\tif seenPlugin.Version == nil {\n\t\t\t\tlogging.V(preparePluginLog).Infof(\n\t\t\t\t\t\"computeDefaultProviderPlugins(): plugin %s selected for package %s (override, previous was nil)\",\n\t\t\t\t\tp, p.Name)\n\t\t\t\tdefaultProviderPlugins[tokens.Package(p.Name)] = p\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tcontract.Assertf(p.Version != nil, \"p.Version should not be nil if sorting is correct!\")\n\t\t\tif p.Version != nil && p.Version.GTE(*seenPlugin.Version) {\n\t\t\t\tlogging.V(preparePluginLog).Infof(\n\t\t\t\t\t\"computeDefaultProviderPlugins(): plugin %s selected for package %s (override, newer than previous %s)\",\n\t\t\t\t\tp, p.Name, seenPlugin.Version)\n\t\t\t\tdefaultProviderPlugins[tokens.Package(p.Name)] = p\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tcontract.Failf(\"Should not have seen an older plugin if sorting is correct!\\n %s-%s\\n %s-%s\",\n\t\t\t\tp.Name, p.Version.String(),\n\t\t\t\tseenPlugin.Name, seenPlugin.Version.String())\n\t\t}\n\n\t\tlogging.V(preparePluginLog).Infof(\n\t\t\t\"computeDefaultProviderPlugins(): plugin %s selected for package %s (first seen)\", p, p.Name)\n\t\tdefaultProviderPlugins[tokens.Package(p.Name)] = p\n\t}\n\n\tif logging.V(preparePluginLog) {\n\t\tlogging.V(preparePluginLog).Infoln(\"computeDefaultProviderPlugins(): summary of default plugins:\")\n\t\tfor pkg, info := range defaultProviderPlugins {\n\t\t\tlogging.V(preparePluginLog).Infof(\" %-15s = %s\", pkg, info.Version)\n\t\t}\n\t}\n\n\tdefaultProviderInfo := make(map[tokens.Package]workspace.PluginSpec)\n\tfor name, plugin := range defaultProviderPlugins {\n\t\tdefaultProviderInfo[name] = plugin\n\t}\n\n\treturn defaultProviderInfo\n}", "title": "" }, { "docid": "3d829c85a7d44d9a9847b57fd16641b8", "score": "0.49177715", "text": "func (p *Provider) Packers() []packer.Packer {\n\treturn p.PackerList\n}", "title": "" }, { "docid": "770e84bac75ad25283b50352042cd990", "score": "0.4897924", "text": "func Provider() *Providers {\n\treturn NewProviders(nil)\n}", "title": "" }, { "docid": "25f195fbd4bb1d69c53a236429698ae4", "score": "0.48940453", "text": "func (c *component) getSupportedProviders() []string {\n\treturn []string{\"azure\", \"openebs\", \"restic\"}\n}", "title": "" }, { "docid": "f3f3e2a9ad53eea57c63c79a3eae9701", "score": "0.48425397", "text": "func (g *graph) findAssignableProviders(\n\trm *reflectedModule,\n\texpType reflect.Type,\n\ttypeToProvidersMap 
map[reflect.Type][]*reflectedModule) ([]*reflectedModule, error) {\n\tvar providers []*reflectedModule\n\tfoundAssignable := false\n\tvar foundAssignableType reflect.Type\n\tfor t, ps := range typeToProvidersMap {\n\t\tif t.AssignableTo(expType) {\n\t\t\tif foundAssignable {\n\t\t\t\treturn nil, fmt.Errorf(\"multiple assignable types %s, %s for type %s.%s\",\n\t\t\t\t\tt.Name(), foundAssignableType.Name(), rm.name, expType.Name())\n\t\t\t}\n\n\t\t\tproviders = ps\n\t\t\tfoundAssignable = true\n\t\t\tfoundAssignableType = t\n\t\t}\n\t}\n\n\treturn providers, nil\n}", "title": "" }, { "docid": "7623b502c55d99f58aa0946e845b2e8a", "score": "0.48286504", "text": "func (k *Keeper) GetProviders(ctx sdk.Context, skip, limit int64) (items types.Providers) {\n\tvar (\n\t\tstore = k.Store(ctx)\n\t\titer = hubtypes.NewPaginatedIterator(\n\t\t\tsdk.KVStorePrefixIterator(store, types.ProviderKeyPrefix),\n\t\t)\n\t)\n\n\tdefer iter.Close()\n\n\titer.Skip(skip)\n\titer.Limit(limit, func(iter sdk.Iterator) {\n\t\tvar item types.Provider\n\t\tk.cdc.MustUnmarshalBinaryBare(iter.Value(), &item)\n\t\titems = append(items, item)\n\t})\n\n\treturn items\n}", "title": "" }, { "docid": "e95326f4bf9186b311845f595b7fdb56", "score": "0.48175767", "text": "func (p *Peer) FindProviders(ctx context.Context, id cid.Cid) []peer.ID {\n\tproviderAddresses := []peer.ID{}\n\tproviders := p.DHT.FindProvidersAsync(ctx, id, 0)\nFindProvider:\n\tfor {\n\t\tselect {\n\t\tcase provider := <-providers:\n\t\t\tif provider.ID == p.Host.ID() {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif provider.ID.String() == \"\" {\n\t\t\t\tbreak FindProvider\n\t\t\t}\n\t\t\tproviderAddresses = append(providerAddresses, provider.ID)\n\t\tcase <-time.After(time.Second):\n\t\t\tbreak FindProvider\n\t\tcase <-ctx.Done():\n\t\t\tbreak FindProvider\n\t\t}\n\t}\n\treturn providerAddresses\n}", "title": "" }, { "docid": "9a8bd8399d1682c1815982a87e416822", "score": "0.47559607", "text": "func (s *Syncer) CleanProviders() {\n\ts.repositories = map[string]Provider{}\n}", "title": "" }, { "docid": "af1a23283b73425dea95e76f6520cb04", "score": "0.47552985", "text": "func (m *Meta) missingPlugins(avail discovery.PluginMetaSet, reqd discovery.PluginRequirements) discovery.PluginRequirements {\n\tmissing := make(discovery.PluginRequirements)\n\n\tcandidates := avail.ConstrainVersions(reqd)\n\tinternal := m.internalProviders()\n\n\tfor name, versionSet := range reqd {\n\t\t// internal providers can't be missing\n\t\tif _, ok := internal[name]; ok {\n\t\t\tcontinue\n\t\t}\n\n\t\tlog.Printf(\"[DEBUG] plugin requirements: %q=%q\", name, versionSet.Versions)\n\t\tif metas := candidates[name]; metas.Count() == 0 {\n\t\t\tmissing[name] = versionSet\n\t\t}\n\t}\n\n\treturn missing\n}", "title": "" }, { "docid": "2cca48dc14e249672865f9243f502722", "score": "0.47288176", "text": "func (c *Channel) Providers() context.Client {\n\treturn c\n}", "title": "" }, { "docid": "bb824a6bfde8b2328bad28ee446b9271", "score": "0.47016633", "text": "func (o *AdminCreateIdentityImportCredentialsOidcConfig) GetProviders() []AdminCreateIdentityImportCredentialsOidcProvider {\n\tif o == nil || o.Providers == nil {\n\t\tvar ret []AdminCreateIdentityImportCredentialsOidcProvider\n\t\treturn ret\n\t}\n\treturn o.Providers\n}", "title": "" }, { "docid": "87b958e74c4fbbafcd5c828ddcf3b059", "score": "0.4696615", "text": "func Load(cfgDir string) (*Providers, error) {\n\tproviders := map[string]types.Provider{}\n\n\trawProviders, err := loadProviders(cfgDir)\n\tif err != nil {\n\t\treturn nil, 
fmt.Errorf(\"providers: couldn't load providers. %w\", err)\n\t}\n\n\tfor name, data := range rawProviders {\n\t\tprovider, err := parseProvider(name, data)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"providers: couldn't parse providers. %w\", err)\n\t\t}\n\t\tproviders[name] = provider\n\t}\n\n\treturn &Providers{providers: providers}, nil\n}", "title": "" }, { "docid": "4350f9373abe0417731f56b87338fe3f", "score": "0.46816283", "text": "func Provider() *schema.Provider {\n\treturn &schema.Provider{\n\t\tResourcesMap: map[string]*schema.Resource{\n\t\t\t\"buildkite_agent_token\": resourceAgentToken(),\n\t\t\t\"buildkite_pipeline\": resourcePipeline(),\n\t\t\t\"buildkite_pipeline_schedule\": resourcePipelineSchedule(),\n\t\t\t\"buildkite_team\": resourceTeam(),\n\t\t},\n\t\tDataSourcesMap: map[string]*schema.Resource{\n\t\t\t\"buildkite_meta\": dataSourceMeta(),\n\t\t\t\"buildkite_pipeline\": dataSourcePipeline(),\n\t\t},\n\t\tSchema: map[string]*schema.Schema{\n\t\t\t\"organization\": &schema.Schema{\n\t\t\t\tDefaultFunc: schema.EnvDefaultFunc(\"BUILDKITE_ORGANIZATION\", nil),\n\t\t\t\tDescription: \"The Buildkite organization ID\",\n\t\t\t\tRequired: true,\n\t\t\t\tType: schema.TypeString,\n\t\t\t},\n\t\t\t\"api_token\": &schema.Schema{\n\t\t\t\tDefaultFunc: schema.EnvDefaultFunc(\"BUILDKITE_API_TOKEN\", nil),\n\t\t\t\tDescription: \"API token with GraphQL access and `write_pipelines, read_pipelines` scopes\",\n\t\t\t\tRequired: true,\n\t\t\t\tType: schema.TypeString,\n\t\t\t},\n\t\t},\n\t\tConfigureFunc: providerConfigure,\n\t}\n}", "title": "" }, { "docid": "ef360ba891b2cf4d669cd01677d4e940", "score": "0.46783704", "text": "func getProviderMap(in map[string]RunnerProvider) map[string]provider.Provider {\n\tif len(in) == 0 {\n\t\tpanic(\"no provider defined\")\n\t}\n\n\tmp := make(map[string]provider.Provider)\n\n\tfor providerName, providerMap := range in {\n\t\t// this needs to be repeated for every provider that will be added in the future as is atm\n\t\tif providerMap.Scaleway != nil {\n\t\t\tif _, ok := mp[providerName]; ok {\n\t\t\t\tpanic(fmt.Sprintf(\"more than one provider for name %s\", providerName))\n\t\t\t}\n\t\t\tmp[providerName] = providerMap.Scaleway\n\t\t}\n\t\tif providerMap.GCP != nil {\n\t\t\tif _, ok := mp[providerName]; ok {\n\t\t\t\tpanic(fmt.Sprintf(\"more than one provider for name %s\", providerName))\n\t\t\t}\n\t\t\tmp[providerName] = providerMap.GCP\n\t\t}\n\t\t// panic if no provider has been matched\n\t\tif _, ok := mp[providerName]; !ok {\n\t\t\tpanic(fmt.Sprintf(\"no provider matched for name %s\", providerName))\n\t\t}\n\t}\n\n\treturn mp\n}", "title": "" }, { "docid": "d0e844cd41cfd18e4e4b099cf8c613f9", "score": "0.4652176", "text": "func (p pluginSet) Values() []workspace.PluginSpec {\n\tplugins := slice.Prealloc[workspace.PluginSpec](len(p))\n\tfor _, value := range p {\n\t\tplugins = append(plugins, value)\n\t}\n\treturn plugins\n}", "title": "" }, { "docid": "a8edb3d1136dd4c126b58dedf91f5212", "score": "0.46208617", "text": "func loadProviders(cfgDir string) (map[string]interface{}, error) {\n\tfn := paths.ProvidersFile(cfgDir)\n\n\tfile, err := os.ReadFile(fn)\n\tif err != nil {\n\t\tif errors.Is(err, os.ErrNotExist) {\n\t\t\treturn nil, fmt.Errorf(\"file %q doesn't exist. use --create-config to create default providers\", fn)\n\t\t}\n\t\treturn nil, fmt.Errorf(\"couldn't read file %q. 
%w\", fn, err)\n\t}\n\n\trawProviders := map[string]interface{}{}\n\tif err := toml.Unmarshal(file, &rawProviders); err != nil {\n\t\treturn nil, fmt.Errorf(\"couldn't toml unmarshal file %q. %w\", fn, err)\n\t}\n\n\treturn rawProviders, nil\n}", "title": "" }, { "docid": "23d0a15bbb379466efe3f721a371abea", "score": "0.46194166", "text": "func GetAll() map[string]Plugin {\n\treturn pluginRegister\n}", "title": "" }, { "docid": "28444f25ba474c9bb43423f17692bf82", "score": "0.46116295", "text": "func DetermineWhichRequiredResourceProvidersRequireRegistration(requiredResourceProviders map[string]struct{}) (*[]string, error) {\n\tif registeredResourceProviders == nil || unregisteredResourceProviders == nil {\n\t\treturn nil, fmt.Errorf(\"internal-error: the registered/unregistered Resource Provider cache isn't populated\")\n\t}\n\n\trequiringRegistration := make([]string, 0)\n\tfor providerName := range requiredResourceProviders {\n\t\tif _, isRegistered := (*registeredResourceProviders)[providerName]; isRegistered {\n\t\t\tcontinue\n\t\t}\n\n\t\tif _, isUnregistered := (*unregisteredResourceProviders)[providerName]; !isUnregistered {\n\t\t\t// this is likely a typo in the Required Resource Providers list, so we should surface this\n\t\t\treturn nil, fmt.Errorf(\"the required Resource Provider %q wasn't returned from the Azure API\", providerName)\n\t\t}\n\n\t\trequiringRegistration = append(requiringRegistration, providerName)\n\t}\n\n\treturn &requiringRegistration, nil\n}", "title": "" }, { "docid": "2fdb919efebfb45aa3503fccdc525eff", "score": "0.46038815", "text": "func (g *graph) computeProviders() (\n\tmap[string]*reflectedModule,\n\tmap[reflect.Type][]*reflectedModule,\n\terror) {\n\n\tnameToProviderMap := make(map[string]*reflectedModule)\n\ttypeToProvidersMap := make(map[reflect.Type][]*reflectedModule)\n\n\tfor _, provider := range g.modules {\n\t\tfor _, instance := range provider.instances {\n\t\t\tname := instance.name\n\t\t\tif existingProvider, ok := nameToProviderMap[name]; ok {\n\t\t\t\treturn nil, nil,\n\t\t\t\t\tfmt.Errorf(\"duplicated name %s in module %s and %s\", name, existingProvider.name, provider.name)\n\t\t\t}\n\t\t\tnameToProviderMap[name] = provider\n\n\t\t\tt := instance.tp\n\t\t\texistingProviders, _ := typeToProvidersMap[t]\n\t\t\texistingProviders = append(existingProviders, provider)\n\t\t\ttypeToProvidersMap[t] = existingProviders\n\t\t}\n\t}\n\n\treturn nameToProviderMap, typeToProvidersMap, nil\n}", "title": "" }, { "docid": "f15abb46afb5cafb9291db07ba10f6c2", "score": "0.46028733", "text": "func (_m *OrchestratorPlugin) GetStreamerProviders() []providers.StreamProvider {\n\tret := _m.Called()\n\n\tvar r0 []providers.StreamProvider\n\tif rf, ok := ret.Get(0).(func() []providers.StreamProvider); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]providers.StreamProvider)\n\t\t}\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "3441ed72f93e463f41663d54a6daf0ec", "score": "0.45990035", "text": "func (s *Server) ProviderList(\n\treq *pb.ProviderListRequest,\n\tstream pb.RunmAPI_ProviderListServer,\n) error {\n\tprovs, err := s.providersGetMatching(req.Session, req.Any)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor _, prov := range provs {\n\t\tif err = stream.Send(prov); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "f70d6ab02af4a38887ce1e2e670f8c63", "score": "0.4592534", "text": "func Provider() terraform.ResourceProvider {\n\treturn &schema.Provider{\n\t\tSchema: 
map[string]*schema.Schema{\n\t\t\t\"credentials\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tOptional: true,\n\t\t\t\tDefaultFunc: schema.MultiEnvDefaultFunc([]string{\n\t\t\t\t\t\"GOOGLE_CREDENTIALS\",\n\t\t\t\t\t\"GOOGLE_CLOUD_KEYFILE_JSON\",\n\t\t\t\t\t\"GCLOUD_KEYFILE_JSON\",\n\t\t\t\t}, nil),\n\t\t\t\tValidateFunc: validateCredentials,\n\t\t\t},\n\t\t\t\"access_token\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tOptional: true,\n\t\t\t\tDefaultFunc: schema.MultiEnvDefaultFunc([]string{\n\t\t\t\t\t\"GOOGLE_OAUTH_ACCESS_TOKEN\",\n\t\t\t\t}, nil),\n\t\t\t\tConflictsWith: []string{\"credentials\"},\n\t\t\t},\n\t\t},\n\t\tDataSourcesMap: map[string]*schema.Resource{\n\t\t\t// \"fastly_ip_ranges\": dataSourceFastlyIPRanges(),\n\t\t},\n\t\tResourcesMap: map[string]*schema.Resource{\n\t\t\t// \"tagmanager_v2\": resourceTagManagerAPIV2(),\n\t\t},\n\t\tConfigureFunc: providerConfigure,\n\t}\n}", "title": "" }, { "docid": "1ffe043904df0bc056f5dfc53e76c843", "score": "0.45848912", "text": "func (m *IdentityContainer) SetIdentityProviders(value []IdentityProviderBaseable)() {\n m.identityProviders = value\n}", "title": "" }, { "docid": "d0478f116ea98da0f557b4c7d244f3cd", "score": "0.4583957", "text": "func Provider() *schema.Provider {\n\treturn &schema.Provider{\n\t\tResourcesMap: map[string]*schema.Resource{\n\t\t\t\"flexkube_etcd_cluster\": resourceEtcdCluster(),\n\t\t\t\"flexkube_kubelet_pool\": resourceKubeletPool(),\n\t\t\t\"flexkube_controlplane\": resourceControlplane(),\n\t\t\t\"flexkube_apiloadbalancer_pool\": resourceAPILoadBalancerPool(),\n\t\t\t\"flexkube_helm_release\": resourceHelmRelease(),\n\t\t\t\"flexkube_containers\": resourceContainers(),\n\t\t\t\"flexkube_pki\": resourcePKI(),\n\t\t},\n\t\tConfigureFunc: providerConfigure,\n\t}\n}", "title": "" }, { "docid": "2720a9aa6b3224845b40218d1c197506", "score": "0.4570461", "text": "func Plugins(p pgs.Parameters) (plugins []string, all bool) {\n\ts, ok := p[pluginsKey]\n\tif !ok {\n\t\treturn\n\t}\n\n\tif all = s == \"\"; all {\n\t\treturn\n\t}\n\n\tplugins = strings.Split(s, pluginsSep)\n\treturn\n}", "title": "" }, { "docid": "39be2adba5e8d1161174374151e93b88", "score": "0.456736", "text": "func (m *Manager) ProviderNames() []string {\n\tvar names []string\n\tm.mu.RLock()\n\tfor n := range m.providers {\n\t\tnames = append(names, n)\n\t}\n\tm.mu.RUnlock()\n\treturn names\n}", "title": "" }, { "docid": "b7042ab3c43318a9b287ea5fb875d888", "score": "0.4562592", "text": "func Load(ctx context.Context, wd string, env []string, patterns []string) (*Info, []error) {\n\tpkgs, errs := load(ctx, wd, env, patterns)\n\tif len(errs) > 0 {\n\t\treturn nil, errs\n\t}\n\tif len(pkgs) == 0 {\n\t\treturn new(Info), nil\n\t}\n\tfset := pkgs[0].Fset\n\tinfo := &Info{\n\t\tFset: fset,\n\t\tSets: make(map[ProviderSetID]*ProviderSet),\n\t}\n\toc := newObjectCache(pkgs)\n\tec := new(errorCollector)\n\tfor _, pkg := range pkgs {\n\t\tif isWireImport(pkg.PkgPath) {\n\t\t\t// The marker function package confuses analysis.\n\t\t\tcontinue\n\t\t}\n\t\tscope := pkg.Types.Scope()\n\t\tfor _, name := range scope.Names() {\n\t\t\tobj := scope.Lookup(name)\n\t\t\tif !isProviderSetType(obj.Type()) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\titem, errs := oc.get(obj)\n\t\t\tif len(errs) > 0 {\n\t\t\t\tec.add(notePositionAll(fset.Position(obj.Pos()), errs)...)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tpset := item.(*ProviderSet)\n\t\t\t// pset.Name may not equal name, since it could be an alias to\n\t\t\t// another provider set.\n\t\t\tid := ProviderSetID{ImportPath: 
pset.PkgPath, VarName: name}\n\t\t\tinfo.Sets[id] = pset\n\t\t}\n\t\tfor _, f := range pkg.Syntax {\n\t\t\tfor _, decl := range f.Decls {\n\t\t\t\tfn, ok := decl.(*ast.FuncDecl)\n\t\t\t\tif !ok {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tbuildCall, err := findInjectorBuild(pkg.TypesInfo, fn)\n\t\t\t\tif err != nil {\n\t\t\t\t\tec.add(notePosition(fset.Position(fn.Pos()), fmt.Errorf(\"inject %s: %v\", fn.Name.Name, err)))\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tif buildCall == nil {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tsig := pkg.TypesInfo.ObjectOf(fn.Name).Type().(*types.Signature)\n\t\t\t\tins, out, err := injectorFuncSignature(sig)\n\t\t\t\tif err != nil {\n\t\t\t\t\tif w, ok := err.(*wireErr); ok {\n\t\t\t\t\t\tec.add(notePosition(w.position, fmt.Errorf(\"inject %s: %v\", fn.Name.Name, w.error)))\n\t\t\t\t\t} else {\n\t\t\t\t\t\tec.add(notePosition(fset.Position(fn.Pos()), fmt.Errorf(\"inject %s: %v\", fn.Name.Name, err)))\n\t\t\t\t\t}\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tinjectorArgs := &InjectorArgs{\n\t\t\t\t\tName: fn.Name.Name,\n\t\t\t\t\tTuple: ins,\n\t\t\t\t\tPos: fn.Pos(),\n\t\t\t\t}\n\t\t\t\tset, errs := oc.processNewSet(pkg.TypesInfo, pkg.PkgPath, buildCall, injectorArgs, \"\")\n\t\t\t\tif len(errs) > 0 {\n\t\t\t\t\tec.add(notePositionAll(fset.Position(fn.Pos()), errs)...)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\t_, errs = solve(fset, out.out, ins, set)\n\t\t\t\tif len(errs) > 0 {\n\t\t\t\t\tec.add(mapErrors(errs, func(e error) error {\n\t\t\t\t\t\tif w, ok := e.(*wireErr); ok {\n\t\t\t\t\t\t\treturn notePosition(w.position, fmt.Errorf(\"inject %s: %v\", fn.Name.Name, w.error))\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn notePosition(fset.Position(fn.Pos()), fmt.Errorf(\"inject %s: %v\", fn.Name.Name, e))\n\t\t\t\t\t})...)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tinfo.Injectors = append(info.Injectors, &Injector{\n\t\t\t\t\tImportPath: pkg.PkgPath,\n\t\t\t\t\tFuncName: fn.Name.Name,\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n\treturn info, ec.errors\n}", "title": "" }, { "docid": "26017578d0e296e2d2db40580598d88b", "score": "0.455936", "text": "func (a *FilecoinRetrievalProviderAdmin) ListProviders() (\n\t[]string, // id\n\t[]string, // region code\n\t[]string, // alias\n) {\n\ta.lock.RLock()\n\tdefer a.lock.RUnlock()\n\tids := make([]string, 0)\n\tregion := make([]string, 0)\n\talias := make([]string, 0)\n\tfor k, v := range a.activeProviders {\n\t\tids = append(ids, k)\n\t\tregion = append(region, v.regionCode)\n\t\talias = append(alias, v.alias)\n\t}\n\treturn ids, region, alias\n}", "title": "" }, { "docid": "c2433c23c2749b8f523f2d30e1ab6d2f", "score": "0.45311043", "text": "func (s *pluginsServer) registerPlugins(pluginPaths []string, grpcReg grpc.ServiceRegistrar, gwArgs gwHandlerArgs, serveOpts ServeOptions) ([]*plugins.Plugin, error) {\n\tpluginDetails := []*plugins.Plugin{}\n\n\tconfigGetter, err := createConfigGetter(serveOpts, s.clustersConfig)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to create a ClientGetter: %w\", err)\n\t}\n\n\tfor _, pluginPath := range pluginPaths {\n\t\tp, err := plugin.Open(pluginPath)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"unable to open plugin %q: %w\", pluginPath, err)\n\t\t}\n\n\t\tvar pluginDetail *plugins.Plugin\n\t\tif pluginDetail, err = getPluginDetail(p, pluginPath); err != nil {\n\t\t\treturn nil, err\n\t\t} else {\n\t\t\tpluginDetails = append(pluginDetails, pluginDetail)\n\t\t}\n\n\t\tif err = s.registerGRPC(p, pluginDetail, grpcReg, configGetter); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif err = 
registerHTTP(p, pluginDetail, gwArgs); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tlog.Infof(\"Successfully registered plugin %q\", pluginPath)\n\t}\n\treturn pluginDetails, nil\n}", "title": "" }, { "docid": "c6f1466888c13e21ed7137fd02d038e3", "score": "0.45280865", "text": "func getSupportedProvidersString() string {\n\tstringProviders := []string{}\n\tfor _, provider := range dns.SuportedProviders {\n\t\tstringProviders = append(stringProviders, string(provider))\n\t}\n\treturn strings.Join(stringProviders, \", \")\n}", "title": "" }, { "docid": "d19aacfe3d02fef38b676c32a9b15ab5", "score": "0.4523534", "text": "func (c *Client) GetProviders(query string) (ProviderList, error) {\n\tvar (\n\t\turi = \"/rest/providers\"\n\t\tqueryParams = createQuery(&map[string]string{\n\t\t\t\"query\": query,\n\t\t})\n\t\tproviders ProviderList\n\t)\n\n\tresponse, err := c.RestAPICall(rest.GET, uri, queryParams, nil)\n\n\tif err != nil {\n\t\treturn providers, err\n\t}\n\n\tif err := json.Unmarshal([]byte(response), &providers); err != nil {\n\t\treturn providers, apiResponseError(response, err)\n\t}\n\n\treturn providers, err\n}", "title": "" }, { "docid": "e390f1347cbc7fe8bd784cd951267e6d", "score": "0.4512414", "text": "func (m *IdentityContainer) GetIdentityProviders()([]IdentityProviderBaseable) {\n return m.identityProviders\n}", "title": "" }, { "docid": "debcd10d6003f8684e3953ad48b2e665", "score": "0.45042726", "text": "func sync() error {\n\tprovidersURL := \"https://registry.terraform.io/v2/providers?filter[tier]=official,partner&page[number]=%d&page[size]=100\"\n\tpage := 1\n\tvar finish bool\n\texportedProviders := map[string]Provider{\n\t\t\"flexibleengine\": Provider{\n\t\t\tAttributes: ProviderAttibutes{\n\t\t\t\tSource: \"https://github.com/FlexibleEngineCloud/terraform-provider-flexibleengine\",\n\t\t\t},\n\t\t},\n\t\t\"openstack\": Provider{\n\t\t\tAttributes: ProviderAttibutes{\n\t\t\t\tSource: \"https://github.com/terraform-provider-openstack/terraform-provider-openstack\",\n\t\t\t},\n\t\t},\n\t}\n\n\tfor !finish {\n\t\turl := fmt.Sprintf(providersURL, page)\n\t\tres, err := http.Get(url)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"could not http.Get: %w\", err)\n\t\t}\n\t\tdefer res.Body.Close()\n\n\t\tif res.StatusCode != http.StatusOK {\n\t\t\treturn fmt.Errorf(\"Wrong HTTP status from Providers %d with url %s\", res.StatusCode, url)\n\t\t}\n\n\t\tvar pvs ProvidersResponse\n\t\terr = json.NewDecoder(res.Body).Decode(&pvs)\n\t\tif res.StatusCode != http.StatusOK {\n\t\t\treturn fmt.Errorf(\"Failed to decode body %w\", err)\n\t\t}\n\n\t\tfor _, p := range pvs.Data {\n\t\t\texportedProviders[p.Attributes.Name] = p\n\t\t}\n\n\t\t// 0 means it has no value\n\t\tif pvs.Meta.Pagination.NextPage != 0 {\n\t\t\tpage = pvs.Meta.Pagination.NextPage\n\t\t} else {\n\t\t\tfinish = true\n\t\t}\n\t}\n\n\tdeletedProviders := make(map[string]struct{})\n\taddedProviders := make(map[string]struct{})\n\tcurrentProviders := make(map[string]struct{})\n\n\t// New we have all the Providers we want to document\n\t// on providers\n\tde, err := os.ReadDir(repositoriesPath)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"could not read %q %w\", repositoriesPath, err)\n\t}\n\n\tfor _, d := range de {\n\t\tif d.IsDir() {\n\t\t\t// If a local ones is not on the exported list it means\n\t\t\t// it has been deleted\n\t\t\tif _, ok := exportedProviders[d.Name()]; !ok {\n\t\t\t\tdeletedProviders[d.Name()] = struct{}{}\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tcurrentProviders[d.Name()] = 
struct{}{}\n\t\t}\n\t}\n\n\tfor k := range exportedProviders {\n\t\tif _, ok := currentProviders[k]; !ok {\n\t\t\taddedProviders[k] = struct{}{}\n\t\t}\n\t}\n\n\t// We then delete the Providers that need it\n\tdeletedCount := 0\n\tfor k := range deletedProviders {\n\t\tdeletedCount++\n\t\tfmt.Printf(\"Deleting [%d/%d] %s\\n\", deletedCount, len(deletedProviders), k)\n\t\tos.RemoveAll(filepath.Join(repositoriesPath, k))\n\t\tos.RemoveAll(filepath.Join(providersPath, k))\n\t}\n\n\t// Now we first clone the new ones\n\t// which are on addedProviders\n\taddedCount := 0\n\tfor k := range addedProviders {\n\t\taddedCount++\n\t\tp := exportedProviders[k]\n\t\tfmt.Printf(\"Cloning [%d/%d] %s\\n\", addedCount, len(addedProviders), k)\n\t\t_, err = git.PlainClone(filepath.Join(repositoriesPath, k), false, &git.CloneOptions{\n\t\t\tURL: p.Attributes.Source,\n\t\t\tRecurseSubmodules: git.DefaultSubmoduleRecursionDepth,\n\t\t})\n\t\tif err != nil && err != transport.ErrAuthenticationRequired {\n\t\t\treturn fmt.Errorf(\"failed to clone %q %q: %w\", k, p.Attributes.Source, err)\n\t\t}\n\t}\n\n\t// Then we pull the existing ones\n\t// which are on currentProviders\n\tcurrentCount := 0\n\tfor k := range currentProviders {\n\t\tcurrentCount++\n\n\t\tfmt.Printf(\"Pulling [%d/%d] %s\\n\", currentCount, len(currentProviders), k)\n\n\t\tpath := filepath.Join(repositoriesPath, k)\n\t\tr, err := git.PlainOpen(path)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to open %q %q: %w\", k, path, err)\n\t\t}\n\n\t\tw, err := r.Worktree()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to operate worktree %q %q: %w\", k, path, err)\n\t\t}\n\n\t\terr = w.Pull(&git.PullOptions{})\n\t\tif err != nil && !errors.Is(err, git.NoErrAlreadyUpToDate) {\n\t\t\tfmt.Printf(\"%+v\\n\", err)\n\t\t\treturn fmt.Errorf(\"failed to pull %q %q: %w\", k, path, err)\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "9f14138edc97b7d6545c545390f4c321", "score": "0.44980088", "text": "func GetCloudProviderCompletionValues(f *factory.Factory) (validProviders []string, directive cobra.ShellCompDirective) {\n\tvalidProviders = []string{}\n\tdirective = cobra.ShellCompDirectiveNoSpace\n\n\tconn, err := f.Connection(connection.DefaultConfigSkipMasAuth)\n\tif err != nil {\n\t\treturn validProviders, directive\n\t}\n\n\tcloudProviderResponse, _, err := conn.API().Kafka().GetCloudProviders(f.Context).Execute()\n\tif err != nil {\n\t\treturn validProviders, directive\n\t}\n\n\tcloudProviders := cloudProviderResponse.GetItems()\n\tvalidProviders = GetEnabledCloudProviderNames(cloudProviders)\n\n\treturn validProviders, directive\n}", "title": "" }, { "docid": "9381aa4d6fb3266ac007e282eee3dcd5", "score": "0.44950262", "text": "func (o *AdminCreateIdentityImportCredentialsOidcConfig) SetProviders(v []AdminCreateIdentityImportCredentialsOidcProvider) {\n\to.Providers = v\n}", "title": "" }, { "docid": "7f83075213dc7c3193c539c798708d68", "score": "0.44912863", "text": "func Provider() *schema.Provider {\n\treturn &schema.Provider{\n\t\tSchema: map[string]*schema.Schema{\n\t\t\t\"region\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tDefaultFunc: schema.MultiEnvDefaultFunc([]string{\n\t\t\t\t\t\"AWS_REGION\",\n\t\t\t\t\t\"AWS_DEFAULT_REGION\",\n\t\t\t\t}, nil),\n\t\t\t\tInputDefault: \"us-east-1\",\n\t\t\t},\n\t\t\t\"profile\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tOptional: true,\n\t\t\t\tConflictsWith: []string{\"role_arn\"},\n\t\t\t},\n\t\t\t\"role_arn\": {\n\t\t\t\tType: 
schema.TypeString,\n\t\t\t\tOptional: true,\n\t\t\t\tConflictsWith: []string{\"profile\"},\n\t\t\t},\n\t\t},\n\t\tResourcesMap: map[string]*schema.Resource{\n\t\t\t\"bless_ca\": CA(),\n\t\t\t\"bless_ecdsa_ca\": ECDSACA(),\n\t\t},\n\t\tDataSourcesMap: map[string]*schema.Resource{\n\t\t\t\"bless_kms_public_key\": KMSPublicKey(),\n\t\t},\n\t\tConfigureFunc: configureProvider,\n\t}\n}", "title": "" }, { "docid": "9951b7115dd81a76705d47464167ed83", "score": "0.44847485", "text": "func (m *Manager) InitProviders(appCfg *config.Config, logger log.Loggerer) error {\n\tm.mu.Lock()\n\tfor n, p := range m.providers {\n\t\tif err := p.Init(n, appCfg, logger); err != nil {\n\t\t\tm.mu.Unlock()\n\t\t\treturn err\n\t\t}\n\t}\n\tm.mu.Unlock()\n\treturn nil\n}", "title": "" }, { "docid": "80d0516e52e794c71928f38fe5d087e7", "score": "0.4483555", "text": "func PossibleVolumeProviderValues() []VolumeProvider {\n\treturn []VolumeProvider{SFAzureFile}\n}", "title": "" }, { "docid": "af3bd001bdb887366ce9679df1679eac", "score": "0.44834253", "text": "func Get() []configprovider.Factory {\n\treturn []configprovider.Factory{\n\t\tenvvarconfigsource.NewFactory(),\n\t\tetcd2configsource.NewFactory(),\n\t\tincludeconfigsource.NewFactory(),\n\t\tvaultconfigsource.NewFactory(),\n\t\tzookeeperconfigsource.NewFactory(),\n\t}\n}", "title": "" }, { "docid": "395cc2ff0b81fdaf6360cf100dff5771", "score": "0.44816682", "text": "func (rs *Reposurgeon) evalPathset(state selEvalState,\n\tpreselection *fastOrderedIntSet, matcher string) *fastOrderedIntSet {\n\ttype vendPaths interface {\n\t\tpaths(orderedStringSet) orderedStringSet\n\t}\n\thits := newFastOrderedIntSet()\n\tevents := rs.chosen().events\n\tit := preselection.Iterator()\n\tfor it.Next() {\n\t\tif e, ok := events[it.Value()].(vendPaths); ok &&\n\t\t\te.paths(nil).Contains(matcher) {\n\t\t\thits.Add(it.Value())\n\t\t}\n\t}\n\treturn hits\n}", "title": "" }, { "docid": "82a85d25f06e36aaf33d65c7b426d02a", "score": "0.44750926", "text": "func (o *AdminCreateIdentityImportCredentialsOidcConfig) HasProviders() bool {\n\tif o != nil && o.Providers != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "e00531c3c5cd145002c69648835483b6", "score": "0.44677877", "text": "func (expp *ExpectedProvider) GetRegistries() []*registry.Registry {\n\tregistryObjects, found := expp.store[api.SchemeGroupVersion.WithKind(\"Registry\")]\n\tif !found {\n\t\tpanic(\"cannot get registry objects\")\n\t}\n\tregistries := make([]*registry.Registry, len(registryObjects))\n\tfor i, reg := range registryObjects {\n\t\tapiRegistry, ok := reg.(*api.Registry)\n\t\tif !ok {\n\t\t\tpanic(\"cannot assert registry object\")\n\t\t}\n\t\tregistries[i] = registry.New(apiRegistry, (*ApiObjectStore)(expp))\n\t}\n\treturn registries\n}", "title": "" }, { "docid": "b7a08d5b7a9d8dc02a111f0c8d29b15e", "score": "0.44614697", "text": "func GetWellKnownPlugins() []*Plugin {\n\treturn []*Plugin{\n\t\tGetPluginProtocGenGo(),\n\t\tGetPluginProtocGenGoGRPC(),\n\t\t{\n\t\t\tName: \"protoc-gen-grpc-gateway\",\n\t\t\tPkg: \"github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-grpc-gateway@latest\",\n\t\t\tOptions: []string{\n\t\t\t\t\"--grpc-gateway_out=.\",\n\t\t\t\t\"--grpc-gateway_opt=paths=source_relative\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tOptionsValue: \"protoc-gen-go-grpc (SupportPackageIsVersion6)\",\n\t\t\tName: \"protoc-gen-go-grpc\",\n\t\t\tPkg: \"google.golang.org/grpc/cmd/protoc-gen-go-grpc@ad51f572fd270f2323e3aa2c1d2775cab9087af2\",\n\t\t\tOptions: 
[]string{\n\t\t\t\t\"--go-grpc_out=.\",\n\t\t\t\t\"--go-grpc_opt=paths=source_relative\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: \"protoc-gen-openapiv2\",\n\t\t\tPkg: \"github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2@latest\",\n\t\t\tOptions: []string{\n\t\t\t\t\"--openapiv2_out=.\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: \"protoc-gen-gogo\",\n\t\t\tPkg: \"github.com/gogo/protobuf/protoc-gen-gogo@latest\",\n\t\t\tOptions: []string{\n\t\t\t\t\"--gogo_out=plugins=grpc:.\",\n\t\t\t\t\"--gogo_opt=paths=source_relative\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: \"protoc-gen-gofast\",\n\t\t\tPkg: \"github.com/gogo/protobuf/protoc-gen-gofast@latest\",\n\t\t\tOptions: []string{\n\t\t\t\t\"--gofast_out=plugins=grpc:.\",\n\t\t\t\t\"--gofast_opt=paths=source_relative\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: \"protoc-gen-deepcopy\",\n\t\t\tPkg: \"istio.io/tools/cmd/protoc-gen-deepcopy@latest\",\n\t\t\tOptions: []string{\n\t\t\t\t\"--deepcopy_out=source_relative:.\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: \"protoc-gen-go-json\",\n\t\t\tPkg: \"github.com/mitchellh/protoc-gen-go-json@latest\",\n\t\t\tOptions: []string{\n\t\t\t\t\"--go-json_out=.\",\n\t\t\t},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "685c2fe551edf0712154d6846a6cd43e", "score": "0.44604573", "text": "func pluginDirs(rootDir string) ([]string, error) {\n\tdirs := []string{}\n\n\t// current directory\n\tdirs = append(dirs, rootDir)\n\n\t// same directory as this executable (not terraform)\n\texePath, err := os.Executable()\n\tif err != nil {\n\t\treturn []string{}, fmt.Errorf(\"Failed to get executable path: %s\", err)\n\t}\n\tdirs = append(dirs, filepath.Dir(exePath))\n\n\t// user vendor directory, which is part of an implied local mirror.\n\tarch := runtime.GOOS + \"_\" + runtime.GOARCH\n\tvendorDir := filepath.Join(rootDir, \"terraform.d\", \"plugins\", arch)\n\tdirs = append(dirs, vendorDir)\n\n\t// auto installed directory for Terraform v0.14+\n\t// The path contains a fully qualified provider name and version.\n\t// .terraform/providers/registry.terraform.io/hashicorp/aws/3.17.0/darwin_amd64\n\t// So we peek a lock file (.terraform.lock.hcl) and build a\n\t// path of plugin directories.\n\t// We don't check the `plugin_cache_dir` setting in the Terraform CLI\n\t// configuration or the TF_PLUGIN_CACHE_DIR environmental variable here,\n\t// https://github.com/hashicorp/terraform/blob/v0.14.0-rc1/website/docs/commands/cli-config.html.markdown#provider-plugin-cache\n\t// but even if it is configured, the lock file is stored under\n\t// the root directory and provider binaries are symlinked to the\n\t// cache directory when running terraform init.\n\tautoInstalledDirsV014, err := newLockFile(filepath.Join(rootDir, \".terraform.lock.hcl\")).pluginDirs()\n\tif err != nil {\n\t\treturn []string{}, err\n\t}\n\n\tdirs = append(dirs, autoInstalledDirsV014...)\n\n\t// auto installed directory for Terraform v0.13\n\t// The path contains a fully qualified provider name and version.\n\t// .terraform/plugins/registry.terraform.io/hashicorp/aws/2.67.0/darwin_amd64\n\t// So we peek a selection file (.terraform/plugins/selections.json) and build a\n\t// path of plugin directories.\n\t// We don't check the `plugin_cache_dir` setting in the Terraform CLI\n\t// configuration or the TF_PLUGIN_CACHE_DIR environmental variable here,\n\t// https://github.com/hashicorp/terraform/blob/v0.13.0-beta2/website/docs/commands/cli-config.html.markdown#provider-plugin-cache\n\t// but even if it is configured, the selection file is stored under\n\t// 
.terraform/plugins directory and provider binaries are symlinked to the\n\t// cache directory when running terraform init.\n\tautoInstalledDirsV013, err := newSelectionFile(filepath.Join(rootDir, \".terraform\", \"plugins\", \"selections.json\")).pluginDirs()\n\tif err != nil {\n\t\treturn []string{}, err\n\t}\n\n\tdirs = append(dirs, autoInstalledDirsV013...)\n\n\t// auto installed directory for Terraform < v0.13\n\tlegacyAutoInstalledDir := filepath.Join(rootDir, \".terraform\", \"plugins\", arch)\n\tdirs = append(dirs, legacyAutoInstalledDir)\n\n\t// global plugin directory\n\thomeDir, err := homedir.Dir()\n\tif err != nil {\n\t\treturn []string{}, fmt.Errorf(\"Failed to get home dir: %s\", err)\n\t}\n\tconfigDir := filepath.Join(homeDir, \".terraform.d\", \"plugins\")\n\tdirs = append(dirs, configDir)\n\tdirs = append(dirs, filepath.Join(configDir, arch))\n\n\t// We don't check a provider_installation block in the Terraform CLI configuration.\n\t// Because it needs parse HCL and a lot of considerations to implement it\n\t// precisely such as include/exclude rules and a packed layout.\n\t// https://github.com/hashicorp/terraform/blob/v0.13.0-beta2/website/docs/commands/cli-config.html.markdown#explicit-installation-method-configuration\n\n\t// For completeness, we also should check implied local mirror directories.\n\t// https://github.com/hashicorp/terraform/blob/v0.13.0-beta2/website/docs/commands/cli-config.html.markdown#implied-local-mirror-directories\n\t// The set of directies depends on the operating system where you are running Terraform.\n\t// but we cannot enough test for them without test environments,\n\t// so we intentionally don't support it for now.\n\t// - Windows: %APPDATA%/HashiCorp/Terraform/plugins\n\t// - Mac OS X: ~/Library/Application Support/io.terraform/plugins and\n\t// /Library/Application Support/io.terraform/plugins\n\t// - Linux and other Unix-like systems: Terraform implements the XDG Base\n\t// Directory specification and appends terraform/plugins to all of the\n\t// specified data directories. 
Without any XDG environment variables set,\n\t// Terraform will use ~/.local/share/terraform/plugins,\n\t// /usr/local/share/terraform/plugins, and /usr/share/terraform/plugins.\n\n\t// GOPATH\n\t// This is not included in the Terraform, but for convenience.\n\tgopath := build.Default.GOPATH\n\tdirs = append(dirs, filepath.Join(gopath, \"bin\"))\n\n\tlog.Printf(\"[DEBUG] plugin dirs: %#v\", dirs)\n\treturn dirs, nil\n}", "title": "" }, { "docid": "168f2953fef68dfeb746d5dc664ebc2e", "score": "0.44599202", "text": "func Provider() *schema.Provider {\n\treturn &schema.Provider{\n\t\tSchema: map[string]*schema.Schema{\n\t\t\t\"address\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tDescription: \"Address of goterra\",\n\t\t\t},\n\t\t\t\"apikey\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tDescription: \"User API Key\",\n\t\t\t},\n\t\t},\n\t\tResourcesMap: map[string]*schema.Resource{\n\t\t\t\"goterra_deployment\": resourceDeployment(),\n\t\t\t\"goterra_push\": resourcePush(),\n\t\t\t\"goterra_application\": resourceApplication(),\n\t\t},\n\t\tDataSourcesMap: map[string]*schema.Resource{\n\t\t\t\"goterra_deployment\": dataSourceDeployment(),\n\t\t},\n\t\tConfigureFunc: providerConfigure,\n\t}\n}", "title": "" }, { "docid": "c77b16f603c8736ff0535c992e912e45", "score": "0.44588614", "text": "func EnvProviders(getEnv func(string) string) (providers []AuthProvider) {\n\tproviders = make([]AuthProvider, 0, 4)\n\tprotoProviders := []AuthProvider{\n\t\t{\n\t\t\tID: \"google\",\n\t\t\tName: \"Google\",\n\t\t},\n\t\t{\n\t\t\tID: \"facebook\",\n\t\t\tName: \"Facebook\",\n\t\t},\n\t\t{\n\t\t\tID: \"twitter\",\n\t\t\tName: \"Twitter\",\n\t\t},\n\t\t{\n\t\t\tID: \"github\",\n\t\t\tName: \"Github\",\n\t\t},\n\t}\n\n\t// read client id and key from environment\n\tvar clientIDKey, clientSecretKey string\n\tfor _, provider := range protoProviders {\n\t\tclientIDKey = fmt.Sprintf(\"OAUTH2_%s_CLIENT_ID\", strings.ToUpper(provider.ID))\n\t\tclientSecretKey = fmt.Sprintf(\"OAUTH2_%s_CLIENT_SECRET\", strings.ToUpper(provider.ID))\n\t\tif clientID, clientSecret := getEnv(clientIDKey), getEnv(clientSecretKey); clientID != \"\" && clientSecret != \"\" {\n\t\t\tprovider.ClientID, provider.ClientSecret = clientID, clientSecret\n\t\t\tproviders = append(\n\t\t\t\tproviders,\n\t\t\t\tprovider,\n\t\t\t)\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "e30a57230d17298e672c59cbb46cd359", "score": "0.44583845", "text": "func (c *TkgClient) WaitForProviders(clusterClient clusterclient.Client, options waitForProvidersOptions) error {\n\tvar err error\n\n\tif clusterClient == nil {\n\t\tclusterClient, err = clusterclient.NewClient(options.Kubeconfig, \"\", clusterclient.Options{OperationTimeout: c.timeout})\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"unable to get deletion cluster client\")\n\t\t}\n\t}\n\n\t// Get the all the installed provider info\n\tproviders := &clusterctlv1.ProviderList{}\n\terr = clusterClient.ListResources(providers, &crtclient.ListOptions{})\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"cannot get installed provider config\")\n\t}\n\t// Wait for each provider(core-provider, bootstrap-provider, infrastructure-providers) to be up and running\n\tvar wg sync.WaitGroup\n\tresults := make(chan error, len(providers.Items))\n\tfor i := range providers.Items {\n\t\twg.Add(1)\n\t\tgo func(wg *sync.WaitGroup, provider clusterctlv1.Provider) {\n\t\t\tdefer wg.Done()\n\t\t\tt, err := TimedExecution(func() error {\n\t\t\t\tlog.V(3).Infof(\"Waiting 
for provider %s\", provider.Name)\n\t\t\t\tproviderNameVersion := provider.ProviderName + \":\" + provider.Version\n\t\t\t\treturn c.waitForProvider(clusterClient, providerNameVersion, provider.Type, options.TargetNamespace, options.WatchingNamespace)\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tlog.V(3).Warningf(\"Failed waiting for provider %v after %v\", provider.Name, t)\n\t\t\t\tresults <- err\n\t\t\t} else {\n\t\t\t\tlog.V(3).Infof(\"Passed waiting on provider %s after %v\", provider.Name, t)\n\t\t\t}\n\t\t}(&wg, providers.Items[i])\n\t}\n\n\twg.Wait()\n\tclose(results)\n\tfor err := range results {\n\t\tlog.Warning(\"Failed waiting for at least one provider, check logs for more detail.\")\n\t\treturn err\n\t}\n\tlog.V(3).Info(\"Success waiting on all providers.\")\n\treturn nil\n}", "title": "" }, { "docid": "b99609df5a03afc2e0c2f1566e159f71", "score": "0.44566116", "text": "func Provider() terraform.ResourceProvider {\n\treturn &schema.Provider{\n\t\tSchema: map[string]*schema.Schema{\n\t\t\t\"server_url\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tDefaultFunc: schema.EnvDefaultFunc(\"SHAKENFIST_API_URL\", \"\"),\n\t\t\t},\n\t\t\t\"namespace\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tDefaultFunc: schema.EnvDefaultFunc(\"SHAKENFIST_NAMESPACE\", \"\"),\n\t\t\t},\n\t\t\t\"key\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tSensitive: true,\n\t\t\t\tDefaultFunc: schema.EnvDefaultFunc(\"SHAKENFIST_KEY\", \"\"),\n\t\t\t},\n\t\t},\n\t\tResourcesMap: map[string]*schema.Resource{\n\t\t\t\"shakenfist_namespace\": resourceNamespace(),\n\t\t\t\"shakenfist_key\": resourceKey(),\n\t\t\t\"shakenfist_network\": resourceNetwork(),\n\t\t\t\"shakenfist_instance\": resourceInstance(),\n\t\t\t\"shakenfist_float\": resourceFloat(),\n\t\t},\n\t\tConfigureFunc: providerConfigure,\n\t}\n}", "title": "" }, { "docid": "f7b389dab5d09f9512022506d5958683", "score": "0.44535464", "text": "func Provider(ctx context.Context, wg *sync.WaitGroup) *schema.Provider {\n\tp := schema.Provider{\n\t\tDataSourcesMap: map[string]*schema.Resource{\n\t\t\t\"git_file\": dataSourceGitFile(),\n\t\t},\n\t\tResourcesMap: map[string]*schema.Resource{\n\t\t\t\"git_file\": resourceGitFile(),\n\t\t},\n\t\tSchema: map[string]*schema.Schema{\n\t\t\t\"repository_url\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tDefaultFunc: schema.EnvDefaultFunc(\"GIT_REPOSITORY_URL\", nil),\n\t\t\t},\n\t\t\t\"author_name\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tOptional: true,\n\t\t\t\tDefault: \"Terraform Git Provider\",\n\t\t\t},\n\t\t\t\"author_email\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tOptional: true,\n\t\t\t\tDefault: \"tf@fourplusone.de\",\n\t\t\t},\n\t\t},\n\t}\n\tp.ConfigureFunc = configureProviderFunc(ctx, &p, wg)\n\n\treturn &p\n}", "title": "" }, { "docid": "5ec058c019d739c217a403f8b436131b", "score": "0.4449749", "text": "func getProviders(neighborPeers []Peer, cachedBlocks []Block,\n\tresource []Resource, netState State, params schedulerParams) ([]PeerBlock, error) {\n\n\t// get all PeerBlocks from neighbor peers\n\tneighPbs := filterPeersBlocks(neighborPeers, netState)\n\n\t// gets all PeerBlocks from its neighbor peers that are not yet cached locally\n\tnonCachedNeighPbs := filterBlocks(neighPbs, cachedBlocks)\n\n\t// select final PeerBlocks based on configuration params and requested\n\t// resource\n\trpbs := getBlocksResourcePeers(resource, neighborPeers)\n\tfinalPbs := schedule(rpbs, nonCachedNeighPbs, 
params.obfuscationP, params.spreadP)\n\n\treturn finalPbs, nil\n}", "title": "" }, { "docid": "713fa4e6c94cce76ad6c5d2108ed9c54", "score": "0.4448337", "text": "func SetProvider(p Provider) error {\n\tif err := p.SetAll(settings); err != nil {\n\t\treturn err\n\t}\n\tprovider = p\n\treturn nil\n}", "title": "" }, { "docid": "572a5ec683d4165ee7c6c7f289121b95", "score": "0.44446248", "text": "func (m *Meta) pluginDirs(includeAutoInstalled bool) []string {\n\t// user defined paths take precedence\n\tif len(m.pluginPath) > 0 {\n\t\treturn m.pluginPath\n\t}\n\n\t// When searching the following directories, earlier entries get precedence\n\t// if the same plugin version is found twice, but newer versions will\n\t// always get preference below regardless of where they are coming from.\n\t// TODO: Add auto-install dir, default vendor dir and optional override\n\t// vendor dir(s).\n\tdirs := []string{\".\"}\n\n\t// Look in the same directory as the Terraform executable.\n\t// If found, this replaces what we found in the config path.\n\texePath, err := osext.Executable()\n\tif err != nil {\n\t\tlog.Printf(\"[ERROR] Error discovering exe directory: %s\", err)\n\t} else {\n\t\tdirs = append(dirs, filepath.Dir(exePath))\n\t}\n\n\t// add the user vendor directory\n\tdirs = append(dirs, DefaultPluginVendorDir)\n\n\tif includeAutoInstalled {\n\t\tdirs = append(dirs, m.pluginDir())\n\t}\n\tdirs = append(dirs, m.GlobalPluginDirs...)\n\n\treturn dirs\n}", "title": "" }, { "docid": "75b68628aa7ff1276bbbf0e3fd193759", "score": "0.44386393", "text": "func validatePlugins(plugins ...plugin.Base) error {\n\tpluginKeySet := make(map[string]struct{}, len(plugins))\n\tfor _, p := range plugins {\n\t\tif err := validatePlugin(p); err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// Check for duplicate plugin keys.\n\t\tpluginKey := plugin.KeyFor(p)\n\t\tif _, seen := pluginKeySet[pluginKey]; seen {\n\t\t\treturn fmt.Errorf(\"two plugins have the same key: %q\", pluginKey)\n\t\t}\n\t\tpluginKeySet[pluginKey] = struct{}{}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "ae60281b63e1a8cfca85d21bf89595be", "score": "0.4434718", "text": "func Provider() terraform.ResourceProvider {\n\n\t// return the Provider with map of resources and their operations\n\treturn &schema.Provider{\n\t\tSchema: map[string]*schema.Schema{\n\t\t\t\"broker_list\": &schema.Schema{\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tDefaultFunc: schema.EnvDefaultFunc(\"KAFKA_BROKER_LIST\", nil),\n\t\t\t\tValidateFunc: func(v interface{}, k string) (ws []string, errors []error) {\n\t\t\t\t\tvalue := v.(string)\n\t\t\t\t\tif value == \"\" {\n\t\t\t\t\t\terrors = append(errors, fmt.Errorf(\"Broker list must not be an empty string\"))\n\t\t\t\t\t}\n\n\t\t\t\t\treturn\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\n\t\tResourcesMap: map[string]*schema.Resource{\n\t\t\t\"kafka_topic\": resourceKafkaTopic(),\n\t\t},\n\n\t\tConfigureFunc: provideConfigure,\n\t}\n\n}", "title": "" }, { "docid": "b76e56baf811658b3b80c8dbf6593a2a", "score": "0.44291747", "text": "func (config *Config) PluginRepos() []PluginRepos {\n\treturn config.ConfigFile.PluginRepos\n}", "title": "" }, { "docid": "e3949a92b23667db95987b111b266872", "score": "0.44289294", "text": "func ListAll() []compulsive.Provider {\n\treturn list(nil)\n}", "title": "" }, { "docid": "34e6d52da3a9acc3cba5f8803ad2b063", "score": "0.44285148", "text": "func GetProviders(c *gin.Context) {\n\t//Asking to model\n\tproviders := model.GetProviders()\n\t//If length of providers is zero,\n\t//is because no exist 
providers\n\tif checkSize(providers) {\n\t\tresponse := gin.H{\n\t\t\t\"status\": \"error\",\n\t\t\t\"data\": nil,\n\t\t\t\"message\": GetMessageErrorPlural + \" providers\",\n\t\t}\n\t\tc.JSON(http.StatusNotFound, response)\n\t} else {\n\t\tresponse := gin.H{\n\t\t\t\"status\": \"success\",\n\t\t\t\"data\": providers,\n\t\t\t\"message\": nil,\n\t\t}\n\t\tc.JSON(http.StatusOK, response)\n\t}\n}", "title": "" }, { "docid": "030646c2508914b581d37bd410ac503c", "score": "0.44198525", "text": "func HelpProviders() map[string]HelpProvider {\n\treturn pluginHelp\n}", "title": "" }, { "docid": "f0d1872e73929854fd6bd001ab8104c9", "score": "0.44165152", "text": "func (o GoogleCloudIdentitytoolkitAdminV2MultiFactorAuthConfigPtrOutput) ProviderConfigs() GoogleCloudIdentitytoolkitAdminV2ProviderConfigArrayOutput {\n\treturn o.ApplyT(func(v *GoogleCloudIdentitytoolkitAdminV2MultiFactorAuthConfig) []GoogleCloudIdentitytoolkitAdminV2ProviderConfig {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.ProviderConfigs\n\t}).(GoogleCloudIdentitytoolkitAdminV2ProviderConfigArrayOutput)\n}", "title": "" }, { "docid": "700ba37e65eab0754c3205ad85ea8a2c", "score": "0.44128945", "text": "func New(c *configpb.ProviderConfig, l *logger.Logger) (*Provider, error) {\n\tfilePaths := c.GetFilePath()\n\tp := &Provider{\n\t\tfilePaths: filePaths,\n\t\tlisters: make(map[string]*lister),\n\t}\n\n\tfor _, filePath := range filePaths {\n\t\tlister, err := newLister(filePath, c, l)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tp.listers[filePath] = lister\n\t}\n\n\treturn p, nil\n}", "title": "" }, { "docid": "b1174fc13d15ea043985c26e18a08905", "score": "0.44120026", "text": "func Provider() *schema.Provider {\n\treturn &schema.Provider{\n\t\tSchema: map[string]*schema.Schema{\n\t\t\t\"operator\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tDefaultFunc: schema.EnvDefaultFunc(ProviderOperator, nil),\n\t\t\t\tDescription: \"The operator, please provides your English name, which named rtx ago\",\n\t\t\t},\n\t\t},\n\n\t\tDataSourcesMap: map[string]*schema.Resource{\n\t\t\t\"xac_123_images\": &schema.Resource{},\n\t\t\t\"xac_obs_ops_products\": &schema.Resource{},\n\t\t\t\"xac_cmdb_modules\": &schema.Resource{},\n\t\t},\n\n\t\tResourcesMap: map[string]*schema.Resource{\n\t\t\t\"xac_123\": xac123.ResourceXaC123(),\n\t\t\t//\"xac_007\": xac007.ResourceXaC007(),\n\t\t\t//\"xac_store_mdb\": xac_store.ResourceXaCStoreMDB(),\n\t\t\t//\"xac_store_bdb\": xac_store.ResourceXaCStoreMDB(),\n\t\t\t//\"xac_store_dcache\": xac_store.ResourceXaCStoreMDB(),\n\t\t\t//\"xac_store_redis\": xac_store.ResourceXaCStoreMDB(),\n\t\t\t//\"xac_paas_cos\": xac_paas.ResourceXaCPaaSCOS(),\n\t\t\t//\"xac_paas_cvm\": xac_paas.ResourceXaCPaaSCOS(),\n\t\t\t//\"xac_paas_es\": xac_paas.ResourceXaCPaaSCOS(),\n\t\t\t//\"xac_paas_ckafka\": xac_paas.ResourceXaCPaaSCOS(),\n\t\t},\n\t}\n}", "title": "" }, { "docid": "f292b53805ca903480107a8635764c75", "score": "0.44039768", "text": "func ClientsProvider(client client.Client) Clientset {\n\treturn NewClientset(client)\n}", "title": "" }, { "docid": "2e3e0ebf22b5819e6692554806a95999", "score": "0.44029456", "text": "func GetAllServiceProviders(classname string) (serverList []*RpcService) {\n\tfor _, v := range NOS.Services {\n\t\tif v != nil && v.Provides == classname {\n\t\t\tserverList = append(serverList, v)\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "f69e373a572d4038682fe46909a6e0a1", "score": "0.43941498", "text": "func (set *ProviderSet) Outputs() []types.Type 
{\n\treturn set.providerMap.Keys()\n}", "title": "" }, { "docid": "89dc12c0f051de3fff9e644c6c9e4bf8", "score": "0.43920377", "text": "func New(providers ...Provider) Provider {\n\tvar registry Registry\n\tfor _, provider := range providers {\n\t\tregistry = append(registry, provider)\n\t}\n\treturn registry\n}", "title": "" }, { "docid": "a0c65264dd5c388890384d1f30e31943", "score": "0.4387724", "text": "func Provider() terraform.ResourceProvider {\n\treturn &schema.Provider{\n\t\tSchema: map[string]*schema.Schema{\n\t\t\t\"token_file\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tOptional: true,\n\t\t\t\tDefault: \"token.json\",\n\t\t\t},\n\t\t\t\"user_id\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tOptional: true,\n\t\t\t\tDefault: \"me\",\n\t\t\t},\n\t\t},\n\t\tDataSourcesMap: map[string]*schema.Resource{\n\t\t\t\"gmail_labels\": dataSourceGmailLabels(),\n\t\t\t\"gmail_label\": dataSourceGmailLabel(),\n\t\t},\n\t\tResourcesMap: map[string]*schema.Resource{\n\t\t\t\"gmail_filter\": resourceGmailFilter(),\n\t\t\t\"gmail_label\": resourceGmailLabel(),\n\t\t},\n\t\tConfigureFunc: providerConfigure,\n\t}\n}", "title": "" }, { "docid": "3ed7296272bd31e99f29963b263105b7", "score": "0.4387719", "text": "func (pa *ConfigAgent) getPlugins(owner, repo string) []string {\n\tvar plugins []string\n\n\tfullName := fmt.Sprintf(\"%s/%s\", owner, repo)\n\tif !sets.NewString(pa.configuration.Plugins[owner].ExcludedRepos...).Has(repo) {\n\t\tplugins = append(plugins, pa.configuration.Plugins[owner].Plugins...)\n\t}\n\tplugins = append(plugins, pa.configuration.Plugins[fullName].Plugins...)\n\n\treturn plugins\n}", "title": "" }, { "docid": "fed0ee59bc334ec9ced5ab8ffb9c5ccd", "score": "0.4383", "text": "func (i *ProviderInstaller) listProviderVersions(name string) ([]Version, error) {\n\tversions, err := listPluginVersions(i.providerVersionsURL(name))\n\tif err != nil {\n\t\t// listPluginVersions returns a verbose error message indicating\n\t\t// what was being accessed and what failed\n\t\treturn nil, err\n\t}\n\treturn versions, nil\n}", "title": "" }, { "docid": "8e4935ea491c6b7aa9534a2d09b4c597", "score": "0.43820906", "text": "func (o GoogleCloudIdentitytoolkitAdminV2MultiFactorAuthConfigOutput) ProviderConfigs() GoogleCloudIdentitytoolkitAdminV2ProviderConfigArrayOutput {\n\treturn o.ApplyT(func(v GoogleCloudIdentitytoolkitAdminV2MultiFactorAuthConfig) []GoogleCloudIdentitytoolkitAdminV2ProviderConfig {\n\t\treturn v.ProviderConfigs\n\t}).(GoogleCloudIdentitytoolkitAdminV2ProviderConfigArrayOutput)\n}", "title": "" }, { "docid": "a9466792ef44ee18c4917d5accf90e93", "score": "0.43719932", "text": "func (r MongoDBRepository) GetAllProviders() (model.Providers, error) {\n\tres, err := r.getAll(providersCollectionName, new(model.Providers))\n\treturn *((res).(*model.Providers)), err\n}", "title": "" }, { "docid": "28c1f3ae2d5268461144701eda5e9217", "score": "0.43717584", "text": "func (h *Handler) ServeProviders(w http.ResponseWriter, req *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tctx := req.Context()\n\tinfo := make([]registeredProvider, 0, len(h.providers))\n\n\tu, err := url.Parse(req.RequestURI)\n\tif errutil.HTTPError(ctx, w, err) {\n\t\treturn\n\t}\n\t// Detect current pathPrefix instead of using CallbackURL since it\n\t// will be used for browser linking.\n\t//\n\t// Also handles edge cases around first-time setup/localhost/etc...\n\tpathPrefix := strings.TrimSuffix(u.Path, req.URL.Path)\n\n\tfor id, p := range h.providers {\n\t\tif !p.Info(ctx).Enabled 
{\n\t\t\tcontinue\n\t\t}\n\n\t\tinfo = append(info, registeredProvider{\n\t\t\tID: id,\n\t\t\tURL: path.Join(pathPrefix, \"/api/v2/identity/providers\", url.PathEscape(id)),\n\t\t\tProviderInfo: p.Info(ctx),\n\t\t})\n\t}\n\n\tsort.Slice(info, func(i, j int) bool { return info[i].ID < info[j].ID })\n\tdata, err := json.Marshal(info)\n\tif errutil.HTTPError(req.Context(), w, err) {\n\t\treturn\n\t}\n\t_, _ = w.Write(data)\n}", "title": "" }, { "docid": "fbb948a7c955d29f9c19a7918a5e6b32", "score": "0.4362675", "text": "func Provider() terraform.ResourceProvider {\n\n\t// The actual provider\n\treturn &schema.Provider{\n\t\tSchema: map[string]*schema.Schema{\n\t\t\t\"token\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tDefaultFunc: schema.EnvDefaultFunc(\"GITLAB_TOKEN\", nil),\n\t\t\t\tDescription: descriptions[\"token\"],\n\t\t\t\tSensitive: true,\n\t\t\t},\n\t\t\t\"base_url\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tOptional: true,\n\t\t\t\tDefaultFunc: schema.EnvDefaultFunc(\"GITLAB_BASE_URL\", \"\"),\n\t\t\t\tDescription: descriptions[\"base_url\"],\n\t\t\t},\n\t\t},\n\t\tResourcesMap: map[string]*schema.Resource{\n\t\t\t\"gitlabx_group\": resourceGitlabGroup(),\n\t\t\t\"gitlabx_project\": resourceGitlabProject(),\n\t\t\t\"gitlabx_project_hook\": resourceGitlabProjectHook(),\n\t\t},\n\n\t\tConfigureFunc: providerConfigure,\n\t}\n}", "title": "" }, { "docid": "1506893b3b5ba0c63e797a34e1254d07", "score": "0.43588048", "text": "func (l *loader) plugins() []string {\n\tdir, err := os.Open(l.pluginsDir)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tdefer dir.Close()\n\tnames, err := dir.Readdirnames(-1)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tvar res []string\n\tfor _, name := range names {\n\t\tif filepath.Ext(name) == \".go\" {\n\t\t\tres = append(res, name)\n\t\t}\n\t}\n\treturn res\n}", "title": "" }, { "docid": "7268881449cc79e48804fc40ac4f536c", "score": "0.43536648", "text": "func (v *ClusterClient) GetClusterProviders() ([]ClusterProvider, error) {\n\n\t//Construct key and tag to select the entry\n\tkey := ClusterProviderKey{\n\t\tClusterProviderName: \"\",\n\t}\n\n\tvar resp []ClusterProvider\n\tvalues, err := db.DBconn.Find(v.db.storeName, key, v.db.tagMeta)\n\tif err != nil {\n\t\treturn []ClusterProvider{}, pkgerrors.Wrap(err, \"Get ClusterProviders\")\n\t}\n\n\tfor _, value := range values {\n\t\tcp := ClusterProvider{}\n\t\terr = db.DBconn.Unmarshal(value, &cp)\n\t\tif err != nil {\n\t\t\treturn []ClusterProvider{}, pkgerrors.Wrap(err, \"Unmarshalling Value\")\n\t\t}\n\t\tresp = append(resp, cp)\n\t}\n\n\treturn resp, nil\n}", "title": "" }, { "docid": "3c7ce52e54a4727fa6b3e5f71a928585", "score": "0.43505675", "text": "func getProviderConfig() (pc *schema.HTTPHeaderAuthProvider, multiple bool) {\n\tfor _, p := range conf.AuthProviders() {\n\t\tif p.HttpHeader != nil {\n\t\t\tif pc != nil {\n\t\t\t\treturn pc, true // multiple http-header auth providers\n\t\t\t}\n\t\t\tpc = p.HttpHeader\n\t\t}\n\t}\n\treturn pc, false\n}", "title": "" }, { "docid": "3ff7a8e882990e441ae8fc0f3f4a1284", "score": "0.4347995", "text": "func (options *GetVolumesOptions) SetProvider(provider string) *GetVolumesOptions {\n\toptions.Provider = core.StringPtr(provider)\n\treturn options\n}", "title": "" }, { "docid": "8a891e02856b13faedef15b41e4341f3", "score": "0.43463564", "text": "func (o *AdminCreateIdentityImportCredentialsOidcConfig) GetProvidersOk() ([]AdminCreateIdentityImportCredentialsOidcProvider, bool) {\n\tif o == nil || o.Providers == nil {\n\t\treturn 
nil, false\n\t}\n\treturn o.Providers, true\n}", "title": "" }, { "docid": "b68eaa8d8f327d13b50c2e9beb120818", "score": "0.43437648", "text": "func WaitForAllProvidersInstalled(ctx context.Context, c client.Client, interval time.Duration, timeout time.Duration) error {\n\tif err := wait.PollImmediate(interval, timeout, func() (bool, error) {\n\t\tl := &v1.ProviderList{}\n\t\tif err := c.List(ctx, l); err != nil {\n\t\t\treturn false, err\n\t\t}\n\t\tif len(l.Items) != 1 {\n\t\t\treturn false, nil\n\t\t}\n\t\tfor _, p := range l.Items {\n\t\t\tif p.GetCondition(v1.TypeInstalled).Status != corev1.ConditionTrue {\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t\tif p.GetCondition(v1.TypeHealthy).Status != corev1.ConditionTrue {\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t}\n\t\treturn true, nil\n\t}); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c797a99fb0061eba38b3322ef0dca906", "score": "0.43422732", "text": "func (config *Config) PluginRepositories() []PluginRepository {\n\trepos := config.ConfigFile.PluginRepositories\n\tsort.Slice(repos, func(i, j int) bool {\n\t\treturn strings.ToLower(repos[i].Name) < strings.ToLower(repos[j].Name)\n\t})\n\treturn repos\n}", "title": "" }, { "docid": "53e8bf890a9b7dd00326ee7795f45265", "score": "0.43341696", "text": "func RegisteredWorkerPlugins(context context.T) runpluginutil.PluginRegistry {\n\tonce.Do(func() {\n\t\tloadWorkers(context)\n\t})\n\treturn *registeredPlugins\n}", "title": "" } ]
76a60a33558a25d565a6b25c5e4380da
Swap two nodes while ensuring the map stays up to date
[ { "docid": "8a25e9f2a41946b7a44907d88b0d9931", "score": "0.70100343", "text": "func (m *MaxHeap) swapNodes(a, b int) {\n\ttemp := m.data[a]\n\tm.data[a] = m.data[b]\n\tm.data[b] = temp\n\n\tm.keyMap[m.data[a].key] = a\n\tm.keyMap[m.data[b].key] = b\n}", "title": "" } ]
[ { "docid": "440f090886da249c2cc9a3b34b75d494", "score": "0.65978044", "text": "func (node *Node) Swap(b *Node) {\n\n\tvar (\n\t\ttmpNode *Node\n\t)\n\tif b != nil {\n\t\ttmpNode = node.GetNextNode()\n\t\tnode.setNextNode(b.GetNextNode())\n\t\tb.setNextNode(tmpNode)\n\n\t\ttmpNode = node.GetPrevNode()\n\t\tnode.setPrevNode(b.GetPrevNode())\n\t\tb.setPrevNode(tmpNode)\n\n\t\tif node.GetPrevNode() != nil {\n\t\t\tnode.GetPrevNode().setNextNode(node)\n\t\t}\n\t\tif node.GetNextNode() != nil {\n\t\t\tnode.GetNextNode().setPrevNode(node)\n\t\t}\n\t\tif b.GetNextNode() != nil {\n\t\t\tb.GetNextNode().setPrevNode(b)\n\t\t}\n\n\t}\n}", "title": "" }, { "docid": "26d6595219145536e3a1a6084b627be9", "score": "0.64503884", "text": "func (dlist *DoubleLinkList) swapNode(nodeA, nodeB *Node) bool {\n\tif reflect.DeepEqual(nodeA, nodeB) == true {\n\t\treturn false\n\t}\n\tif reflect.DeepEqual(nodeA.next, nodeB) == true { // brother node\n\t\treturn dlist.moveNext(nodeA)\n\t}\n\n\tif reflect.DeepEqual(nodeA, dlist.first) == true {\n\t\tif reflect.DeepEqual(nodeB, dlist.last) == true { // swap last node for first node\n\t\t\tnodeBPre := nodeB.prev\n\n\t\t\tnodeA.next.prev = nodeB\n\t\t\tnodeB.prev = nil\n\t\t\tnodeB.next = nodeA.next\n\t\t\tdlist.first = nodeB\n\n\t\t\tnodeBPre.next = nodeA\n\t\t\tnodeA.prev = nodeBPre\n\t\t\tnodeA.next = nil\n\t\t\tdlist.last = nodeA\n\t\t} else { // swap common node for first node\n\t\t\tnodeANext := nodeA.next\n\n\t\t\tnodeB.prev.next = nodeA\n\t\t\tnodeB.next.prev = nodeA\n\t\t\tnodeA.prev = nodeB.prev\n\t\t\tnodeA.next = nodeB.next\n\n\t\t\tnodeANext.prev = nodeB\n\t\t\tnodeB.next = nodeANext\n\t\t\tnodeB.prev = nil\n\t\t\tdlist.first = nodeB\n\t\t}\n\t} else {\n\t\tif reflect.DeepEqual(nodeB, dlist.last) == true { // swap last node for commom node\n\t\t\tnodeBPre := nodeB.prev\n\n\t\t\tnodeA.prev.next = nodeB\n\t\t\tnodeA.next.prev = nodeB\n\t\t\tnodeB.prev = nodeA.prev\n\t\t\tnodeB.next = nodeA.next\n\n\t\t\tnodeBPre.next = nodeA\n\t\t\tnodeA.prev = nodeBPre\n\t\t\tnodeA.next = nil\n\t\t\tdlist.last = nodeA\n\n\n\t\t} else { // swap common node for commom node\n\t\t\tnodeBPre := nodeB.prev\n\t\t\tnodeBNext := nodeB.next\n\n\t\t\tnodeA.prev.next = nodeB\n\t\t\tnodeA.next.prev = nodeB\n\t\t\tnodeB.prev = nodeA.prev\n\t\t\tnodeB.next = nodeA.next\n\n\t\t\tnodeBPre.next = nodeA\n\t\t\tnodeBNext.prev = nodeA\n\t\t\tnodeA.prev = nodeBPre\n\t\t\tnodeA.next = nodeBNext\n\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "be9d03a7e60a48d4638149813482d5d8", "score": "0.63134575", "text": "func swapgs()", "title": "" }, { "docid": "d44d3a334652e65a20861243d06b7e7e", "score": "0.6270585", "text": "func (r *Ring) Swap(a, b int) error {\n\taprev := r.nodes[a].prev\n\tbprev := r.nodes[b].prev\n\n\tif anyIsInvalid(a, b, aprev, bprev) {\n\t\treturn ErrInvalidOperationOnDetachedNode\n\t}\n\n\tr.Detach(a)\n\tr.Detach(b)\n\n\tvar err error\n\tif aprev == b {\n\t\terr = r.InsertAfter(a, bprev)\n\t\terr = r.InsertAfter(b, a)\n\t} else if bprev == a {\n\t\terr = r.InsertAfter(b, aprev)\n\t\terr = r.InsertAfter(a, b)\n\t} else {\n\t\terr = r.InsertAfter(a, bprev)\n\t\terr = r.InsertAfter(b, aprev)\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "d518239c2898f533a817a3bfcf2edc49", "score": "0.61781704", "text": "func (nq *NodeQueue) Swap(i, j int) {\n\tnq.nodes[i], nq.nodes[j] = nq.nodes[j], nq.nodes[i]\n}", "title": "" }, { "docid": "5e85f56b36b50aed9ff988d5f68ac579", "score": "0.6082779", "text": "func (t SwapTable) Swap(node Node) Node {\n\tif t == nil {\n\t\treturn 
node\n\t}\n\tif new, swapped := t[node]; swapped {\n\t\treturn new\n\t}\n\treturn node\n}", "title": "" }, { "docid": "e0d1ed8417d1c0a9f5175e23981ec89f", "score": "0.60202193", "text": "func (pq *PriorityQueue) swap(i, j int) {\n\n\t// Swap the reverse map\n\tpq.revMap[pq.data[i]], pq.revMap[pq.data[j]] = pq.revMap[pq.data[j]], pq.revMap[pq.data[i]]\n\n\t// Swap the actual data\n\tpq.data[i], pq.data[j] = pq.data[j], pq.data[i]\n\n}", "title": "" }, { "docid": "66aeb903079ebd514f6293a965df5695", "score": "0.60050493", "text": "func (s SingleLinkage) Swap(first, second distance.Distance) {\n\tfor fc, f := range first.Points {\n\t\ts, ok := second.Points[fc]\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\t\tfirst.Points[fc] = math.Min(f, s)\n\t}\n}", "title": "" }, { "docid": "67f79f7bbaadbc3fbaafeb2d7f206939", "score": "0.59569985", "text": "func (g *Point) Swap(g2 *Point) {\n\t*g, *g2 = *g2, *g\n}", "title": "" }, { "docid": "ad4434adac6aa37dad842b1479818049", "score": "0.59560335", "text": "func (s *nodeSorter) Swap(i, j int) {\n\ts.nodes[i], s.nodes[j] = s.nodes[j], s.nodes[i]\n}", "title": "" }, { "docid": "6c9f231f5ca63a814599a63a2b8f7f8d", "score": "0.59017843", "text": "func (l *Link) Swap() error {\n\tvar tmpLink Link\n\n\terr := db.QueryRow(\"SELECT fromPortalID, toPortalID FROM link WHERE opID = ? AND ID = ?\", l.opID, l.ID).Scan(&tmpLink.From, &tmpLink.To)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\treturn err\n\t}\n\n\t_, err = db.Exec(\"UPDATE link SET fromPortalID = ?, toPortalID = ? WHERE ID = ? and opID = ?\", tmpLink.To, tmpLink.From, l.ID, l.opID)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8ac0f70b4c85d4786b3867181f3c2269", "score": "0.5883342", "text": "func (h nodeList) Swap(i, j int) {\n\th[i], h[j] = h[j], h[i]\n}", "title": "" }, { "docid": "03c7b1449023a738100c0be6b4eb85ab", "score": "0.5864243", "text": "func (p MapSFList) Swap(i, j int) { p[i], p[j] = p[j], p[i] }", "title": "" }, { "docid": "c914a0088b9c2603a91cf84fa5667f38", "score": "0.5839924", "text": "func (b *Board) Swap(p1, p2 Position) {\n\tv1 := b.At(p1.Row, p1.Col)\n\tv2 := b.At(p2.Row, p2.Col)\n\n\tupdateCache := func(sign int) {\n\t\tif b.nearnessCache == 0 {\n\t\t\treturn\n\t\t}\n\t\tvs := []Position{*v1, *v2}\n\t\tfor pointIdx, p := range []Position{p1, p2} {\n\t\t\tv := vs[pointIdx]\n\t\t\tfor i := 0; i < b.Size; i++ {\n\t\t\t\tfor j := 0; j < b.Size; j++ {\n\t\t\t\t\tp1 := Position{Row: i, Col: j}\n\t\t\t\t\tv1 := *b.At(i, j)\n\t\t\t\t\tb.nearnessCache += sign * b.Distance(p, p1) * b.Distance(v, v1)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tupdateCache(-1)\n\t*v1, *v2 = *v2, *v1\n\tupdateCache(1)\n}", "title": "" }, { "docid": "1092878db1505534496897510a3b5c51", "score": "0.5834335", "text": "func (partyMap PartyMap) Swap(i, j int) {\n\tpartyMap.TurnPoints[i], partyMap.TurnPoints[j] = partyMap.TurnPoints[j], partyMap.TurnPoints[i]\n}", "title": "" }, { "docid": "4a51c4f92446886a0724d5d86abad91f", "score": "0.5832985", "text": "func (r *Ring) TwoOptSwap(n, target int) error {\n\toldNext := r.nodes[n].next\n\toldTargetNext := r.nodes[target].next\n\n\tif anyIsInvalid(n, target, oldNext, oldTargetNext) {\n\t\treturn ErrInvalidOperationOnDetachedNode\n\t}\n\n\t// disconnect\n\tr.nodes[oldNext].prev = -1\n\tr.nodes[oldTargetNext].prev = -1\n\n\t// connect with new one\n\tr.nodes[n].next = target\n\n\t// reverse the direction. 
this will loop until we hit oldNext\n\tvar old = n\n\tvar current = target\n\tfor current != -1 {\n\t\tvar oldPrev = r.nodes[current].prev\n\t\tr.nodes[current].prev = old\n\t\tr.nodes[current].next = oldPrev\n\n\t\told = current\n\t\tcurrent = oldPrev\n\t}\n\n\tr.nodes[oldNext].next = oldTargetNext\n\tr.nodes[oldTargetNext].prev = oldNext\n\n\treturn nil\n}", "title": "" }, { "docid": "8255168c48b71396ea9619badc5d4bd7", "score": "0.5825487", "text": "func (s LeafList) Swap(i, j int) {\n\ts[i], s[j] = s[j], s[i]\n}", "title": "" }, { "docid": "8255168c48b71396ea9619badc5d4bd7", "score": "0.5825487", "text": "func (s LeafList) Swap(i, j int) {\n\ts[i], s[j] = s[j], s[i]\n}", "title": "" }, { "docid": "061e6a8f1c8f32a702ad91fdddc8225b", "score": "0.5814982", "text": "func (s *NodeState) Swap(new NodeState) NodeState {\n\treturn NodeState(atomic.SwapInt32((*int32)(s), int32(new)))\n}", "title": "" }, { "docid": "56fb4646a281bf0c850024939443a5f5", "score": "0.57891876", "text": "func (c ChildNodes) Swap(i, j int) {\n\tc[i], c[j] = c[j], c[i]\n}", "title": "" }, { "docid": "994f9066b0f4f7e8d69909194ece4958", "score": "0.5769294", "text": "func (l TargetNodeList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }", "title": "" }, { "docid": "b78d82570f91b18b66f6c65d47648dd6", "score": "0.57244617", "text": "func (l *list) Swap(i, j int) {\n\tni, ici := l.getNode(i)\n\tnj, icj := l.getNode(j)\n\n\tif ni != nil && nj != nil {\n\t\tni.values[ici], nj.values[icj] = nj.values[icj], ni.values[ici]\n\t}\n}", "title": "" }, { "docid": "75510bb5797c62c95fa278ee884b5d8a", "score": "0.57239974", "text": "func (_Main *MainTransactor) Swap(opts *bind.TransactOpts, amount0Out *big.Int, amount1Out *big.Int, to common.Address, data []byte) (*types.Transaction, error) {\n\treturn _Main.contract.Transact(opts, \"swap\", amount0Out, amount1Out, to, data)\n}", "title": "" }, { "docid": "47943a00f7d4e38dae425bf5417cd594", "score": "0.5674256", "text": "func (dn *dancingNode) relinkLeftRight() {\n\tdn.left.right = dn\n\tdn.right.left = dn\n}", "title": "" }, { "docid": "22ffe05f6195c375fb68ece8b8c83819", "score": "0.56683165", "text": "func swap(n1 *int, n2 *int){\n\ttemp :=*n1\n\t*n1 = *n2\n\t*n2 = temp\n}", "title": "" }, { "docid": "3d4ef8eddb2748474f1f586474bbcd86", "score": "0.5659139", "text": "func (h NodeHeap) Swap(i, j int) {\n\th[i], h[j] = h[j], h[i]\n}", "title": "" }, { "docid": "690fb5ed4ab1399cd1d8fbc67e19e55f", "score": "0.5645169", "text": "func (_MStable *MStableTransactor) Swap(opts *bind.TransactOpts, _input common.Address, _output common.Address, _inputQuantity *big.Int, _minOutputQuantity *big.Int, _recipient common.Address) (*types.Transaction, error) {\n\treturn _MStable.contract.Transact(opts, \"swap\", _input, _output, _inputQuantity, _minOutputQuantity, _recipient)\n}", "title": "" }, { "docid": "d626be7b11f0da243272b30a3f9ed52f", "score": "0.5634098", "text": "func (ts *TopicStore) Swap(i, j int) {\n\tts.index[i], ts.index[j] = ts.index[j], ts.index[i]\n}", "title": "" }, { "docid": "af610c203e7da7e528bea727039bebf4", "score": "0.55827236", "text": "func (wf *OLTPSpanner) atomicSwap(r *rand.Rand) error {\n\t// This should be both valid and random, hence we need to know the range of valid\n\t// identifiers within the table.\n\tupdateID := datagen.RandomGeneratedTransactionID(r)\n\t_, err := wf.client.ReadWriteTransaction(wf.ctx, func(ctx context.Context, txn *spanner.ReadWriteTransaction) error {\n\t\trow, err := 
txn.ReadRow(\n\t\t\twf.ctx,\n\t\t\tdatagen.TransactionTableName,\n\t\t\tspanner.Key{updateID},\n\t\t\t[]string{datagen.TransactionFromUserColumn, datagen.TransactionToUserColumn})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tvar fromUserID, toUserID int64\n\t\tif err := row.Columns(&fromUserID, &toUserID); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Swapping the user IDs guarantees that referential integrity is maintained.\n\t\tmutation := spanner.UpdateMap(datagen.TransactionTableName, map[string]interface{}{\n\t\t\t\"id\": updateID,\n\t\t\t\"fromUserId\": toUserID,\n\t\t\t\"toUserId\": fromUserID,\n\t\t\t\"time\": spanner.CommitTimestamp,\n\t\t})\n\t\treturn txn.BufferWrite([]*spanner.Mutation{mutation})\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "f330476eb9ef3fc72a39a99cf8230567", "score": "0.5563262", "text": "func (recon *MultishareReconciler) fixTwoWayPointers(shareInfos map[string]*v1.ShareInfo, instanceInfos map[string]*v1.InstanceInfo) {\n\tfor _, instanceInfo := range instanceInfos {\n\t\tif instanceInfo.Status == nil {\n\t\t\tklog.V(6).Infof(\"Instance %q has Status nil\", instanceInfo.Name)\n\t\t\tcontinue\n\t\t}\n\n\t\tinstanceInfoClone := instanceInfo.DeepCopy()\n\t\tupdated := false\n\n\t\tinstanceURI := util.InstanceInfoNameToInstanceURI(instanceInfo.Name)\n\t\tfor _, shareName := range instanceInfo.Status.ShareNames {\n\t\t\tshareInfo, ok := shareInfos[shareName]\n\t\t\tif !ok {\n\t\t\t\tklog.Errorf(\"Share %q is assigned to instance %q but shareInfo does not exist\", shareName, instanceURI)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif shareInfo.Status == nil || shareInfo.Status.InstanceHandle == \"\" {\n\t\t\t\tshareInfo, err := recon.assignInstanceToShareInfo(shareInfo, instanceURI)\n\t\t\t\tif err != nil {\n\t\t\t\t\tklog.Errorf(\"Cannot update instanceHandle to shareInfo %q: %v\", shareName, err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tshareInfos[shareName] = shareInfo\n\t\t\t} else if shareInfo.Status.InstanceHandle != instanceURI {\n\t\t\t\t// This case should be rare, however, if there's a race or crash between shareInfo update and instanceInfo update, the 2 way pointer may not match.\n\t\t\t\tklog.Warningf(\"InstanceInfo %q has share %q but its shareInfo points to instance %q\", instanceInfo.Name, shareName, shareInfo.Status.InstanceHandle)\n\n\t\t\t\t// If the share is already marked for deletion, don't try to add it to the instanceInfo it points to.\n\t\t\t\tif shareInfo.DeletionTimestamp == nil {\n\t\t\t\t\tklog.Infof(\"Deletion Timestamp not set on shareInfo %q, trying to update instanceInfo %q\", shareName, shareInfo.Status.InstanceHandle)\n\t\t\t\t\tactualAssigned, ok := instanceInfos[shareInfo.Status.InstanceHandle]\n\t\t\t\t\tvar err error\n\t\t\t\t\tif !ok {\n\t\t\t\t\t\tklog.Errorf(\"Share %q is assigned to instance %q but instanceInfo does not exist. 
Trying to create one\", shareName, shareInfo.Status.InstanceHandle)\n\t\t\t\t\t\tactualAssigned, err = recon.generateInstanceInfo(shareInfo.Status.InstanceHandle, shareInfo.Spec.InstancePoolTag, shareInfo.Spec.Parameters)\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\tklog.Errorf(\"Failed to create instanceInfo %q: %v\", shareInfo.Status.InstanceHandle, err)\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tactualAssigned, err = recon.assignShareToInstanceInfo(actualAssigned, shareName)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tklog.Errorf(\"Failed to assign share %q to instanceInfo %q: %v\", shareName, shareInfo.Status.InstanceHandle, err)\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\n\t\t\t\t\tinstanceInfos[shareInfo.Status.InstanceHandle] = actualAssigned\n\t\t\t\t}\n\n\t\t\t\tinstanceInfoClone, updated = recon.removeShareFromInstanceInfo(instanceInfoClone, shareName)\n\t\t\t}\n\t\t}\n\n\t\tvar err error\n\t\tif updated {\n\t\t\tklog.Infof(\"InstanceInfo %q has updated share assignment, trying to update object\", instanceInfoClone.Name)\n\t\t\tinstanceInfoClone, err = recon.updateInstanceInfoStatus(context.TODO(), instanceInfoClone)\n\t\t\tif err != nil {\n\t\t\t\tklog.Errorf(\"Failed to update status subresource of instanceInfo %q: %v\", instanceInfo.Name, err)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tinstanceInfos[instanceURI] = instanceInfoClone\n\t\t}\n\t}\n}", "title": "" }, { "docid": "7405690763105349ff9a0bdac64e2835", "score": "0.5549661", "text": "func (graph *townGraph) set(a, b town, dist townDistance) {\n\tif a == b {\n\t\treturn\n\t}\n\tif a > b {\n\t\ta, b = b, a\n\t}\n\tgraph.links[townPair{a, b}] = dist\n\twriteA, writeB := true, true\n\tfor _, v := range graph.towns {\n\t\tif v == a {\n\t\t\twriteA = false\n\t\t}\n\t\tif v == b {\n\t\t\twriteB = false\n\t\t}\n\t}\n\tif writeA {\n\t\tgraph.towns = append(graph.towns, a)\n\t}\n\tif writeB {\n\t\tgraph.towns = append(graph.towns, b)\n\t}\n}", "title": "" }, { "docid": "184a68979f1cfa6b67116f2d7ff57219", "score": "0.5549579", "text": "func (nns neighbors) Swap(i, j int) {\n\tnns[i], nns[j] = nns[j], nns[i]\n}", "title": "" }, { "docid": "1fd88d2b7f5e5fff38dacbd55cc6c8b1", "score": "0.5549358", "text": "func swap(m1, m2 *int) {\n\tvar temp int\n\ttemp = *m2\n\t*m2 = *m1\n\t*m1 = temp\n}", "title": "" }, { "docid": "1fd88d2b7f5e5fff38dacbd55cc6c8b1", "score": "0.5549358", "text": "func swap(m1, m2 *int) {\n\tvar temp int\n\ttemp = *m2\n\t*m2 = *m1\n\t*m1 = temp\n}", "title": "" }, { "docid": "38f7c158dcdfbc1bd77b8453e45d0049", "score": "0.5541265", "text": "func (l TargetEdgeList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }", "title": "" }, { "docid": "28910cd0d2db35a3f93880482237a5d0", "score": "0.551741", "text": "func swap(m *Matrix, a int, b int) {\n row1 := make([]float64, len(m.rows[a]));\n row2 := make([]float64, len(m.rows[b]));\n copy(row1, m.rows[a]);\n copy(row2, m.rows[b]);\n m.rows[b] = row1;\n m.rows[a] = row2;\n}", "title": "" }, { "docid": "61329012fd85c4d3dccdf665635b5532", "score": "0.54844826", "text": "func swap(x *int, y *int) {\n\ttmp := *x\n\t*x = *y\n\t*y = tmp\n}", "title": "" }, { "docid": "1a7ea221ec93a8df4e03c0a9e0049ee0", "score": "0.5472147", "text": "func (th *Thread) Swap() {\n\tth.stack[th.sp-1], th.stack[th.sp-2] = th.stack[th.sp-2], th.stack[th.sp-1]\n}", "title": "" }, { "docid": "651372db6670ceac1506b4e682a4f921", "score": "0.54700494", "text": "func (knnHeap *KNNHeap) Swap(i, j int) {\n\tknnHeap.SparseVector.Swap(i, j)\n\tknnHeap.Similarities[i], knnHeap.Similarities[j] = knnHeap.Similarities[j], 
knnHeap.Similarities[i]\n}", "title": "" }, { "docid": "1db9791457ebb44cb28bebd70a1e7caa", "score": "0.546876", "text": "func (_V1 *V1Transactor) Swap(opts *bind.TransactOpts, swapper1 common.Address, swapper2 common.Address, val *big.Int, forth []common.Address, back []common.Address) (*types.Transaction, error) {\n\treturn _V1.contract.Transact(opts, \"swap\", swapper1, swapper2, val, forth, back)\n}", "title": "" }, { "docid": "78c4321afa31b75deb4188c00daacdce", "score": "0.54550827", "text": "func (t *STree) update() {\n\toldr := t.root\n\n\t// (s, (start, end)) is the canonical reference pair for the active point\n\ts := t.s\n\tstart, end := t.start, t.end\n\tvar r *state\n\tfor {\n\t\tvar endPoint bool\n\t\tr, endPoint = t.testAndSplit(s, start, end-1)\n\t\tif endPoint {\n\t\t\tbreak\n\t\t}\n\t\tr.fork(end)\n\t\tif oldr != t.root {\n\t\t\toldr.linkState = r\n\t\t}\n\t\toldr = r\n\t\ts, start = t.canonize(s.linkState, start, end-1)\n\t}\n\tif oldr != t.root {\n\t\toldr.linkState = r\n\t}\n\n\t// update active point\n\tt.s = s\n\tt.start = start\n}", "title": "" }, { "docid": "1ea0d8dd1037f9c456cca0a4ba9cec1e", "score": "0.54550105", "text": "func (_IPancakePair *IPancakePairTransactor) Swap(opts *bind.TransactOpts, amount0Out *big.Int, amount1Out *big.Int, to common.Address, data []byte) (*types.Transaction, error) {\n\treturn _IPancakePair.contract.Transact(opts, \"swap\", amount0Out, amount1Out, to, data)\n}", "title": "" }, { "docid": "fdeb6dc39a076aa506eaafb65236c5e3", "score": "0.54493845", "text": "func (q *Queen) swapTwo() {\n\tfirst := q.randInt()\n\tsecond := q.randInt()\n\n\tq.board[first], q.board[second] = q.board[second], q.board[first]\n}", "title": "" }, { "docid": "800b8fac1478fc6c66243b596ad0b37c", "score": "0.5438517", "text": "func (s *MsgHeap) swap(i, j int) {\n\td := s.data\n\td[i], d[j] = d[j], d[i]\n\ts.index[d[i].SerialNumber], s.index[d[j].SerialNumber] = i, j\n}", "title": "" }, { "docid": "21ae83740e5e6dc3c1155d52de903ad3", "score": "0.5384157", "text": "func (t Torrents) Swap(i, j int) { t[i], t[j] = t[j], t[i] }", "title": "" }, { "docid": "e1cca810ef6a3363bec2f020a6545008", "score": "0.5382522", "text": "func (net *Network) xfer_maps( old_net *Network ) {\n\tnet.vm2ip = old_net.vm2ip\n\tnet.ip2vm = old_net.ip2vm\n\tnet.vmid2ip = old_net.vmid2ip\n\tnet.ip2vmid = old_net.ip2vmid\n\tnet.vmid2phost = old_net.vmid2phost\t\n\tnet.vmip2gw = old_net.vmip2gw\n\tnet.ip2mac = old_net.ip2mac\n\tnet.mac2phost = old_net.mac2phost\n\tnet.gwmap = old_net.gwmap\n\tnet.fip2ip = old_net.fip2ip\n\tnet.ip2fip = old_net.ip2fip\n\tnet.limits = old_net.limits\n}", "title": "" }, { "docid": "b691bab02f72c65bd325b4ac341e40ef", "score": "0.5371824", "text": "func (c *ChangeValue) swap(newTarget *reflect.Value) {\n\tif newTarget.IsValid() {\n\t\tc.ClearFlag(FlagInvalidTarget)\n\t\tc.parent = c.target\n\t\tc.target = newTarget\n\t\tc.pos++\n\t}\n}", "title": "" }, { "docid": "7e5e8979fb16cad081a41fc7c4341ee8", "score": "0.53705794", "text": "func swap(array Stores, to int, from int) {\n\tstore := array[to]\n\tarray[to] = array[from]\n\tarray[from] = store\n}", "title": "" }, { "docid": "435e926b52516e2fe11f4fbf3520a4fe", "score": "0.53605294", "text": "func mergeMapNodes(dst, src *yaml.Node, handler keyExistsHandler) error {\n\tif src.IsZero() {\n\t\treturn nil\n\t}\n\n\tif dst.IsZero() {\n\t\t*dst = *src\n\t\treturn nil\n\t}\n\n\tdstMap := mappingNode(dst)\n\tvar newContent []*yaml.Node\n\tfor _, srcContent := range mappingContents(src) {\n\t\tsrcKey := 
srcContent.keyNode.Value\n\n\t\tdstValueNode, ok := dstMap[srcKey]\n\t\tif !ok {\n\t\t\t// The key doesn't exist in dst, we want to retain the two src nodes.\n\t\t\tnewContent = append(newContent, srcContent.keyNode, srcContent.valueNode)\n\t\t\tcontinue\n\t\t}\n\n\t\tif err := handler(srcKey, dstValueNode, srcContent.valueNode); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tdst.Content = append(dst.Content, newContent...)\n\treturn nil\n}", "title": "" }, { "docid": "200927350ccf67eab77a64e2f35a11b2", "score": "0.53584313", "text": "func (h *BHeap) swap(i, j int) {\n\tii, jj := h.get(i), h.get(j)\n\th.set(i, jj)\n\th.set(j, ii)\n}", "title": "" }, { "docid": "1152cec0faa20ba29f51e73784b66742", "score": "0.53518015", "text": "func (_MStable *MStableTransactorSession) Swap(_input common.Address, _output common.Address, _inputQuantity *big.Int, _minOutputQuantity *big.Int, _recipient common.Address) (*types.Transaction, error) {\n\treturn _MStable.Contract.Swap(&_MStable.TransactOpts, _input, _output, _inputQuantity, _minOutputQuantity, _recipient)\n}", "title": "" }, { "docid": "73dc36e62980e2eebf231750985b0a58", "score": "0.5349073", "text": "func swap(v1 *string, v2 *string) {\n\tpointerv1 := *v1\n\t*v1 = *v2\n\t*v2 = pointerv1\n}", "title": "" }, { "docid": "9e6337d7c018b044d95b61c696a64ae8", "score": "0.5348888", "text": "func swapPairs(head *ListNode) *ListNode {\n\t// Put a phantom node before the start so we don't have to do any special handling\n\t// for the first pair of nodes\n\tphantomNode := ListNode{0, head}\n\tleft, right := &phantomNode, head\n\n\t// Use two pointers and a six-step cycle to iterate through the list and swap each\n\t// pair of two nodes.\n\tfor left.Next != nil && right.Next != nil {\n\t\tleft.Next = right.Next\n\t\tleft = right\n\t\tright = left.Next\n\t\tleft.Next = right.Next\n\t\tright.Next = left\n\t\tright = left.Next\n\t}\n\n\treturn phantomNode.Next\n}", "title": "" }, { "docid": "743f557f78d666f472966c70974c93e9", "score": "0.5322817", "text": "func (heap *Heap) exchange(left, right int) {\n\tlist := heap.list\n\tlValue := list.ElementAt(left)\n\trValue := list.ElementAt(right)\n\tlist.SetElementAt(left, rValue)\n\tlist.SetElementAt(right, lValue)\n}", "title": "" }, { "docid": "3107cbbdaf9dee4005b4d750c718d6a5", "score": "0.5312908", "text": "func (gepcsl GatewayEndPointCacheStatusList) Swap(i, j int) {\n\tgepcsl[i], gepcsl[j] = gepcsl[j], gepcsl[i]\n}", "title": "" }, { "docid": "3107cbbdaf9dee4005b4d750c718d6a5", "score": "0.5312908", "text": "func (gepcsl GatewayEndPointCacheStatusList) Swap(i, j int) {\n\tgepcsl[i], gepcsl[j] = gepcsl[j], gepcsl[i]\n}", "title": "" }, { "docid": "097c8dc13f42f311b6e6a3ead5a4abb7", "score": "0.53125757", "text": "func swapNodes(indexes [][]int32, queries []int32) [][]int32 {\n\t/*\n\t * Write your code here.\n\t */\n\ttreeNodes := make([]TreeNode, len(indexes)+1)\n\tfor idx, index := range indexes {\n\t\ttreeNodes[idx+1] = TreeNode{\n\t\t\tValue: int32(idx) + 1,\n\t\t\tLeftIndex: index[0],\n\t\t\tRightIndex: index[1],\n\t\t}\n\t}\n\tfor i := 1; i < len(treeNodes); i++ {\n\t\tnode := &treeNodes[i]\n\t\tif node.LeftIndex != -1 {\n\t\t\tnode.Left = &treeNodes[node.LeftIndex]\n\t\t}\n\t\tif node.RightIndex != -1 {\n\t\t\tnode.Right = &treeNodes[node.RightIndex]\n\t\t}\n\t}\n\tbuildHeight(&treeNodes[1], 1)\n\tans := make([][]int32, len(queries))\n\tfor idx, query := range queries {\n\t\tswap(&treeNodes[1], query)\n\t\tret := make([]int32, len(indexes))\n\t\tinorder(&treeNodes[1], ret, 0)\n\t\tans[idx] = 
ret\n\t}\n\treturn ans\n}", "title": "" }, { "docid": "4a18ea1680decda423fd8cc58c0eecbc", "score": "0.53048897", "text": "func Swap(from, to string) error {\n\tif out, err := execCommand(ipsetPath, _swap, from, to).\n\t\tCombinedOutput(); err != nil {\n\t\treturn fmt.Errorf(\"ipset: can't swap from %s to %s: %s\", from, to, out)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c1bb1278ca1f04e2f5d95e354652cdac", "score": "0.53029644", "text": "func (a *Access) SwapKeys() {\n\ta.PrimaryCurrent = !a.PrimaryCurrent\n}", "title": "" }, { "docid": "a0c0185eaff01c0d8cc4689fa264e67e", "score": "0.52797675", "text": "func swap (left,right string) (string,string){\n\treturn right,left\n}", "title": "" }, { "docid": "65aea788c74cc715b571faaff1a89e3d", "score": "0.5278903", "text": "func (s Transactions) Swap(i, j int) { s[i], s[j] = s[j], s[i] }", "title": "" }, { "docid": "65aea788c74cc715b571faaff1a89e3d", "score": "0.5278903", "text": "func (s Transactions) Swap(i, j int) { s[i], s[j] = s[j], s[i] }", "title": "" }, { "docid": "772130c2adb691fcd37213a95ce0b1f0", "score": "0.526762", "text": "func swap(data [][]byte, a, b int) {\n temp := data[a]\n data[a] = data[b]\n data[b] = temp\n}", "title": "" }, { "docid": "8bdbff63dce19f0c764b1fe632c3e693", "score": "0.52655554", "text": "func (_MStable *MStableSession) Swap(_input common.Address, _output common.Address, _inputQuantity *big.Int, _minOutputQuantity *big.Int, _recipient common.Address) (*types.Transaction, error) {\n\treturn _MStable.Contract.Swap(&_MStable.TransactOpts, _input, _output, _inputQuantity, _minOutputQuantity, _recipient)\n}", "title": "" }, { "docid": "c89b602484f768dd48a5ef418c5ba154", "score": "0.52614504", "text": "func (dhtNode *DHTNode) reconnNodes(msg *Msg) {\n\tswitch msg.Type {\n\tcase \"succ\":\n\t\tif msg.Src == msg.Origin {\n\t\t\tdhtNode.succ[0] = msg.Key\n\t\t\tdhtNode.succ[1] = msg.Src\n\t\t} else {\n\t\t\tdhtNode.succ[0] = msg.Key\n\t\t\tdhtNode.succ[1] = msg.Origin\n\t\t}\n\t\tdhtNode.succ[0] = msg.Key\n\t}\n}", "title": "" }, { "docid": "01430c71518205b04d47830e5e8e30a6", "score": "0.52552307", "text": "func (s *Scorer) SwapModels() { s.active = 1-s.active }", "title": "" }, { "docid": "afefe559f6e90d17d1aa4be0ab6607de", "score": "0.52494764", "text": "func (s state) updateNodes(newNodes nodes) {\n\tfor _, nn := range newNodes {\n\t\ton, exists := s.Nodes[nn.NodeId]\n\t\tif !exists {\n\t\t\tif debugging {\n\t\t\t\tfmt.Printf(\"Learned about new node %v\\n\", nn.NodeId)\n\t\t\t}\n\t\t\ts.Nodes[nn.NodeId] = nn\n\t\t} else {\n\t\t\tif debugging {\n\t\t\t\tfmt.Printf(\"Updating any stale info we had on node %v\\n\", nn.NodeId)\n\t\t\t\tfmt.Printf(\"Updating any stale info we had on old node %v to new %v\\n\", on, nn)\n\t\t\t}\n\t\t\tif on.Alias != \"\" && nn.Alias == \"\" {\n\t\t\t\t// Preserve alias if we knew it.\n\t\t\t\tnn.Alias = on.Alias\n\t\t\t}\n\t\t\tif on.Color != \"\" && nn.Color == \"\" {\n\t\t\t\t// Preserve color if we knew it.\n\t\t\t\tnn.Color = on.Color\n\t\t\t}\n\t\t\tif on.isPeer {\n\t\t\t\t// Note: We avoid marking peers as non-peers when listnodes returns with isPeer=false. 
This could be less hacky.\n\t\t\t\tnn.isPeer = true\n\t\t\t\tnn.Connected = on.Connected\n\t\t\t\tif len(nn.Channels) == 0 && len(on.Channels) > 0 {\n\t\t\t\t\tnn.Channels = on.Channels\n\t\t\t\t}\n\t\t\t}\n\t\t\t// TODO: may or may not want to update on.Addresses.\n\t\t\tif on.LastTimestamp.Time().After(nn.LastTimestamp.Time()) {\n\t\t\t\tnn.LastTimestamp = on.LastTimestamp\n\t\t\t}\n\t\t\ts.Nodes[nn.NodeId] = nn\n\t\t}\n\t}\n}", "title": "" }, { "docid": "a26e4a18ff0a7c2a472cc5450e4ed37e", "score": "0.52455354", "text": "func (dn *dancingNode) relinkUpDown() {\n\tdn.up.down = dn\n\tdn.down.up = dn\n}", "title": "" }, { "docid": "76796b0c94a2b189a6115aa4c673faeb", "score": "0.52394676", "text": "func (n *node) restore() {\n\tif math.Abs(float64(n.left.height-n.right.height)) <= 1 {\n\t\tn.restoreHeight()\n\t\treturn\n\t}\n\tif n.left.height > n.right.height {\n\t\tk1, v1, l1, r1 := n.Key, n.Value, n.left, n.right;\n\t\tk2, v2, l2, r2 := l1.Key, l1.Value, l1.left, l1.right;\n\t\tif l2.height >= r2.height {\n\t\t\tn.Key, n.Value, n.left, n.right = k2, v2, l2, &node{k1, v1, r2, r1, 1}\n\t\t\tn.right.restoreHeight()\n\t\t} else {\n\t\t\tk3, v3, l3, r3 := r2.Key, r2.Value, r2.left, r2.right;\n\t\t\tn.Key, n.Value, n.left, n.right = k3, v3, &node{k2, v2, l2, l3, 1}, &node{k1, v1, r3, r1, 1}\n\t\t\tn.left.restoreHeight()\n\t\t\tn.right.restoreHeight()\n\t\t}\n\t} else if n.right.height > n.left.height {\n\t\tk1, v1, l1, r1 := n.Key, n.Value, n.left, n.right\n\t\tk2, v2, l2, r2 := r1.Key, r1.Value, r1.left, r1.right;\n\t\tif r2.height >= l2.height {\n\t\t\tn.Key, n.Value, n.left, n.right = k2, v2, &node{k1, v1, l1, l2, 1}, r2\n\t\t\tn.left.restoreHeight()\n\t\t} else {\n\t\t\tk3, v3, l3, r3 := l2.Key, l2.Value, l2.left, l2.right\n\t\t\tn.Key, n.Value, n.left, n.right = k3, v3, &node{k1, v1, l1, l3, 1}, &node{k2, v2, r3, r2, 1}\n\t\t\tn.left.restoreHeight()\n\t\t\tn.right.restoreHeight()\n\t\t}\n\t}\n\tn.restoreHeight()\n}", "title": "" }, { "docid": "286371d5289d6c1a991ad596391e5080", "score": "0.52373403", "text": "func (xo VgmachineOrder) Swap(i, j int) {\n\txo.VgmachineSlice[i], xo.VgmachineSlice[j] = xo.VgmachineSlice[j], xo.VgmachineSlice[i]\n}", "title": "" }, { "docid": "c137423e1e101f824002e4339ec31882", "score": "0.5234969", "text": "func swap(cpu *CPU, _, r8 int) {\n\tb := cpu.Reg.R[r8]\n\tlower := b & 0b1111\n\tupper := b >> 4\n\tcpu.Reg.R[r8] = (lower << 4) | upper\n\n\tcpu.setF(flagZ, cpu.Reg.R[r8] == 0)\n\tcpu.setF(flagN, false)\n\tcpu.setF(flagH, false)\n\tcpu.setF(flagC, false)\n\tcpu.Reg.PC++\n}", "title": "" }, { "docid": "3a4cc65ab986c9fbd0f2d70909d6e059", "score": "0.52305543", "text": "func (eh edgeHeap) Swap(i, j int) {\n\teh.edges[i], eh.edges[j] = eh.edges[j], eh.edges[i]\n}", "title": "" }, { "docid": "1202b7f4a6399557a5dd4104154a056f", "score": "0.5225616", "text": "func Swap(xp *int, yp *int) {\n\ttemp := *xp\n\t*xp = *yp\n\t*yp = temp\n}", "title": "" }, { "docid": "6502364f7219edd49def3560c42f0dfe", "score": "0.52172244", "text": "func TestNodeCacheUnlinkThenRelink(t *testing.T) {\n\tid := tlf.FakeID(42, tlf.Private)\n\tbranch := data.BranchName(\"testBranch\")\n\tncs, _, childNode1, childNode2, _, path2 :=\n\t\tsetupNodeCache(t, id, branch, false)\n\tchildPtr2 := path2[2].BlockPointer\n\n\t// unlink child2\n\tundoFn := ncs.Unlink(\n\t\tchildPtr2.Ref(), ncs.PathFromNode(childNode2), data.DirEntry{})\n\tif undoFn == nil {\n\t\tt.Fatalf(\"Couldn't unlink\")\n\t}\n\n\tnewChildName := \"newChildName\"\n\tnewChildPtr2 := data.BlockPointer{ID: 
kbfsblock.FakeID(22)}\n\tncs.UpdatePointer(childPtr2.Ref(), newChildPtr2) // NO-OP\n\tchildNode2B, err := ncs.GetOrCreate(\n\t\tnewChildPtr2, testPPS(newChildName), childNode1, data.Dir)\n\tif err != nil {\n\t\tt.Fatalf(\"Couldn't relink node: %v\", err)\n\t}\n\tif childNode2.GetID() == childNode2B.GetID() {\n\t\tt.Errorf(\"Relink left the node the same\")\n\t}\n\n\t// Old unlinked node didn't get updated\n\tpath := ncs.PathFromNode(childNode2)\n\tcheckNodeCachePath(t, id, branch, path, path2)\n\n\t// New node\n\tpath = ncs.PathFromNode(childNode2B)\n\tpath2[2].BlockPointer = newChildPtr2\n\tpath2[2].Name = testPPS(newChildName)\n\tcheckNodeCachePath(t, id, branch, path, path2)\n\n\tif g, e := childNode2.GetBasename().Plaintext(), \"\"; g != e {\n\t\tt.Errorf(\"Expected basename %s, got %s\", e, g)\n\t}\n\tif g, e := childNode2B.GetBasename().Plaintext(), newChildName; g != e {\n\t\tt.Errorf(\"Expected basename %s, got %s\", e, g)\n\t}\n}", "title": "" }, { "docid": "c1b4d59986ef7626312376abd5b0ed23", "score": "0.5216967", "text": "func (p KeyProperties) Swap(a, b int) {\n\tp[a], p[b] = p[b], p[a]\n}", "title": "" }, { "docid": "fe4c3f2ce938167e2f04b758e5b80e6e", "score": "0.52160317", "text": "func reorder(gameState *GameState) {\n\tnext := (*gameState).mirror()\n\n\tmaybeReplace := func() {\n\t\tok := !isLess(*gameState, next)\n\t\tif ok {\n\t\t\t*gameState = next\n\t\t}\n\t}\n\n\tmaybeReplace()\n}", "title": "" }, { "docid": "32f018a0267333c69a1a858968a09c3e", "score": "0.5204909", "text": "func (d ByDistance) Swap(i, j int) { d[i], d[j] = d[j], d[i] }", "title": "" }, { "docid": "e500d9ddc2060b7485874f55c028768f", "score": "0.52024", "text": "func (cells CellHeap) Swap(i, j int) {\n\tcells[i], cells[j] = cells[j], cells[i]\n\tcells[i].index = i\n\tcells[j].index = j\n}", "title": "" }, { "docid": "8e48dc2158a0709de183b53aab69cc0f", "score": "0.5198669", "text": "func (_V1 *V1TransactorSession) Swap(swapper1 common.Address, swapper2 common.Address, val *big.Int, forth []common.Address, back []common.Address) (*types.Transaction, error) {\n\treturn _V1.Contract.Swap(&_V1.TransactOpts, swapper1, swapper2, val, forth, back)\n}", "title": "" }, { "docid": "15475f96d3920c616f0beb55e22b1147", "score": "0.5193939", "text": "func (e Edges) Swap(i, j int) { e[i], e[j] = e[j], e[i] }", "title": "" }, { "docid": "a9adbe6e58d5b5646c74e71eb10a0979", "score": "0.5189305", "text": "func (r *repoT) remapLocalIDs() (dvid.InstanceMap, dvid.VersionMap, error) {\n\tif manager == nil {\n\t\treturn nil, nil, ErrManagerNotInitialized\n\t}\n\tr.Lock()\n\tdefer r.Unlock()\n\n\t// Convert the transmitted local ids to this DVID server's local ids.\n\tmodifyManager := false\n\tinstanceMap := make(dvid.InstanceMap, len(r.data))\n\tfor dataname, dataservice := range r.data {\n\t\tinstanceID, err := manager.newInstanceID()\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\t\tinstanceMap[dataservice.InstanceID()] = instanceID\n\t\tr.data[dataname].SetInstanceID(instanceID)\n\t}\n\n\t// Pass 1 on DAG: copy the nodes with new ids\n\tnewNodes := make(map[dvid.VersionID]*nodeT, len(r.dag.nodes))\n\tversionMap := make(dvid.VersionMap, len(r.dag.nodes))\n\tfor oldVersionID, nodePtr := range r.dag.nodes {\n\t\t// keep the old uuid but get a new version id\n\t\tnewVersionID, err := manager.newVersionID(nodePtr.uuid, modifyManager)\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\t\tversionMap[oldVersionID] = newVersionID\n\t\tnewNodes[newVersionID] = nodePtr\n\t}\n\n\t// Pass 2 on DAG: now that we know the 
version mapping, modify all nodes.\n\tfor _, nodePtr := range r.dag.nodes {\n\t\tnodePtr.version = versionMap[nodePtr.version]\n\t\tfor i, oldVersionID := range nodePtr.parents {\n\t\t\tnodePtr.parents[i] = versionMap[oldVersionID]\n\t\t}\n\t\tfor i, oldVersionID := range nodePtr.children {\n\t\t\tnodePtr.children[i] = versionMap[oldVersionID]\n\t\t}\n\t}\n\tr.dag.nodes = newNodes\n\treturn instanceMap, versionMap, nil\n}", "title": "" }, { "docid": "67d67332884bb97d913d5d84bbd65785", "score": "0.51873064", "text": "func (h *Heap) Swap(x, y int) {\n\t(*h)[x], (*h)[y] = (*h)[y], (*h)[x]\n}", "title": "" }, { "docid": "6736801fb4d946a9656bdb6d0a81df6a", "score": "0.5174307", "text": "func flip(h *Node) {\n\th.Black = !h.Black\n\th.Left.Black = !h.Left.Black\n\th.Right.Black = !h.Right.Black\n}", "title": "" }, { "docid": "70281b9430df207bf5ba4fe7486e4ed5", "score": "0.51731014", "text": "func (h *Heap) swap(i int, j int) error {\n\ttemp := h.data[i]\n\th.data[i] = h.data[j]\n\th.data[j] = temp\n\treturn nil\n}", "title": "" }, { "docid": "31484f334253de1803536756adfdf31a", "score": "0.5170189", "text": "func (pq dijkstraPriorityQueue) Swap(i, j int) {\n\tpq[i], pq[j] = pq[j], pq[i]\n\tpq[i].index = i\n\tpq[j].index = j\n}", "title": "" }, { "docid": "00a786520ff7d9b8d99dc9b42fa2f2f0", "score": "0.5151451", "text": "func (_Main *MainTransactorSession) Swap(amount0Out *big.Int, amount1Out *big.Int, to common.Address, data []byte) (*types.Transaction, error) {\n\treturn _Main.Contract.Swap(&_Main.TransactOpts, amount0Out, amount1Out, to, data)\n}", "title": "" }, { "docid": "6613804f62c06a129049920d14496b36", "score": "0.5144625", "text": "func (l LinksByIndex) Swap(i, j int) { l[i], l[j] = l[j], l[i] }", "title": "" }, { "docid": "2e769d492db6417b46e0fe8b3e4f914d", "score": "0.51438946", "text": "func swap(a, b *int) {\n\t*a, *b = *b, *a\n}", "title": "" }, { "docid": "648602748a2d4ce916ea7a4b125a0dab", "score": "0.5134355", "text": "func (h *Handle) Swap(s1, s2 *Set) error {\n\tif s1 == nil || s2 == nil {\n\t\treturn ErrorUnexpectedNil\n\t}\n\n\tif s1.Type != s2.Type {\n\t\treturn ErrorIncompatibleSwap\n\t}\n\n\tif !h.isOpen(h) {\n\t\treturn ErrorNotStarted\n\t}\n\n\tsets := &Ipset{[]*Set{s1, s2}}\n\t//\t_, err := io.WriteString(h, fmt.Sprintf(\"swap %s %s\\n\", s1.Name, s2.Name))\n\t_, err := io.WriteString(h, sets.Render(RenderSwap))\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to swap sets\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "6b1b056f9298c5aeab2069040067e072", "score": "0.5134152", "text": "func (d *Dense[T]) SwapInPlace(r1, r2 int) Matrix {\n\tif r1 < 0 || r1 >= d.shape[0] {\n\t\tpanic(\"mat: 'r1' argument out of range\")\n\t}\n\tif r2 < 0 || r2 >= d.shape[0] {\n\t\tpanic(\"mat: 'r2' argument out of range\")\n\t}\n\t// TODO: rewrite for better performance\n\tfor j := 0; j < d.shape[1]; j++ {\n\t\ta, b := r1*d.shape[1]+j, r2*d.shape[1]+j\n\t\td.data[a], d.data[b] = d.data[b], d.data[a]\n\t}\n\treturn d\n}", "title": "" }, { "docid": "e403f63e6a6bd3e055acdd314446c298", "score": "0.51336837", "text": "func (_Swap *SwapTransactor) Swap(opts *bind.TransactOpts, _ids []*big.Int, _tos []common.Address, _amounts []*big.Int, _signature []byte) (*types.Transaction, error) {\n\treturn _Swap.contract.Transact(opts, \"swap\", _ids, _tos, _amounts, _signature)\n}", "title": "" }, { "docid": "95136a4ebf547079afe3d814cc182951", "score": "0.51290137", "text": "func swap2(a, b int) (x, y int) {\n\tx, y = b, a\n\treturn\n}", "title": "" }, { "docid": 
"e6824770e393d81e46dbbf95dcb63031", "score": "0.51251084", "text": "func (ms *multiAddrSorter) Swap(i, j int) {\n\tms.addrs[i], ms.addrs[j] = ms.addrs[j], ms.addrs[i]\n}", "title": "" }, { "docid": "55918d07090f05cc44f49e45327050df", "score": "0.5123878", "text": "func (h *MaxHeap) swap(i1, i2 int) {\n\th.array[i1], h.array[i2] = h.array[i2], h.array[i1]\n}", "title": "" }, { "docid": "369e30b6b574af7981714cdaa1f69a50", "score": "0.51149696", "text": "func (t *Tree) Replace(old node.Node, new node.Node) error {\n\tif !t.HasNode(old.ID()) {\n\t\treturn fmt.Errorf(\"cannot replace node not in the Tree\")\n\t}\n\n\tif old.ID() == new.ID() {\n\t\t// the underlying objects may be different, but the ID's match. Simply track the new [already existing] node\n\t\t// and keep all existing relationships.\n\t\tt.nodes[new.ID()] = new\n\t\treturn nil\n\t}\n\n\t// add the new node\n\terr := t.addNode(new)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set the new node parent to the old node parent\n\tt.parent[new.ID()] = t.parent[old.ID()]\n\n\tfor cid := range t.children[old.ID()] {\n\t\t// replace the parent entry for each child\n\t\tt.parent[cid] = new\n\n\t\t// add child entries to the new node\n\t\tt.children[new.ID()][cid] = t.nodes[cid]\n\t}\n\n\t// replace the child entry for the old parents node\n\tdelete(t.children[t.parent[old.ID()].ID()], old.ID())\n\tt.children[t.parent[old.ID()].ID()][new.ID()] = new\n\n\t// remove the old node (if not already overwritten)\n\tif old.ID() != new.ID() {\n\t\tdelete(t.children, old.ID())\n\t\tdelete(t.nodes, old.ID())\n\t\tdelete(t.parent, old.ID())\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "71b3f1161aca0b5c3e183a03550aa901", "score": "0.5111073", "text": "func TestRaftNodesReplace(t *testing.T) {\n\ttx, cleanup := libtesting.NewTestNodeTx(t)\n\tdefer cleanup()\n\n\t_, err := tx.RaftNodeAdd(\"1.2.3.4:666\")\n\tif err != nil {\n\t\tt.Errorf(\"expected err to be nil: %v\", err)\n\t}\n\n\tnodes := []db.RaftNode{\n\t\t{ID: 2, Address: \"2.2.2.2:666\"},\n\t\t{ID: 3, Address: \"3.3.3.3:666\"},\n\t}\n\terr = tx.RaftNodesReplace(nodes)\n\tif err != nil {\n\t\tt.Errorf(\"expected err to be nil: %v\", err)\n\t}\n\n\tnewNodes, err := tx.RaftNodes()\n\tif err != nil {\n\t\tt.Errorf(\"expected err to be nil: %v\", err)\n\t}\n\tif expected, actual := nodes, newNodes; !reflect.DeepEqual(expected, actual) {\n\t\tt.Errorf(\"expected: %v, actual: %v\", expected, actual)\n\t}\n}", "title": "" }, { "docid": "91e4c329a24505d57974ebd8145ff4f5", "score": "0.5109614", "text": "func (b *Bucket) maintainNodes(elected []discover.NodeID) {\n\t// remake every time instead of delete\n\tb.bucket = make(map[int64][]discover.NodeID)\n\tfor _, v := range elected {\n\t\tb.bucketAdd(v)\n\t}\n}", "title": "" }, { "docid": "72b97a33186f219591248c8af3328d34", "score": "0.51066715", "text": "func (m mounts) Swap(i, j int) {\n\tm[i], m[j] = m[j], m[i]\n}", "title": "" }, { "docid": "532e72b5cd7d1a18de44eeb1eb4cad05", "score": "0.51026946", "text": "func (wp *WorkerPool) Swap(m, n int) {\n\n\ta := *wp\n\ttmp := a[n]\n\ta[n] = a[m]\n\ta[m] = tmp\n\ta[m].Index = m\n\ta[n].Index = n\n}", "title": "" }, { "docid": "dd2cbced09695d3d3caec9bd180ed16c", "score": "0.5094656", "text": "func (n *nodeStore) setOwnNodeWithoutPoolUpdate(node *ciliumv2.CiliumNode) {\n\tn.mutex.Lock()\n\tn.ownNode = node\n\tn.mutex.Unlock()\n}", "title": "" } ]
5d4fc6e6c0255a10fd59289aa85e9ba3
WithAid adds the aid to the revoke params
[ { "docid": "d8aea50ff836eaee3a3b10ccd97808c0", "score": "0.76619524", "text": "func (o *RevokeParams) WithAid(aid string) *RevokeParams {\n\to.SetAid(aid)\n\treturn o\n}", "title": "" } ]
[ { "docid": "1dd4b0a1bcd18cb5db48e29398ffc74c", "score": "0.6523156", "text": "func (o *RevokeParams) SetAid(aid string) {\n\to.Aid = aid\n}", "title": "" }, { "docid": "2a0b4dbef6ab46d10ebdda5b2fc9d49e", "score": "0.5815993", "text": "func (o *TokenParams) WithAid(aid string) *TokenParams {\n\to.SetAid(aid)\n\treturn o\n}", "title": "" }, { "docid": "3c1ba5976c131c2f8e26768706847315", "score": "0.5792191", "text": "func (o *TokenParams) SetAid(aid string) {\n\to.Aid = aid\n}", "title": "" }, { "docid": "27a1e7390eaf7089acfe214789730122", "score": "0.5500358", "text": "func NewRevokeParamsWithHTTPClient(client *http.Client) *RevokeParams {\n\tvar (\n\t\taidDefault = string(\"default\")\n\t\ttidDefault = string(\"default\")\n\t)\n\treturn &RevokeParams{\n\t\tAid: aidDefault,\n\t\tTid: tidDefault,\n\t\tHTTPClient: client,\n\t}\n}", "title": "" }, { "docid": "4773653d230816c6c3cba2af0bfd30db", "score": "0.5261489", "text": "func withAgencyID(id int) agencyOption {\n\treturn func(m *AgencyMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Agency\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Agency, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Agency.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "409d0190553ed28f2367b003213cf142", "score": "0.524697", "text": "func NewRevokeParamsWithTimeout(timeout time.Duration) *RevokeParams {\n\tvar (\n\t\taidDefault = string(\"default\")\n\t\ttidDefault = string(\"default\")\n\t)\n\treturn &RevokeParams{\n\t\tAid: aidDefault,\n\t\tTid: tidDefault,\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "42feb5c92ee4f98cde1f8d2560ddfb73", "score": "0.5127327", "text": "func WithID(id int) interface{\n\tmethodAOption\n\tcalloptions.CallOption\n}{\n\treturn struct{\n\t\tmethodAOption\n\t\tcalloptions.CallOption\n\t}{\n\t\tCallOption: calloptions.New(\n\t\t\tfunc(a any) error{\n\t\t\t\ta.(*aCallOptions).id = id\n\t\t\t\treturn nil\n\t\t\t},\n\t\t),\n\t}\n}", "title": "" }, { "docid": "f5ee6279e54389a28a4cd372fc6e3eb1", "score": "0.5016574", "text": "func NewRevokeParams() *RevokeParams {\n\tvar (\n\t\taidDefault = string(\"default\")\n\t\ttidDefault = string(\"default\")\n\t)\n\treturn &RevokeParams{\n\t\tAid: aidDefault,\n\t\tTid: tidDefault,\n\n\t\ttimeout: cr.DefaultTimeout,\n\t}\n}", "title": "" }, { "docid": "64dc32bf40dc0d02a3b6ca14bb177c7c", "score": "0.4994262", "text": "func revokeOptions(serial string, certToBeRevoked *x509.Certificate, reasonCode *int) *authority.RevokeOptions {\n\topts := &authority.RevokeOptions{\n\t\tSerial: serial,\n\t\tACME: true,\n\t\tCrt: certToBeRevoked,\n\t}\n\tif reasonCode != nil { // NOTE: when implementing CRL and/or OCSP, and reason code is missing, CRL entry extension should be omitted\n\t\topts.Reason = reason(*reasonCode)\n\t\topts.ReasonCode = *reasonCode\n\t}\n\treturn opts\n}", "title": "" }, { "docid": "071f62ffb3dd55b0ca0f5ac54c2a8b80", "score": "0.49864793", "text": "func (o *RevokeParams) WithTid(tid string) *RevokeParams {\n\to.SetTid(tid)\n\treturn o\n}", "title": "" }, { "docid": "f1c870b800df974015077f2152ca0710", "score": "0.49859476", "text": "func (o *RevokeParams) WithTimeout(timeout time.Duration) *RevokeParams {\n\to.SetTimeout(timeout)\n\treturn o\n}", "title": "" }, { "docid": "01e1dc267dbc0b309dc99b73ac821a6d", "score": "0.47656476", "text": "func 
(a *ShareApiService) QuicklinkRevokeIdGet(ctx context.Context, id string) (IdResp, *http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Get\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t \tsuccessPayload IdResp\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/quicklink/revoke/{id}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"id\"+\"}\", fmt.Sprintf(\"%v\", id), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{ \"application/json\", }\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\n\t\t\"application/json\",\n\t\t}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tif ctx != nil {\n\t\t// API Key Authentication\n\t\tif auth, ok := ctx.Value(ContextAPIKey).(APIKey); ok {\n\t\t\tvar key string\n\t\t\tif auth.Prefix != \"\" {\n\t\t\t\tkey = auth.Prefix + \" \" + auth.Key\n\t\t\t} else {\n\t\t\t\tkey = auth.Key\n\t\t\t}\n\t\t\tlocalVarHeaderParams[\"X-Auth-Token\"] = key\n\t\t}\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn successPayload, nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn successPayload, localVarHttpResponse, err\n\t}\n\tdefer localVarHttpResponse.Body.Close()\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tbodyBytes, _ := ioutil.ReadAll(localVarHttpResponse.Body)\n\t\treturn successPayload, localVarHttpResponse, reportError(\"Status: %v, Body: %s\", localVarHttpResponse.Status, bodyBytes)\n\t}\n\n\tif err = json.NewDecoder(localVarHttpResponse.Body).Decode(&successPayload); err != nil {\n\t\treturn successPayload, localVarHttpResponse, err\n\t}\n\n\n\treturn successPayload, localVarHttpResponse, err\n}", "title": "" }, { "docid": "1efc956c504c11b8a5fbc344ed2e68a8", "score": "0.47388354", "text": "func (a *Authority) Revoke(ctx context.Context, revokeOpts *RevokeOptions) error {\n\topts := []interface{}{\n\t\terrs.WithKeyVal(\"serialNumber\", revokeOpts.Serial),\n\t\terrs.WithKeyVal(\"reasonCode\", revokeOpts.ReasonCode),\n\t\terrs.WithKeyVal(\"reason\", revokeOpts.Reason),\n\t\terrs.WithKeyVal(\"passiveOnly\", revokeOpts.PassiveOnly),\n\t\terrs.WithKeyVal(\"MTLS\", revokeOpts.MTLS),\n\t\terrs.WithKeyVal(\"context\", provisioner.MethodFromContext(ctx).String()),\n\t}\n\tif revokeOpts.MTLS {\n\t\topts = append(opts, errs.WithKeyVal(\"certificate\", base64.StdEncoding.EncodeToString(revokeOpts.Crt.Raw)))\n\t} else {\n\t\topts = append(opts, errs.WithKeyVal(\"token\", revokeOpts.OTT))\n\t}\n\n\trci := &db.RevokedCertificateInfo{\n\t\tSerial: revokeOpts.Serial,\n\t\tReasonCode: revokeOpts.ReasonCode,\n\t\tReason: revokeOpts.Reason,\n\t\tMTLS: revokeOpts.MTLS,\n\t\tRevokedAt: 
time.Now().UTC(),\n\t}\n\n\tvar (\n\t\tp provisioner.Interface\n\t\terr error\n\t)\n\t// If not mTLS then get the TokenID of the token.\n\tif !revokeOpts.MTLS {\n\t\ttoken, err := jose.ParseSigned(revokeOpts.OTT)\n\t\tif err != nil {\n\t\t\treturn errs.Wrap(http.StatusUnauthorized, err,\n\t\t\t\t\"authority.Revoke; error parsing token\", opts...)\n\t\t}\n\n\t\t// Get claims w/out verification.\n\t\tvar claims Claims\n\t\tif err = token.UnsafeClaimsWithoutVerification(&claims); err != nil {\n\t\t\treturn errs.Wrap(http.StatusUnauthorized, err, \"authority.Revoke\", opts...)\n\t\t}\n\n\t\t// This method will also validate the audiences for JWK provisioners.\n\t\tvar ok bool\n\t\tp, ok = a.provisioners.LoadByToken(token, &claims.Claims)\n\t\tif !ok {\n\t\t\treturn errs.InternalServer(\"authority.Revoke; provisioner not found\", opts...)\n\t\t}\n\t\trci.TokenID, err = p.GetTokenID(revokeOpts.OTT)\n\t\tif err != nil {\n\t\t\treturn errs.Wrap(http.StatusInternalServerError, err,\n\t\t\t\t\"authority.Revoke; could not get ID for token\")\n\t\t}\n\t\topts = append(opts, errs.WithKeyVal(\"tokenID\", rci.TokenID))\n\t} else {\n\t\t// Load the Certificate provisioner if one exists.\n\t\tp, err = a.LoadProvisionerByCertificate(revokeOpts.Crt)\n\t\tif err != nil {\n\t\t\treturn errs.Wrap(http.StatusUnauthorized, err,\n\t\t\t\t\"authority.Revoke: unable to load certificate provisioner\", opts...)\n\t\t}\n\t}\n\trci.ProvisionerID = p.GetID()\n\topts = append(opts, errs.WithKeyVal(\"provisionerID\", rci.ProvisionerID))\n\n\tif provisioner.MethodFromContext(ctx) == provisioner.SSHRevokeMethod {\n\t\terr = a.db.RevokeSSH(rci)\n\t} else {\n\t\t// Revoke an X.509 certificate using CAS. If the certificate is not\n\t\t// provided we will try to read it from the db. If the read fails we\n\t\t// won't throw an error as it will be responsibility of the CAS\n\t\t// implementation to require a certificate.\n\t\tvar revokedCert *x509.Certificate\n\t\tif revokeOpts.Crt != nil {\n\t\t\trevokedCert = revokeOpts.Crt\n\t\t} else if rci.Serial != \"\" {\n\t\t\trevokedCert, _ = a.db.GetCertificate(rci.Serial)\n\t\t}\n\n\t\t// CAS operation, note that SoftCAS (default) is a noop.\n\t\t// The revoke happens when this is stored in the db.\n\t\t_, err = a.x509CAService.RevokeCertificate(&casapi.RevokeCertificateRequest{\n\t\t\tCertificate: revokedCert,\n\t\t\tReason: rci.Reason,\n\t\t\tReasonCode: rci.ReasonCode,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn errs.Wrap(http.StatusInternalServerError, err, \"authority.Revoke\", opts...)\n\t\t}\n\n\t\t// Save as revoked in the Db.\n\t\terr = a.db.Revoke(rci)\n\t}\n\tswitch err {\n\tcase nil:\n\t\treturn nil\n\tcase db.ErrNotImplemented:\n\t\treturn errs.NotImplemented(\"authority.Revoke; no persistence layer configured\", opts...)\n\tcase db.ErrAlreadyExists:\n\t\treturn errs.BadRequest(\"authority.Revoke; certificate with serial \"+\n\t\t\t\"number %s has already been revoked\", append([]interface{}{rci.Serial}, opts...)...)\n\tdefault:\n\t\treturn errs.Wrap(http.StatusInternalServerError, err, \"authority.Revoke\", opts...)\n\t}\n}", "title": "" }, { "docid": "3a9122c304b2c6ce1797f35e7441f6e6", "score": "0.473371", "text": "func WithKid(kid string) Option {\n\treturn func(ctx *context) {\n\t\tctx.kid = kid\n\t}\n}", "title": "" }, { "docid": "2db5128daef9b4cb24bf402ecc10f852", "score": "0.46559194", "text": "func (m *IpsecTunnelParams) Redact(ctx context.Context) error {\n\t// clear fields with confidential option set (at message or field level)\n\tif m == nil {\n\t\treturn 
nil\n\t}\n\n\tif err := m.GetIpsecPsk().Redact(ctx); err != nil {\n\t\treturn errors.Wrapf(err, \"Redacting IpsecTunnelParams.ipsec_psk\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "5c207b400197797557f9fa15cb72c84c", "score": "0.4634086", "text": "func withUserActiveRecordID(id string) useractiverecordOption {\n\treturn func(m *UserActiveRecordMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *UserActiveRecord\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*UserActiveRecord, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().UserActiveRecord.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "9937025473b4f1642a1ee67b8176b090", "score": "0.4616597", "text": "func NewPatchAuthorizationsIDRequestWithBody(server string, authID string, params *PatchAuthorizationsIDParams, contentType string, body io.Reader) (*http.Request, error) {\n\tvar err error\n\n\tvar pathParam0 string\n\n\tpathParam0, err = runtime.StyleParamWithLocation(\"simple\", false, \"authID\", runtime.ParamLocationPath, authID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tserverURL, err := url.Parse(server)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\toperationPath := fmt.Sprintf(\"/authorizations/%s\", pathParam0)\n\tif operationPath[0] == '/' {\n\t\toperationPath = \".\" + operationPath\n\t}\n\n\tqueryURL, err := serverURL.Parse(operationPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := http.NewRequest(\"PATCH\", queryURL.String(), body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq.Header.Add(\"Content-Type\", contentType)\n\n\tif params.ZapTraceSpan != nil {\n\t\tvar headerParam0 string\n\n\t\theaderParam0, err = runtime.StyleParamWithLocation(\"simple\", false, \"Zap-Trace-Span\", runtime.ParamLocationHeader, *params.ZapTraceSpan)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treq.Header.Set(\"Zap-Trace-Span\", headerParam0)\n\t}\n\n\treturn req, nil\n}", "title": "" }, { "docid": "1404dce4c38f8c15f6d6bb1841104a60", "score": "0.46135968", "text": "func withAgency(node *Agency) agencyOption {\n\treturn func(m *AgencyMutation) {\n\t\tm.oldValue = func(context.Context) (*Agency, error) {\n\t\t\treturn node, nil\n\t\t}\n\t\tm.id = &node.ID\n\t}\n}", "title": "" }, { "docid": "9a5fa11fb1daa5c8ca21bb08f786a11f", "score": "0.46125913", "text": "func (f *Funding) WithdrawCancel(ctx context.Context, aclass AssetsClass, asset asset.Currency, refID string) (res bool, err error) {\n\tbody := url.Values{\n\t\t\"aclass\": {string(aclass)},\n\t\t\"asset\": {asset.String()},\n\t\t\"refid\": {refID},\n\t}\n\n\treq, err := f.Client.DialWithAuth(ctx, http.MethodPost, WithdrawCancelResource, body)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tkrakenResp, err := f.Client.Call(req)\n\tif err != nil {\n\t\treturn\n\t}\n\n\terr = krakenResp.ExtractResult(&res)\n\treturn\n}", "title": "" }, { "docid": "35b1c660f8ae54129ab463652ba2254b", "score": "0.4604228", "text": "func (r ApiGetOauthTokenRequest) ClientId(clientId string) ApiGetOauthTokenRequest {\n\tr.clientId = &clientId\n\treturn r\n}", "title": "" }, { "docid": "1629b081e49d6734c57e49dfebe036d9", "score": "0.4556458", "text": "func aid(addy string) (ret xdr.AccountId) {\n\terr := ret.SetAddress(addy)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn\n}", "title": "" }, { "docid": 
"6832a6032235710bb0ed9353040d943f", "score": "0.4544817", "text": "func (m *TermsOfUseRequestBuilder) AgreementAcceptancesById(id string)(*if58558aaf7d3a571a2b7b154eaf42dbeae3dfda757c15665c189420b3fc2cc8f.AgreementAcceptanceItemRequestBuilder) {\n urlTplParams := make(map[string]string)\n for idx, item := range m.pathParameters {\n urlTplParams[idx] = item\n }\n if id != \"\" {\n urlTplParams[\"agreementAcceptance%2Did\"] = id\n }\n return if58558aaf7d3a571a2b7b154eaf42dbeae3dfda757c15665c189420b3fc2cc8f.NewAgreementAcceptanceItemRequestBuilderInternal(urlTplParams, m.requestAdapter);\n}", "title": "" }, { "docid": "bdf12cd12db277b97d148c539dfd587f", "score": "0.45042443", "text": "func (o *RevokeParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tif err := r.SetTimeout(o.timeout); err != nil {\n\t\treturn err\n\t}\n\tvar res []error\n\n\t// path param aid\n\tif err := r.SetPathParam(\"aid\", o.Aid); err != nil {\n\t\treturn err\n\t}\n\n\t// path param tid\n\tif err := r.SetPathParam(\"tid\", o.Tid); err != nil {\n\t\treturn err\n\t}\n\n\tif o.Token != nil {\n\n\t\t// form param token\n\t\tvar frToken string\n\t\tif o.Token != nil {\n\t\t\tfrToken = *o.Token\n\t\t}\n\t\tfToken := frToken\n\t\tif fToken != \"\" {\n\t\t\tif err := r.SetFormParam(\"token\", fToken); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "47a6bf3afb7ba58bf5774a8169869403", "score": "0.4502687", "text": "func (client *GenericClient) deleteByIDCreateRequest(ctx context.Context, resourceID string, apiVersion string) (*policy.Request, error) {\n\turlPath := \"/{resourceId}\"\n\tif resourceID == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceId}\", resourceID)\n\treq, err := runtime.NewRequest(ctx, http.MethodDelete, runtime.JoinPaths(client.endpoint, urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", apiVersion)\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header.Set(\"Accept\", \"application/json\")\n\treturn req, nil\n}", "title": "" }, { "docid": "9860d7d6a3c31e4fa0d6de48ab014ead", "score": "0.44789976", "text": "func (a *ShareApiService) ShareRevokeIdGet(ctx context.Context, id string) (IdResp, *http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Get\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t \tsuccessPayload IdResp\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/share/revoke/{id}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"id\"+\"}\", fmt.Sprintf(\"%v\", id), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{ \"application/json\", }\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\n\t\t\"application/json\",\n\t\t}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif 
localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tif ctx != nil {\n\t\t// API Key Authentication\n\t\tif auth, ok := ctx.Value(ContextAPIKey).(APIKey); ok {\n\t\t\tvar key string\n\t\t\tif auth.Prefix != \"\" {\n\t\t\t\tkey = auth.Prefix + \" \" + auth.Key\n\t\t\t} else {\n\t\t\t\tkey = auth.Key\n\t\t\t}\n\t\t\tlocalVarHeaderParams[\"X-Auth-Token\"] = key\n\t\t}\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn successPayload, nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn successPayload, localVarHttpResponse, err\n\t}\n\tdefer localVarHttpResponse.Body.Close()\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tbodyBytes, _ := ioutil.ReadAll(localVarHttpResponse.Body)\n\t\treturn successPayload, localVarHttpResponse, reportError(\"Status: %v, Body: %s\", localVarHttpResponse.Status, bodyBytes)\n\t}\n\n\tif err = json.NewDecoder(localVarHttpResponse.Body).Decode(&successPayload); err != nil {\n\t\treturn successPayload, localVarHttpResponse, err\n\t}\n\n\n\treturn successPayload, localVarHttpResponse, err\n}", "title": "" }, { "docid": "2d5610614a2ae5b8b6ed1db738760769", "score": "0.4427943", "text": "func (o *RevokeParams) WithContext(ctx context.Context) *RevokeParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "4433c4ed815336e0679593292ed0fa14", "score": "0.43930042", "text": "func (client *Client) RevokeInstanceFromTransitRouterWithOptions(request *RevokeInstanceFromTransitRouterRequest, runtime *util.RuntimeOptions) (_result *RevokeInstanceFromTransitRouterResponse, _err error) {\n\t_err = util.ValidateModel(request)\n\tif _err != nil {\n\t\treturn _result, _err\n\t}\n\tquery := map[string]interface{}{}\n\tif !tea.BoolValue(util.IsUnset(request.CenId)) {\n\t\tquery[\"CenId\"] = request.CenId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.CenOwnerId)) {\n\t\tquery[\"CenOwnerId\"] = request.CenOwnerId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.InstanceId)) {\n\t\tquery[\"InstanceId\"] = request.InstanceId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.InstanceType)) {\n\t\tquery[\"InstanceType\"] = request.InstanceType\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.OwnerAccount)) {\n\t\tquery[\"OwnerAccount\"] = request.OwnerAccount\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.OwnerId)) {\n\t\tquery[\"OwnerId\"] = request.OwnerId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.RegionId)) {\n\t\tquery[\"RegionId\"] = request.RegionId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.ResourceOwnerAccount)) {\n\t\tquery[\"ResourceOwnerAccount\"] = request.ResourceOwnerAccount\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.ResourceOwnerId)) {\n\t\tquery[\"ResourceOwnerId\"] = request.ResourceOwnerId\n\t}\n\n\treq := &openapi.OpenApiRequest{\n\t\tQuery: openapiutil.Query(query),\n\t}\n\tparams := &openapi.Params{\n\t\tAction: tea.String(\"RevokeInstanceFromTransitRouter\"),\n\t\tVersion: tea.String(\"2017-09-12\"),\n\t\tProtocol: tea.String(\"HTTPS\"),\n\t\tPathname: tea.String(\"/\"),\n\t\tMethod: tea.String(\"POST\"),\n\t\tAuthType: tea.String(\"AK\"),\n\t\tStyle: tea.String(\"RPC\"),\n\t\tReqBodyType: tea.String(\"formData\"),\n\t\tBodyType: tea.String(\"json\"),\n\t}\n\t_result = &RevokeInstanceFromTransitRouterResponse{}\n\t_body, 
_err := client.CallApi(params, req, runtime)\n\tif _err != nil {\n\t\treturn _result, _err\n\t}\n\t_err = tea.Convert(_body, &_result)\n\treturn _result, _err\n}", "title": "" }, { "docid": "c8ab7f1b5c8d563fe77c1b29479a92ef", "score": "0.43740505", "text": "func WithClientID(clientID string) trace.EventOption {\n\treturn trace.WithAttributes(otelattr.String(attributeKeyOAuth2ClientID, clientID))\n}", "title": "" }, { "docid": "ac639abd552f7e99baaaf8d20c25a167", "score": "0.43711358", "text": "func (a *Authority) authorizeRevoke(opts *RevokeOptions) (p provisioner.Interface, err error) {\n\tif opts.MTLS {\n\t\tif opts.Crt.SerialNumber.String() != opts.Serial {\n\t\t\treturn nil, errors.New(\"authorizeRevoke: serial number in certificate different than body\")\n\t\t}\n\t\t// Load the Certificate provisioner if one exists.\n\t\tp, err = a.LoadProvisionerByCertificate(opts.Crt)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"authorizeRevoke\")\n\t\t}\n\t} else {\n\t\t// Gets the token provisioner and validates common token fields.\n\t\tp, err = a.authorizeToken(opts.OTT)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"authorizeRevoke\")\n\t\t}\n\n\t\t// Call the provisioner AuthorizeRevoke to apply provisioner specific auth claims.\n\t\terr = p.AuthorizeRevoke(opts.OTT)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"authorizeRevoke\")\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "c790d84ee2a1079b591e07090904485e", "score": "0.4364731", "text": "func (m *TunnelParams) Redact(ctx context.Context) error {\n\t// clear fields with confidential option set (at message or field level)\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif err := m.GetIpsec().Redact(ctx); err != nil {\n\t\treturn errors.Wrapf(err, \"Redacting TunnelParams.ipsec\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "d8e392669ceac5fe1b26a47a114e0bea", "score": "0.43643257", "text": "func (m *AgreementItemRequestBuilder) AcceptancesById(id string)(*i22d600b0c4aaca0bdb6422ea093131f6b276800efda0ef10e38f62d8c5ee6950.AgreementAcceptanceItemRequestBuilder) {\n urlTplParams := make(map[string]string)\n for idx, item := range m.pathParameters {\n urlTplParams[idx] = item\n }\n if id != \"\" {\n urlTplParams[\"agreementAcceptance%2Did\"] = id\n }\n return i22d600b0c4aaca0bdb6422ea093131f6b276800efda0ef10e38f62d8c5ee6950.NewAgreementAcceptanceItemRequestBuilderInternal(urlTplParams, m.requestAdapter);\n}", "title": "" }, { "docid": "955207f6268ddbe54102fa0835e4eee8", "score": "0.43623155", "text": "func (handler AcraBlockHandler) EncryptWithClientID(clientID, data []byte, context *encryptor.DataEncryptorContext) ([]byte, error) {\n\t// skip already encrypted AcraBlock\n\tif _, _, err := acrablock.ExtractAcraBlockFromData(data); err == nil {\n\t\treturn data, nil\n\t}\n\tkeys, err := context.Keystore.GetClientIDSymmetricKeys(clientID)\n\tif err != nil {\n\t\treturn data, fmt.Errorf(\"can't read private key for matched client_id to encrypt with AcraBlock: %w\", err)\n\t}\n\tdefer utils.ZeroizeSymmetricKeys(keys)\n\n\tif len(keys) == 0 {\n\t\treturn data, keystore.ErrKeysNotFound\n\t}\n\treturn acrablock.CreateAcraBlock(data, keys[0], nil)\n}", "title": "" }, { "docid": "19f253f27e27020c0d89e4a27047fc94", "score": "0.43513474", "text": "func (c *ExistingClient) ByID(id int64) {\n\tc.ClientID = id\n}", "title": "" }, { "docid": "28b6ec3d9442fb8cd1c10058fa9d44bb", "score": "0.4329269", "text": "func withAuthID(id int) authOption {\n\treturn func(m *AuthMutation) {\n\t\tvar (\n\t\t\terr 
error\n\t\t\tonce sync.Once\n\t\t\tvalue *Auth\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Auth, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Auth.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "13c3e78706643e1328e7cce95fa5e3ee", "score": "0.432609", "text": "func WithAPIToken(token string) ClientOption {\n\treturn func(c *client) error {\n\t\tctx := context.Background()\n\t\tc.client = oauth2.NewClient(ctx, oauth2.StaticTokenSource(&oauth2.Token{AccessToken: token}))\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "cf09113170a6c9fe9583b5b51a6e7551", "score": "0.43167305", "text": "func (a *ReservationActionsApiService) BookingReservationActionsByIdAmendPut(ctx _context.Context, id string) ApiBookingReservationActionsByIdAmendPutRequest {\n\treturn ApiBookingReservationActionsByIdAmendPutRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tid: id,\n\t}\n}", "title": "" }, { "docid": "9f3209d14d99509e45387c10c545df55", "score": "0.43103525", "text": "func withBouncerID(id int) bouncerOption {\n\treturn func(m *BouncerMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Bouncer\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Bouncer, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = errors.New(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Bouncer.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "0e6cfe6dd9296daf9a924db9260cda12", "score": "0.43071985", "text": "func (o *RevokeParams) WithHTTPClient(client *http.Client) *RevokeParams {\n\to.SetHTTPClient(client)\n\treturn o\n}", "title": "" }, { "docid": "8e8cab39e94b0c37a7a261d8b19d8f78", "score": "0.42986554", "text": "func (aaa *OAuthService) RevokeAUser(input *o_auth.RevokeAUserParams) error {\n\ttoken, err := aaa.TokenRepository.GetToken()\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, badRequest, unauthorized, err := aaa.Client.OAuth.RevokeAUser(input, client.BearerToken(*token.AccessToken))\n\tif badRequest != nil {\n\t\treturn badRequest\n\t}\n\tif unauthorized != nil {\n\t\treturn unauthorized\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "2d24358248bdc94e5e55e011980f220e", "score": "0.42940947", "text": "func NewDeleteAuthorizationsIDRequest(server string, authID string, params *DeleteAuthorizationsIDParams) (*http.Request, error) {\n\tvar err error\n\n\tvar pathParam0 string\n\n\tpathParam0, err = runtime.StyleParamWithLocation(\"simple\", false, \"authID\", runtime.ParamLocationPath, authID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tserverURL, err := url.Parse(server)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\toperationPath := fmt.Sprintf(\"/authorizations/%s\", pathParam0)\n\tif operationPath[0] == '/' {\n\t\toperationPath = \".\" + operationPath\n\t}\n\n\tqueryURL, err := serverURL.Parse(operationPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := http.NewRequest(\"DELETE\", queryURL.String(), nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif params.ZapTraceSpan != nil {\n\t\tvar headerParam0 string\n\n\t\theaderParam0, err = runtime.StyleParamWithLocation(\"simple\", false, \"Zap-Trace-Span\", runtime.ParamLocationHeader, 
*params.ZapTraceSpan)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treq.Header.Set(\"Zap-Trace-Span\", headerParam0)\n\t}\n\n\treturn req, nil\n}", "title": "" }, { "docid": "da6d4f90a2cd22e56878942b4a18bd40", "score": "0.42817286", "text": "func withOAuth2ClientID(id string) oauth2clientOption {\n\treturn func(m *OAuth2ClientMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *OAuth2Client\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*OAuth2Client, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = errors.New(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().OAuth2Client.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "cf02456753ea7f3092e36e9fd4697b16", "score": "0.428143", "text": "func WithAuth(handler func(http.ResponseWriter, *http.Request), checker *Checker, require Require) (func(http.ResponseWriter, *http.Request), error) {\n\tif !require.ClientID && (require.ClientSecret || len(require.Role) != 0) {\n\t\treturn nil, status.Errorf(codes.Internal, \"must require client_id when require client_secret or bearer token\")\n\t}\n\n\tswitch require.Role {\n\tcase None, User, Admin:\n\tdefault:\n\t\treturn nil, status.Errorf(codes.Internal, \"undefined role: %s\", require.Role)\n\t}\n\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tvar linkedID *ga4gh.Identity\n\t\tlog, id, err := checker.check(r, require)\n\t\tif err == nil && len(r.Header.Get(LinkAuthorizationHeader)) > 0 {\n\t\t\tlinkedID, err = checker.verifiedBearerToken(r, LinkAuthorizationHeader, oathclients.ExtractClientID(r))\n\t\t\tif err == nil && !strutil.ContainsWord(linkedID.Scope, \"link\") {\n\t\t\t\terr = errutil.WithErrorReason(errScopeMissing, status.Errorf(codes.Unauthenticated, \"linked auth bearer token missing required 'link' scope\"))\n\t\t\t}\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.ErrorType = errutil.ErrorReason(err)\n\t\t}\n\t\twriteAccessLog(checker.Logger, log, err, r)\n\t\tif err != nil {\n\t\t\thttputils.WriteError(w, err)\n\t\t\treturn\n\t\t}\n\n\t\tisAdmin := false\n\t\tif id != nil {\n\t\t\tisAdmin = checker.IsAdmin(id) == nil\n\t\t}\n\t\ta := &Context{\n\t\t\tID: id,\n\t\t\tLinkedID: linkedID,\n\t\t\tClientID: oathclients.ExtractClientID(r),\n\t\t\tClientSecret: oathclients.ExtractClientSecret(r),\n\t\t\tIsAdmin: isAdmin,\n\t\t}\n\t\tr = r.WithContext(context.WithValue(r.Context(), authContextKey, a))\n\n\t\thandler(w, r)\n\t}, nil\n}", "title": "" }, { "docid": "ad124a6b7127f7340e3f20bdd77d1a8d", "score": "0.4264021", "text": "func (_Attestations *AttestationsTransactor) Revoke(opts *bind.TransactOpts, identifier [32]byte, index *big.Int) (*types.Transaction, error) {\n\treturn _Attestations.contract.Transact(opts, \"revoke\", identifier, index)\n}", "title": "" }, { "docid": "2cbdb0e17efbafd455856462418c4929", "score": "0.42578948", "text": "func NewRevokeParamsWithContext(ctx context.Context) *RevokeParams {\n\tvar (\n\t\taidDefault = string(\"default\")\n\t\ttidDefault = string(\"default\")\n\t)\n\treturn &RevokeParams{\n\t\tAid: aidDefault,\n\t\tTid: tidDefault,\n\n\t\tContext: ctx,\n\t}\n}", "title": "" }, { "docid": "bb3326e83aa7e04e411d1b7967317712", "score": "0.4254254", "text": "func WithAPIToken(token string) ConfigFunc {\n\treturn func(c *Client) {\n\t\tot := c.client.Transport\n\t\tc.client.Transport = rtFunc(func(r *http.Request) (*http.Response, error) {\n\t\t\tif token != \"\" 
{\n\t\t\t\tr.Header.Set(\"Authorization\", \"Bearer \"+token)\n\t\t\t}\n\t\t\treturn ot.RoundTrip(r)\n\t\t})\n\t}\n}", "title": "" }, { "docid": "a01000cf216eca835bf7ced73805d494", "score": "0.4253366", "text": "func (_Attestations *AttestationsTransactorSession) Revoke(identifier [32]byte, index *big.Int) (*types.Transaction, error) {\n\treturn _Attestations.Contract.Revoke(&_Attestations.TransactOpts, identifier, index)\n}", "title": "" }, { "docid": "d8070472acd07cd02faaa77ba946be23", "score": "0.42451516", "text": "func (i Invitation) Revoked() Invitation {\n\ti.Status = InvitationStatusRevoked\n\ti.UpdatedUTC = chrono.TimeNow()\n\n\treturn i\n}", "title": "" }, { "docid": "1182a7235973be86c8dc5a86b84233dc", "score": "0.42404994", "text": "func (_Attestations *AttestationsSession) Revoke(identifier [32]byte, index *big.Int) (*types.Transaction, error) {\n\treturn _Attestations.Contract.Revoke(&_Attestations.TransactOpts, identifier, index)\n}", "title": "" }, { "docid": "306db842033c67bdd12911bd4654d86f", "score": "0.42382112", "text": "func WithClaimantID(id uuid.UUID) Option {\n\treturn func(eq *EntroQ) {\n\t\teq.clientID = id\n\t}\n}", "title": "" }, { "docid": "614eee217344842ef57f91640901bf7d", "score": "0.42367536", "text": "func logRevoke(w http.ResponseWriter, ri *authority.RevokeOptions) {\n\tif rl, ok := w.(logging.ResponseLogger); ok {\n\t\trl.WithFields(map[string]interface{}{\n\t\t\t\"serial\": ri.Serial,\n\t\t\t\"reasonCode\": ri.ReasonCode,\n\t\t\t\"reason\": ri.Reason,\n\t\t\t\"passiveOnly\": ri.PassiveOnly,\n\t\t\t\"ACME\": ri.ACME,\n\t\t})\n\t}\n}", "title": "" }, { "docid": "764916071f8c646189b73fa7922dd2aa", "score": "0.42346323", "text": "func (d *Resource) WithID(id string) *Resource {\n\td.JSON[\"tacoIdentifier\"] = id\n\treturn d\n}", "title": "" }, { "docid": "c3f41ffa6ba3fef5a3fccb0b26b6a08b", "score": "0.42080185", "text": "func (client *Client) ModifyDedicatedHostAutoRenewAttributeWithOptions(request *ModifyDedicatedHostAutoRenewAttributeRequest, runtime *util.RuntimeOptions) (_result *ModifyDedicatedHostAutoRenewAttributeResponse, _err error) {\n\t_err = util.ValidateModel(request)\n\tif _err != nil {\n\t\treturn _result, _err\n\t}\n\tquery := map[string]interface{}{}\n\tif !tea.BoolValue(util.IsUnset(request.AutoRenew)) {\n\t\tquery[\"AutoRenew\"] = request.AutoRenew\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.AutoRenewWithEcs)) {\n\t\tquery[\"AutoRenewWithEcs\"] = request.AutoRenewWithEcs\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.DedicatedHostIds)) {\n\t\tquery[\"DedicatedHostIds\"] = request.DedicatedHostIds\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.Duration)) {\n\t\tquery[\"Duration\"] = request.Duration\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.OwnerAccount)) {\n\t\tquery[\"OwnerAccount\"] = request.OwnerAccount\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.OwnerId)) {\n\t\tquery[\"OwnerId\"] = request.OwnerId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.PeriodUnit)) {\n\t\tquery[\"PeriodUnit\"] = request.PeriodUnit\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.RegionId)) {\n\t\tquery[\"RegionId\"] = request.RegionId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.RenewalStatus)) {\n\t\tquery[\"RenewalStatus\"] = request.RenewalStatus\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.ResourceOwnerAccount)) {\n\t\tquery[\"ResourceOwnerAccount\"] = request.ResourceOwnerAccount\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.ResourceOwnerId)) {\n\t\tquery[\"ResourceOwnerId\"] = request.ResourceOwnerId\n\t}\n\n\treq := 
&openapi.OpenApiRequest{\n\t\tQuery: openapiutil.Query(query),\n\t}\n\tparams := &openapi.Params{\n\t\tAction: tea.String(\"ModifyDedicatedHostAutoRenewAttribute\"),\n\t\tVersion: tea.String(\"2014-05-26\"),\n\t\tProtocol: tea.String(\"HTTPS\"),\n\t\tPathname: tea.String(\"/\"),\n\t\tMethod: tea.String(\"POST\"),\n\t\tAuthType: tea.String(\"AK\"),\n\t\tStyle: tea.String(\"RPC\"),\n\t\tReqBodyType: tea.String(\"formData\"),\n\t\tBodyType: tea.String(\"json\"),\n\t}\n\t_result = &ModifyDedicatedHostAutoRenewAttributeResponse{}\n\t_body, _err := client.CallApi(params, req, runtime)\n\tif _err != nil {\n\t\treturn _result, _err\n\t}\n\t_err = tea.Convert(_body, &_result)\n\treturn _result, _err\n}", "title": "" }, { "docid": "427b9f22fa78d75e2b88c9906fd61714", "score": "0.4201877", "text": "func NewRevokeTokenParamsWithHTTPClient(client *http.Client) *RevokeTokenParams {\n\tvar ()\n\treturn &RevokeTokenParams{\n\t\tHTTPClient: client,\n\t}\n}", "title": "" }, { "docid": "7a81d9251124a547faf1a49d0cfd41fb", "score": "0.41977617", "text": "func (le *lessor) Revoke(id uint64) error {\n\tle.mu.Lock()\n\tdefer le.mu.Unlock()\n\n\tl := le.leaseMap[id]\n\tif l == nil {\n\t\treturn fmt.Errorf(\"lease: cannot find lease %x\", id)\n\t}\n\n\tdelete(le.leaseMap, l.id)\n\n\tfor item := range l.itemSet {\n\t\tle.dr.DeleteRange([]byte(item.key), []byte(item.endRange))\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "9a0e4b8fc1f3dbd4c4ded73bfd66d60f", "score": "0.41896343", "text": "func (o *RevokeParams) WithToken(token *string) *RevokeParams {\n\to.SetToken(token)\n\treturn o\n}", "title": "" }, { "docid": "9490a90823ce916a6d39fcc8e34049a8", "score": "0.41870868", "text": "func withAuthRequestID(id string) authrequestOption {\n\treturn func(m *AuthRequestMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *AuthRequest\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*AuthRequest, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = errors.New(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().AuthRequest.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "a95d3de4499e038597f822c418f047ca", "score": "0.41816947", "text": "func ModifyAs(id uuid.UUID) ModifyArg {\n\treturn func(m *Modification) {\n\t\tm.Claimant = id\n\t}\n}", "title": "" }, { "docid": "d56895300830d6951071fd5b89709aba", "score": "0.4177926", "text": "func NewPatchKioskEnclosuresEnclosureIDParamsWithTimeout(timeout time.Duration) *PatchKioskEnclosuresEnclosureIDParams {\n\tvar ()\n\treturn &PatchKioskEnclosuresEnclosureIDParams{\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "0e3e10f9c3d2c1e7ecc183b238fcf918", "score": "0.41771746", "text": "func withAuthCodeID(id string) authcodeOption {\n\treturn func(m *AuthCodeMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *AuthCode\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*AuthCode, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = errors.New(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().AuthCode.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "7c75763de447af7750dd81aa593031d0", "score": "0.41764545", "text": "func (m *TermsOfUseRequestBuilder) AgreementsById(id 
string)(*ied57f20b5e5c3beb8344ef3753e6a67a240318854397236c0cd83ef4d9597226.AgreementItemRequestBuilder) {\n urlTplParams := make(map[string]string)\n for idx, item := range m.pathParameters {\n urlTplParams[idx] = item\n }\n if id != \"\" {\n urlTplParams[\"agreement%2Did\"] = id\n }\n return ied57f20b5e5c3beb8344ef3753e6a67a240318854397236c0cd83ef4d9597226.NewAgreementItemRequestBuilderInternal(urlTplParams, m.requestAdapter);\n}", "title": "" }, { "docid": "2b5518d02e337dd93a95044a5b6a6229", "score": "0.41650832", "text": "func WithAPIKey(apiKey string) ClientOptions {\n\treturn func(a *Client) {\n\t\tbaseURL = baseProURL\n\n\t\tbaseTransport := http.DefaultTransport\n\t\tif a.httpClient.Transport != nil {\n\t\t\tbaseTransport = a.httpClient.Transport\n\t\t}\n\n\t\ta.httpClient.Transport = &authTransport{\n\t\t\tapiKey: apiKey,\n\t\t\tbaseTransport: baseTransport,\n\t\t}\n\t}\n}", "title": "" }, { "docid": "a297bd383d567669b14da92a97d7e8a6", "score": "0.41569892", "text": "func WithCompleteParams(addr string, client *httpclient.HTTPClient) Option {\n\treturn func(n *ErdaHPA) {\n\t\tn.addr = addr\n\t\tn.client = client\n\t}\n}", "title": "" }, { "docid": "5547a35e5b1eff21d4a61aa5148ab6ef", "score": "0.41520977", "text": "func (c *Client) RevokeWithContext(ctx context.Context, req *api.RevokeRequest, tr http.RoundTripper) (*api.RevokeResponse, error) {\n\tvar retried bool\n\tbody, err := json.Marshal(req)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"error marshaling request\")\n\t}\n\tvar client *uaClient\nretry:\n\tif tr != nil {\n\t\tclient = newClient(tr)\n\t} else {\n\t\tclient = c.client\n\t}\n\n\tu := c.endpoint.ResolveReference(&url.URL{Path: \"/revoke\"})\n\tresp, err := client.PostWithContext(ctx, u.String(), \"application/json\", bytes.NewReader(body))\n\tif err != nil {\n\t\treturn nil, clientError(err)\n\t}\n\tif resp.StatusCode >= 400 {\n\t\tif !retried && c.retryOnError(resp) { //nolint:contextcheck // deeply nested context; retry using the same context\n\t\t\tretried = true\n\t\t\tgoto retry\n\t\t}\n\t\treturn nil, readError(resp.Body)\n\t}\n\tvar revoke api.RevokeResponse\n\tif err := readJSON(resp.Body, &revoke); err != nil {\n\t\treturn nil, errors.Wrapf(err, \"error reading %s\", u)\n\t}\n\treturn &revoke, nil\n}", "title": "" }, { "docid": "e50e4158f2b2b18e21a17cdc6f8f8031", "score": "0.41497827", "text": "func WithAesKey(key []byte) AesOption {\n\treturn func(o *aesOptions) {\n\t\to.aesKey = key\n\t}\n}", "title": "" }, { "docid": "388eea9626a46ecf0592b26f36428b77", "score": "0.41471994", "text": "func withAccountID(id int) accountOption {\n\treturn func(m *AccountMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Account\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Account, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Account.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "aec8bc8116d7ff2e7959628b0ab517a3", "score": "0.41429454", "text": "func (c *Client) DownloadByAid(option DownloadOptionAid, showProgress bool, progressWriter ProgressWriter) error {\n\turlResponse, err := c.GetStreamUrlAvid(option.Aid, option.Page, option.Resolution, option.Mode, option.Allow4K)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn c.download(urlResponse, option.DownloadOptionCommon, showProgress, progressWriter)\n}", "title": "" }, 
{ "docid": "c2397be2d73b476ca9afb7bdb0f8248c", "score": "0.41424736", "text": "func (d *Dao) RecheckByAid(c context.Context, tp int, aid int64) (recheck *archive.Recheck, err error) {\n\trow := d.db.QueryRow(c, _recheckByAid, aid, tp)\n\trecheck = &archive.Recheck{}\n\tif err = row.Scan(&recheck.ID, &recheck.Type, &recheck.AID, &recheck.UID, &recheck.State, &recheck.CTime, &recheck.MTime); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil\n\t\t\trecheck = nil\n\t\t} else {\n\t\t\tlog.Error(\"RecheckByAid row.Scan(%d,%d) error(%v)\", tp, aid, err)\n\t\t}\n\t\treturn\n\t}\n\treturn\n}", "title": "" }, { "docid": "b6a1ca290217d759f644836b68c1c1f7", "score": "0.41397303", "text": "func (client *Client) BeeBotAssociateWithOptions(tmpReq *BeeBotAssociateRequest, runtime *util.RuntimeOptions) (_result *BeeBotAssociateResponse, _err error) {\n\t_err = util.ValidateModel(tmpReq)\n\tif _err != nil {\n\t\treturn _result, _err\n\t}\n\trequest := &BeeBotAssociateShrinkRequest{}\n\topenapiutil.Convert(tmpReq, request)\n\tif !tea.BoolValue(util.IsUnset(tmpReq.Perspective)) {\n\t\trequest.PerspectiveShrink = openapiutil.ArrayToStringWithSpecifiedStyle(tmpReq.Perspective, tea.String(\"Perspective\"), tea.String(\"json\"))\n\t}\n\n\tbody := map[string]interface{}{}\n\tif !tea.BoolValue(util.IsUnset(request.ChatBotInstanceId)) {\n\t\tbody[\"ChatBotInstanceId\"] = request.ChatBotInstanceId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.CustSpaceId)) {\n\t\tbody[\"CustSpaceId\"] = request.CustSpaceId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.IsvCode)) {\n\t\tbody[\"IsvCode\"] = request.IsvCode\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.PerspectiveShrink)) {\n\t\tbody[\"Perspective\"] = request.PerspectiveShrink\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.RecommendNum)) {\n\t\tbody[\"RecommendNum\"] = request.RecommendNum\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.SessionId)) {\n\t\tbody[\"SessionId\"] = request.SessionId\n\t}\n\n\tif !tea.BoolValue(util.IsUnset(request.Utterance)) {\n\t\tbody[\"Utterance\"] = request.Utterance\n\t}\n\n\treq := &openapi.OpenApiRequest{\n\t\tBody: openapiutil.ParseToMap(body),\n\t}\n\tparams := &openapi.Params{\n\t\tAction: tea.String(\"BeeBotAssociate\"),\n\t\tVersion: tea.String(\"2020-06-06\"),\n\t\tProtocol: tea.String(\"HTTPS\"),\n\t\tPathname: tea.String(\"/\"),\n\t\tMethod: tea.String(\"POST\"),\n\t\tAuthType: tea.String(\"AK\"),\n\t\tStyle: tea.String(\"RPC\"),\n\t\tReqBodyType: tea.String(\"formData\"),\n\t\tBodyType: tea.String(\"json\"),\n\t}\n\t_result = &BeeBotAssociateResponse{}\n\t_body, _err := client.CallApi(params, req, runtime)\n\tif _err != nil {\n\t\treturn _result, _err\n\t}\n\t_err = tea.Convert(_body, &_result)\n\treturn _result, _err\n}", "title": "" }, { "docid": "064bc19a34980fd4be623788a871a6d3", "score": "0.41388655", "text": "func (ac *ActivitiesCreate) SetAcademicyearID(id int) *ActivitiesCreate {\n\tac.mutation.SetAcademicyearID(id)\n\treturn ac\n}", "title": "" }, { "docid": "94372735deb3e5f751b4dbb6e7c72c86", "score": "0.41299734", "text": "func withResourceID(id int) resourceOption {\n\treturn func(m *ResourceMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Resource\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Resource, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Resource.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, 
err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "ecca9d1da6bfc3a763883dd50cd9063a", "score": "0.41263777", "text": "func (h *Handler) approve(c echo.Context) (e error) {\n\tvar r approveRequest\n\tvar id int64\n\tvar fexpense *model.FinanceExpense\n\tctx := c.(*cuxs.Context)\n\tif id, e = common.Decrypt(ctx.Param(\"id\")); e == nil {\n\t\tif fexpense, e = ShowFinanceExpense(\"id\", id); e == nil {\n\t\t\tr.FinanceExpense = fexpense\n\t\t\tif e = ctx.Bind(&r); e == nil {\n\t\t\t\tif e = ApproveExpense(fexpense); e == nil {\n\t\t\t\t\tctx.Data(fexpense)\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\te = echo.ErrNotFound\n\t\t}\n\t}\n\n\treturn ctx.Serve(e)\n}", "title": "" }, { "docid": "1f966b30bf8a133a638cd1e77b73025a", "score": "0.41111997", "text": "func WithID(id string) Config {\n\treturn func(r *RequestOptions) {\n\t\tr.ID = id\n\t}\n}", "title": "" }, { "docid": "e237548fb00582a889e4180821072106", "score": "0.4099564", "text": "func AuthorizeID(allowed spiffeid.ID) Authorizer {\n\treturn AdaptMatcher(spiffeid.MatchID(allowed))\n}", "title": "" }, { "docid": "895fc3991f7e1cd17013eeff459be5dc", "score": "0.40904194", "text": "func ClaimAs(id uuid.UUID) ClaimOpt {\n\treturn func(q *ClaimQuery) {\n\t\tq.Claimant = id\n\t}\n}", "title": "" }, { "docid": "02b242705549e79b413a71d8d4b1e3a8", "score": "0.40854678", "text": "func (f *Funding) Withdraw(ctx context.Context, aclass AssetsClass, asset asset.Currency, key string, amount float64) (res *WithdrawResponse, err error) {\n\tbody := url.Values{\n\t\t\"aclass\": {string(aclass)},\n\t\t\"asset\": {asset.String()},\n\t\t\"key\": {key},\n\t\t\"amount\": {strconv.FormatFloat(amount, 'f', 4, 64)},\n\t}\n\n\treq, err := f.Client.DialWithAuth(ctx, http.MethodPost, WithdrawResource, body)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tkrakenResp, err := f.Client.Call(req)\n\tif err != nil {\n\t\treturn\n\t}\n\n\terr = krakenResp.ExtractResult(&res)\n\treturn\n}", "title": "" }, { "docid": "acf0f75a55ec758b1ae7c0e202d8a0ed", "score": "0.40823272", "text": "func WithJwtID(s string) ValidateOption {\n\treturn newValidateOption(optkeyJwtid, s)\n}", "title": "" }, { "docid": "82713c1e54e8828353a64324f6d0ceae", "score": "0.40735334", "text": "func (o *DeleteAPIParams) WithTid(tid string) *DeleteAPIParams {\n\to.SetTid(tid)\n\treturn o\n}", "title": "" }, { "docid": "56ccd5815eead3d870d7e307cabb1b66", "score": "0.40691182", "text": "func withAccountID(id uuid.UUID) accountOption {\n\treturn func(m *AccountMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Account\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Account, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Account.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "2379a34da073c1b4370cdbc691c1e3c2", "score": "0.4060681", "text": "func NewRevokeTokenParamsWithTimeout(timeout time.Duration) *RevokeTokenParams {\n\tvar ()\n\treturn &RevokeTokenParams{\n\n\t\ttimeout: timeout,\n\t}\n}", "title": "" }, { "docid": "495dc08abbad428dee946bf0e11352df", "score": "0.40544152", "text": "func withAPIKey(fn http.HandlerFunc) http.HandlerFunc {\n\n\t// return function performs a check for the key query parameter by calling isValidAPIKey\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tkey := r.URL.Query().Get(\"key\")\n\t\tif !isValidAPIKey(key) 
{\n\t\t\trespondErr(w, r, http.StatusUnauthorized, \"invalid API key\")\n\t\t\treturn\n\t\t}\n\t\t// otherwise we put the key into the context and call the next handler\n\t\tctx := context.WithValue(r.Context(),\n\t\t\tcontextKeyAPIKey, key)\n\t\tfn(w, r.WithContext(ctx))\n\t}\n}", "title": "" }, { "docid": "7f4175ef68798ee4dad105f6c27729f2", "score": "0.4053711", "text": "func (a *Client) OptionsLeasesIDAuth(params *OptionsLeasesIDAuthParams) (*OptionsLeasesIDAuthOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewOptionsLeasesIDAuthParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"OptionsLeasesIDAuth\",\n\t\tMethod: \"OPTIONS\",\n\t\tPathPattern: \"/leases/{id}/auth\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &OptionsLeasesIDAuthReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*OptionsLeasesIDAuthOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for OptionsLeasesIDAuth: API contract not enforced by server. Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "title": "" }, { "docid": "89b1b5479dc82ee53aaac5b70defcd73", "score": "0.40436137", "text": "func WithProjectID(id string) func(o *options) {\n\treturn func(o *options) {\n\t\to.projectID = id\n\t}\n}", "title": "" }, { "docid": "58754fb977bcbf7f8adb3f82557330ed", "score": "0.40374073", "text": "func withKeysID(id string) keysOption {\n\treturn func(m *KeysMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Keys\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Keys, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = errors.New(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Keys.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "175e5a29e8f2168a0f845f345897ead8", "score": "0.40372434", "text": "func (client *SnapshotsClient) revokeAccessCreateRequest(ctx context.Context, resourceGroupName string, snapshotName string, options *SnapshotsClientBeginRevokeAccessOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/endGetAccess\"\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\tif resourceGroupName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif snapshotName == \"\" {\n\t\treturn nil, errors.New(\"parameter snapshotName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{snapshotName}\", url.PathEscape(snapshotName))\n\treq, err := runtime.NewRequest(ctx, http.MethodPost, runtime.JoinPaths(client.host, urlPath))\n\tif err != nil 
{\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2022-07-02\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treturn req, nil\n}", "title": "" }, { "docid": "93be83943dd6792f918acdcde65d9e93", "score": "0.4035941", "text": "func (o *RevokeParams) SetTimeout(timeout time.Duration) {\n\to.timeout = timeout\n}", "title": "" }, { "docid": "a6f1ce2ab7f8ec6a017a9489bb83a3e7", "score": "0.40347004", "text": "func withDegreeID(id int) degreeOption {\n\treturn func(m *DegreeMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Degree\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Degree, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Degree.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "872663114a90d7d7c7594c429afa11ed", "score": "0.40340737", "text": "func (m *ApplicationItemRequestBuilder) TokenIssuancePoliciesById(id string)(*id868d42838b59366f189543d6b65b78e527af17acc61cbc962cae87c3c961e40.TokenIssuancePolicyItemRequestBuilder) {\n urlTplParams := make(map[string]string)\n for idx, item := range m.pathParameters {\n urlTplParams[idx] = item\n }\n if id != \"\" {\n urlTplParams[\"tokenIssuancePolicy%2Did\"] = id\n }\n return id868d42838b59366f189543d6b65b78e527af17acc61cbc962cae87c3c961e40.NewTokenIssuancePolicyItemRequestBuilderInternal(urlTplParams, m.requestAdapter);\n}", "title": "" }, { "docid": "3c39dd99f83940ab6965eeeef789d208", "score": "0.40329278", "text": "func (o *UnfavoriteARecipeByIDParams) WithContext(ctx context.Context) *UnfavoriteARecipeByIDParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "a5682e239e04be8e15c43a1cb4cfd26f", "score": "0.40308478", "text": "func (client *PolicyFragmentClient) deleteCreateRequest(ctx context.Context, resourceGroupName string, serviceName string, id string, ifMatch string, options *PolicyFragmentClientDeleteOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/policyFragments/{id}\"\n\tif resourceGroupName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif serviceName == \"\" {\n\t\treturn nil, errors.New(\"parameter serviceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{serviceName}\", url.PathEscape(serviceName))\n\tif id == \"\" {\n\t\treturn nil, errors.New(\"parameter id cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{id}\", url.PathEscape(id))\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\treq, err := runtime.NewRequest(ctx, http.MethodDelete, runtime.JoinPaths(client.internal.Endpoint(), urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2022-08-01\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header[\"If-Match\"] = []string{ifMatch}\n\treq.Raw().Header[\"Accept\"] = []string{\"application/json\"}\n\treturn req, nil\n}", "title": "" }, { "docid": 
"3b600df68e221d2a5d3fb77ab4b37979", "score": "0.40307397", "text": "func (client *DisksClient) revokeAccessCreateRequest(ctx context.Context, resourceGroupName string, diskName string, options *DisksClientBeginRevokeAccessOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/disks/{diskName}/endGetAccess\"\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\tif resourceGroupName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif diskName == \"\" {\n\t\treturn nil, errors.New(\"parameter diskName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{diskName}\", url.PathEscape(diskName))\n\treq, err := runtime.NewRequest(ctx, http.MethodPost, runtime.JoinPaths(client.host, urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2022-07-02\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treturn req, nil\n}", "title": "" }, { "docid": "031ea36916073afc39afea0c2f286a27", "score": "0.4021791", "text": "func (c *FirstAndThirdPartyAudiencesPatchCall) AdvertiserId(advertiserId int64) *FirstAndThirdPartyAudiencesPatchCall {\n\tc.urlParams_.Set(\"advertiserId\", fmt.Sprint(advertiserId))\n\treturn c\n}", "title": "" }, { "docid": "ed1bcb10b229035e7a5f158f11be785b", "score": "0.4020049", "text": "func WithAudience(audience string) Option {\n\treturn func(opts *options) {\n\t\topts.audience = audience\n\t}\n}", "title": "" }, { "docid": "660deb475e5303599fdc4360513990c2", "score": "0.40166375", "text": "func withRefreshTokenID(id string) refreshtokenOption {\n\treturn func(m *RefreshTokenMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *RefreshToken\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*RefreshToken, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = errors.New(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().RefreshToken.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "title": "" }, { "docid": "22cccb97ee9e44d1966607fe6b07c3f2", "score": "0.40111122", "text": "func expireLeaseStrategyRevoke(ctx context.Context, m *ExpirationManager, le *leaseEntry) {\n\tfor attempt := uint(0); attempt < maxRevokeAttempts; attempt++ {\n\t\trevokeCtx, cancel := context.WithTimeout(ctx, DefaultMaxRequestDuration)\n\t\trevokeCtx = namespace.ContextWithNamespace(revokeCtx, le.namespace)\n\n\t\tgo func() {\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\tcase <-m.quitCh:\n\t\t\t\tcancel()\n\t\t\tcase <-revokeCtx.Done():\n\t\t\t}\n\t\t}()\n\n\t\tselect {\n\t\tcase <-m.quitCh:\n\t\t\tm.logger.Error(\"shutting down, not attempting further revocation of lease\", \"lease_id\", le.LeaseID)\n\t\t\tcancel()\n\t\t\treturn\n\t\tcase <-m.quitContext.Done():\n\t\t\tm.logger.Error(\"core context canceled, not attempting further revocation of lease\", \"lease_id\", le.LeaseID)\n\t\t\tcancel()\n\t\t\treturn\n\t\tdefault:\n\t\t}\n\n\t\tm.coreStateLock.RLock()\n\t\terr := m.Revoke(revokeCtx, le.LeaseID)\n\t\tm.coreStateLock.RUnlock()\n\t\tcancel()\n\t\tif err == nil 
{\n\t\t\treturn\n\t\t}\n\n\t\tm.logger.Error(\"failed to revoke lease\", \"lease_id\", le.LeaseID, \"error\", err)\n\t\ttime.Sleep((1 << attempt) * revokeRetryBase)\n\t}\n\tm.logger.Error(\"maximum revoke attempts reached\", \"lease_id\", le.LeaseID)\n}", "title": "" }, { "docid": "071bc7a04fe74e1f2f5fda91a522bab3", "score": "0.39976352", "text": "func (c *Client) Revoke(req *api.RevokeRequest, tr http.RoundTripper) (*api.RevokeResponse, error) {\n\treturn c.RevokeWithContext(context.Background(), req, tr)\n}", "title": "" }, { "docid": "5d41aa8f2cbe875166aed45392dc286e", "score": "0.39929712", "text": "func (a *Adapter) WithControllerID(id string) *Adapter {\n\ta.controllerID = id\n\treturn a\n}", "title": "" }, { "docid": "a88c3d42370f86a9ffe1677a6f753b52", "score": "0.39925", "text": "func (client *PeerAsnsClient) deleteCreateRequest(ctx context.Context, peerAsnName string, options *PeerAsnsClientDeleteOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/providers/Microsoft.Peering/peerAsns/{peerAsnName}\"\n\tif peerAsnName == \"\" {\n\t\treturn nil, errors.New(\"parameter peerAsnName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{peerAsnName}\", url.PathEscape(peerAsnName))\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\treq, err := runtime.NewRequest(ctx, http.MethodDelete, runtime.JoinPaths(client.internal.Endpoint(), urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2022-01-01\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header[\"Accept\"] = []string{\"application/json\"}\n\treturn req, nil\n}", "title": "" }, { "docid": "13c2a6916ac766a3852884a675c11f9b", "score": "0.39909655", "text": "func (client *GenericClient) createOrUpdateByIDCreateRequest(\n\tctx context.Context,\n\tresourceID string,\n\tapiVersion string,\n\tresource interface{}) (*policy.Request, error) {\n\n\tif resourceID == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceID cannot be empty\")\n\t}\n\n\turlPath := resourceID\n\treq, err := runtime.NewRequest(ctx, http.MethodPut, runtime.JoinPaths(client.endpoint, urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", apiVersion)\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header.Set(\"Accept\", \"application/json\")\n\treturn req, runtime.MarshalAsJSON(req, resource)\n}", "title": "" }, { "docid": "0cf4ade49d18b8915fe678eb7bb4b8f1", "score": "0.39907706", "text": "func (c Client) Withdrawal(ctx context.Context, accID vos.AccountID, amount vos.Money) error {\n\tconst operation = \"accounts.Client.Withdrawal\"\n\t_, err := c.client.Withdrawal(ctx, &Request{\n\t\tAccountID: accID.String(),\n\t\tAmount: amount.Int64(),\n\t})\n\tif err != nil {\n\t\treturn parseServerErr(operation, err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "11a5f51ffb8338fa4a47eaf6358bb2f7", "score": "0.39879304", "text": "func (a *ReservationActionsApiService) BookingReservationActionsByIdAmendforcePut(ctx _context.Context, id string) ApiBookingReservationActionsByIdAmendforcePutRequest {\n\treturn ApiBookingReservationActionsByIdAmendforcePutRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tid: id,\n\t}\n}", "title": "" } ]
b28f68389e6612ee7def520851dd3db0
NewSparseProcessingUnitRefresh returns a new SparseProcessingUnitRefresh.
[ { "docid": "bc286d7483ea30341b2c85d35d3c49af", "score": "0.88993704", "text": "func NewSparseProcessingUnitRefresh() *SparseProcessingUnitRefresh {\n\treturn &SparseProcessingUnitRefresh{}\n}", "title": "" } ]
[ { "docid": "68f6ed5778ed924ff4e97078a392808c", "score": "0.69863105", "text": "func (o *ProcessingUnitRefresh) ToSparse(fields ...string) elemental.SparseIdentifiable {\n\n\tif len(fields) == 0 {\n\t\t// nolint: goimports\n\t\treturn &SparseProcessingUnitRefresh{\n\t\t\tID: &o.ID,\n\t\t\tDebug: &o.Debug,\n\t\t\tNamespace: &o.Namespace,\n\t\t\tPingAddress: &o.PingAddress,\n\t\t\tPingEnabled: &o.PingEnabled,\n\t\t\tPingIterations: &o.PingIterations,\n\t\t\tPingMode: &o.PingMode,\n\t\t\tPingPort: &o.PingPort,\n\t\t\tRefreshID: &o.RefreshID,\n\t\t\tRefreshPolicy: &o.RefreshPolicy,\n\t\t\tTraceApplicationConnections: &o.TraceApplicationConnections,\n\t\t\tTraceDuration: &o.TraceDuration,\n\t\t\tTraceIPTables: &o.TraceIPTables,\n\t\t\tTraceNetworkConnections: &o.TraceNetworkConnections,\n\t\t}\n\t}\n\n\tsp := &SparseProcessingUnitRefresh{}\n\tfor _, f := range fields {\n\t\tswitch f {\n\t\tcase \"ID\":\n\t\t\tsp.ID = &(o.ID)\n\t\tcase \"debug\":\n\t\t\tsp.Debug = &(o.Debug)\n\t\tcase \"namespace\":\n\t\t\tsp.Namespace = &(o.Namespace)\n\t\tcase \"pingAddress\":\n\t\t\tsp.PingAddress = &(o.PingAddress)\n\t\tcase \"pingEnabled\":\n\t\t\tsp.PingEnabled = &(o.PingEnabled)\n\t\tcase \"pingIterations\":\n\t\t\tsp.PingIterations = &(o.PingIterations)\n\t\tcase \"pingMode\":\n\t\t\tsp.PingMode = &(o.PingMode)\n\t\tcase \"pingPort\":\n\t\t\tsp.PingPort = &(o.PingPort)\n\t\tcase \"refreshID\":\n\t\t\tsp.RefreshID = &(o.RefreshID)\n\t\tcase \"refreshPolicy\":\n\t\t\tsp.RefreshPolicy = &(o.RefreshPolicy)\n\t\tcase \"traceApplicationConnections\":\n\t\t\tsp.TraceApplicationConnections = &(o.TraceApplicationConnections)\n\t\tcase \"traceDuration\":\n\t\t\tsp.TraceDuration = &(o.TraceDuration)\n\t\tcase \"traceIPTables\":\n\t\t\tsp.TraceIPTables = &(o.TraceIPTables)\n\t\tcase \"traceNetworkConnections\":\n\t\t\tsp.TraceNetworkConnections = &(o.TraceNetworkConnections)\n\t\t}\n\t}\n\n\treturn sp\n}", "title": "" }, { "docid": "3e49d5c6b5da26e4d1d89c94ed334ef0", "score": "0.68048805", "text": "func (o *SparseProcessingUnitRefresh) DeepCopy() *SparseProcessingUnitRefresh {\n\n\tif o == nil {\n\t\treturn nil\n\t}\n\n\tout := &SparseProcessingUnitRefresh{}\n\to.DeepCopyInto(out)\n\n\treturn out\n}", "title": "" }, { "docid": "d86a95c2b4a01d455e488b036eb1fd1b", "score": "0.6312328", "text": "func (o ProcessingUnitRefreshsList) ToSparse(fields ...string) elemental.Identifiables {\n\n\tout := make(SparseProcessingUnitRefreshsList, len(o))\n\tfor i := 0; i < len(o); i++ {\n\t\tout[i] = o[i].ToSparse(fields...).(*SparseProcessingUnitRefresh)\n\t}\n\n\treturn out\n}", "title": "" }, { "docid": "1cd48c7eb800ddca68c6aa98f5259fda", "score": "0.62215143", "text": "func NewProcessingUnitRefresh() *ProcessingUnitRefresh {\n\n\treturn &ProcessingUnitRefresh{\n\t\tModelVersion: 1,\n\t\tPingIterations: 1,\n\t\tPingMode: ProcessingUnitRefreshPingModeAuto,\n\t\tTraceDuration: \"10s\",\n\t}\n}", "title": "" }, { "docid": "78ee1fa8cafd86c3c2eecc6ba04dec32", "score": "0.5612742", "text": "func (o *ProcessingUnitRefresh) Patch(sparse elemental.SparseIdentifiable) {\n\tif !sparse.Identity().IsEqual(o.Identity()) {\n\t\tpanic(\"cannot patch from a parse with different identity\")\n\t}\n\n\tso := sparse.(*SparseProcessingUnitRefresh)\n\tif so.ID != nil {\n\t\to.ID = *so.ID\n\t}\n\tif so.Debug != nil {\n\t\to.Debug = *so.Debug\n\t}\n\tif so.Namespace != nil {\n\t\to.Namespace = *so.Namespace\n\t}\n\tif so.PingAddress != nil {\n\t\to.PingAddress = *so.PingAddress\n\t}\n\tif so.PingEnabled != nil {\n\t\to.PingEnabled = 
*so.PingEnabled\n\t}\n\tif so.PingIterations != nil {\n\t\to.PingIterations = *so.PingIterations\n\t}\n\tif so.PingMode != nil {\n\t\to.PingMode = *so.PingMode\n\t}\n\tif so.PingPort != nil {\n\t\to.PingPort = *so.PingPort\n\t}\n\tif so.RefreshID != nil {\n\t\to.RefreshID = *so.RefreshID\n\t}\n\tif so.RefreshPolicy != nil {\n\t\to.RefreshPolicy = *so.RefreshPolicy\n\t}\n\tif so.TraceApplicationConnections != nil {\n\t\to.TraceApplicationConnections = *so.TraceApplicationConnections\n\t}\n\tif so.TraceDuration != nil {\n\t\to.TraceDuration = *so.TraceDuration\n\t}\n\tif so.TraceIPTables != nil {\n\t\to.TraceIPTables = *so.TraceIPTables\n\t}\n\tif so.TraceNetworkConnections != nil {\n\t\to.TraceNetworkConnections = *so.TraceNetworkConnections\n\t}\n}", "title": "" }, { "docid": "a3120e513a3fac12c9790ae1e76e9f6e", "score": "0.5446559", "text": "func NewSparseHit() *SparseHit {\n\treturn &SparseHit{}\n}", "title": "" }, { "docid": "743eaf5c4b6053e3a888df033cd3d8f7", "score": "0.5345004", "text": "func (o *SparseProcessingUnitRefresh) ToPlain() elemental.PlainIdentifiable {\n\n\tout := NewProcessingUnitRefresh()\n\tif o.ID != nil {\n\t\tout.ID = *o.ID\n\t}\n\tif o.Debug != nil {\n\t\tout.Debug = *o.Debug\n\t}\n\tif o.Namespace != nil {\n\t\tout.Namespace = *o.Namespace\n\t}\n\tif o.PingAddress != nil {\n\t\tout.PingAddress = *o.PingAddress\n\t}\n\tif o.PingEnabled != nil {\n\t\tout.PingEnabled = *o.PingEnabled\n\t}\n\tif o.PingIterations != nil {\n\t\tout.PingIterations = *o.PingIterations\n\t}\n\tif o.PingMode != nil {\n\t\tout.PingMode = *o.PingMode\n\t}\n\tif o.PingPort != nil {\n\t\tout.PingPort = *o.PingPort\n\t}\n\tif o.RefreshID != nil {\n\t\tout.RefreshID = *o.RefreshID\n\t}\n\tif o.RefreshPolicy != nil {\n\t\tout.RefreshPolicy = *o.RefreshPolicy\n\t}\n\tif o.TraceApplicationConnections != nil {\n\t\tout.TraceApplicationConnections = *o.TraceApplicationConnections\n\t}\n\tif o.TraceDuration != nil {\n\t\tout.TraceDuration = *o.TraceDuration\n\t}\n\tif o.TraceIPTables != nil {\n\t\tout.TraceIPTables = *o.TraceIPTables\n\t}\n\tif o.TraceNetworkConnections != nil {\n\t\tout.TraceNetworkConnections = *o.TraceNetworkConnections\n\t}\n\n\treturn out\n}", "title": "" }, { "docid": "81facc1f4262a38962c3c4c95d731365", "score": "0.5304566", "text": "func NewSparseAlarm() *SparseAlarm {\n\treturn &SparseAlarm{}\n}", "title": "" }, { "docid": "e7909a93e6333e9f301833cd3f1fcab5", "score": "0.52352023", "text": "func NewSparsePlan() *SparsePlan {\n\treturn &SparsePlan{}\n}", "title": "" }, { "docid": "ee580a18cba60572dbb433bb7692845e", "score": "0.51756245", "text": "func NewSparseMessage() *SparseMessage {\n\treturn &SparseMessage{}\n}", "title": "" }, { "docid": "52437d89917b0e90208d92a202ea84ad", "score": "0.5057369", "text": "func NewSparseSolverC(kind string) SparseSolverC {\n\tif maker, ok := spSolverDBc[kind]; ok {\n\t\treturn maker()\n\t}\n\tchk.Panic(\"cannot find SparseSolverC named %q in database\", kind)\n\treturn nil\n}", "title": "" }, { "docid": "2a23d0d57c4fd58aeba8205dc8ea51c8", "score": "0.49992323", "text": "func NewSparseServicePublication() *SparseServicePublication {\n\treturn &SparseServicePublication{}\n}", "title": "" }, { "docid": "9a40a9621f7b2789e7f84c8760551654", "score": "0.49896845", "text": "func NewSparse(n int, cMap map[int]map[int]complex128, doCopy bool) MutableMatrix {\n\tif cMap == nil {\n\t\treturn &sparseMatrix{n, make(map[int]map[int]complex128)}\n\t}\n\n\tif doCopy {\n\t\t_cMap := make(map[int]map[int]complex128)\n\t\tfor i, c_i := range cMap {\n\t\t\t_c_i := 
make(map[int]complex128)\n\t\t\tfor j, c_ij := range c_i {\n\t\t\t\t_c_i[j] = c_ij\n\t\t\t}\n\t\t\t_cMap[i] = _c_i\n\t\t}\n\t\treturn &sparseMatrix{n, _cMap}\n\n\t} else {\n\t\treturn &sparseMatrix{n, cMap}\n\t}\n}", "title": "" }, { "docid": "ec17fddde79c0e3bf58a57a265e37915", "score": "0.49871367", "text": "func NewSparseNamespaceRenderer() *SparseNamespaceRenderer {\n\treturn &SparseNamespaceRenderer{}\n}", "title": "" }, { "docid": "754b6ca9ed54e73028198efa22b9b54c", "score": "0.4930907", "text": "func NewSparseDeviceSet(s int) *DeviceSet {\n\treturn &DeviceSet{\n\t\tSetSize: s,\n\t\tDevices: make([]*DeviceEntry, s),\n\t}\n}", "title": "" }, { "docid": "21716fbf4f872e3672d0ba4030dd4a4e", "score": "0.48753428", "text": "func NewSparseSolver(kind string) SparseSolver {\n\tif maker, ok := spSolverDB[kind]; ok {\n\t\treturn maker()\n\t}\n\tchk.Panic(\"cannot find SparseSolver named %q in database\", kind)\n\treturn nil\n}", "title": "" }, { "docid": "4ba7e45e7ccb56e47cfeac3372ed3a90", "score": "0.48744202", "text": "func NewSparseFileAccessReport() *SparseFileAccessReport {\n\treturn &SparseFileAccessReport{}\n}", "title": "" }, { "docid": "e0bde6d3732af9af54d5b28b70f5b842", "score": "0.4869792", "text": "func NewSparseLocalCA() *SparseLocalCA {\n\treturn &SparseLocalCA{}\n}", "title": "" }, { "docid": "d3c81519db94e73fb483254c9b7544d9", "score": "0.48634717", "text": "func NewSparseFlowReport() *SparseFlowReport {\n\treturn &SparseFlowReport{}\n}", "title": "" }, { "docid": "7fe455981367398d4c83dc7d70505fbc", "score": "0.48584005", "text": "func NewSparseAccessReport() *SparseAccessReport {\n\treturn &SparseAccessReport{}\n}", "title": "" }, { "docid": "11e5e6d0a47074d3cd47012481d769a9", "score": "0.479424", "text": "func NewSparseAWSRegister() *SparseAWSRegister {\n\treturn &SparseAWSRegister{}\n}", "title": "" }, { "docid": "f7293b079f6eeb6ea4be7d95fc23f64b", "score": "0.47898373", "text": "func (o *ProcessingUnitRefresh) DeepCopy() *ProcessingUnitRefresh {\n\n\tif o == nil {\n\t\treturn nil\n\t}\n\n\tout := &ProcessingUnitRefresh{}\n\to.DeepCopyInto(out)\n\n\treturn out\n}", "title": "" }, { "docid": "aa4483ceea88b78868f94b63aec3251f", "score": "0.47011372", "text": "func NewSparseActivate() *SparseActivate {\n\treturn &SparseActivate{}\n}", "title": "" }, { "docid": "1730a02284f8effd0db8f3d399c4f1ac", "score": "0.46784636", "text": "func (o *SparseProcessingUnitRefresh) Identity() elemental.Identity {\n\n\treturn ProcessingUnitRefreshIdentity\n}", "title": "" }, { "docid": "1739a70f56d6255ea6e872c278c274e0", "score": "0.4663941", "text": "func (o *SparseProcessingUnitRefresh) Version() int {\n\n\treturn 1\n}", "title": "" }, { "docid": "a92054b117549677f3bd15fa79f8fd4e", "score": "0.4641868", "text": "func (o SparseProcessingUnitRefreshsList) Copy() elemental.Identifiables {\n\n\tcopy := append(SparseProcessingUnitRefreshsList{}, o...)\n\treturn &copy\n}", "title": "" }, { "docid": "32527640e4250f26a8c8eac4eab807d6", "score": "0.45978403", "text": "func (o *ServicePublication) ToSparse(fields ...string) elemental.SparseIdentifiable {\n\n\tif len(fields) == 0 {\n\t\t// nolint: goimports\n\t\treturn &SparseServicePublication{\n\t\t\tService: o.Service,\n\t\t}\n\t}\n\n\tsp := &SparseServicePublication{}\n\tfor _, f := range fields {\n\t\tswitch f {\n\t\tcase \"service\":\n\t\t\tsp.Service = o.Service\n\t\t}\n\t}\n\n\treturn sp\n}", "title": "" }, { "docid": "74ae2e9db50343ce9fde0705b6ef0c69", "score": "0.45950916", "text": "func NewSparseMax(x Operand) *SparseMax {\n\treturn 
&SparseMax{x: x}\n}", "title": "" }, { "docid": "d2d7409dfd9c30335b410911053de28c", "score": "0.45427543", "text": "func newStationsFull(vres *stationviews.StationsFullView) *StationsFull {\n\tres := &StationsFull{}\n\tif vres.Stations != nil {\n\t\tres.Stations = newStationFullCollection(vres.Stations)\n\t}\n\treturn res\n}", "title": "" }, { "docid": "c40a1ff5d71ecb9f723b35d02d6e66d3", "score": "0.45064145", "text": "func NewSparseBrickSet(s int) *BrickSet {\n\treturn &BrickSet{\n\t\tSetSize: s,\n\t\tBricks: make([]*BrickEntry, s),\n\t}\n}", "title": "" }, { "docid": "22eb786181cff0652443a4e51ee4991a", "score": "0.45011982", "text": "func newStationFull(vres *stationviews.StationFullView) *StationFull {\n\tres := &StationFull{\n\t\tBattery: vres.Battery,\n\t\tRecordingStartedAt: vres.RecordingStartedAt,\n\t\tMemoryUsed: vres.MemoryUsed,\n\t\tMemoryAvailable: vres.MemoryAvailable,\n\t\tFirmwareNumber: vres.FirmwareNumber,\n\t\tFirmwareTime: vres.FirmwareTime,\n\t\tLocationName: vres.LocationName,\n\t}\n\tif vres.ID != nil {\n\t\tres.ID = *vres.ID\n\t}\n\tif vres.Name != nil {\n\t\tres.Name = *vres.Name\n\t}\n\tif vres.DeviceID != nil {\n\t\tres.DeviceID = *vres.DeviceID\n\t}\n\tif vres.ReadOnly != nil {\n\t\tres.ReadOnly = *vres.ReadOnly\n\t}\n\tif vres.Updated != nil {\n\t\tres.Updated = *vres.Updated\n\t}\n\tif vres.Owner != nil {\n\t\tres.Owner = transformStationviewsStationOwnerViewToStationOwner(vres.Owner)\n\t}\n\tif vres.Uploads != nil {\n\t\tres.Uploads = make([]*StationUpload, len(vres.Uploads))\n\t\tfor i, val := range vres.Uploads {\n\t\t\tres.Uploads[i] = transformStationviewsStationUploadViewToStationUpload(val)\n\t\t}\n\t}\n\tif vres.Images != nil {\n\t\tres.Images = make([]*ImageRef, len(vres.Images))\n\t\tfor i, val := range vres.Images {\n\t\t\tres.Images[i] = transformStationviewsImageRefViewToImageRef(val)\n\t\t}\n\t}\n\tif vres.Photos != nil {\n\t\tres.Photos = transformStationviewsStationPhotosViewToStationPhotos(vres.Photos)\n\t}\n\tif vres.Configurations != nil {\n\t\tres.Configurations = transformStationviewsStationConfigurationsViewToStationConfigurations(vres.Configurations)\n\t}\n\tif vres.Location != nil {\n\t\tres.Location = transformStationviewsStationLocationViewToStationLocation(vres.Location)\n\t}\n\treturn res\n}", "title": "" }, { "docid": "55f325b7a651bc652d6014936197a920", "score": "0.44987682", "text": "func (s *ssTable) rebuildSparseIndex() {\n\ts.index = rbt.NewRBTree()\n\n\tfile, err := os.OpenFile(s.config.filename, os.O_RDONLY, 0600)\n\tif err != nil {\n\t\tlog.Printf(\"[ERROR]: Can't read sstable file=%s, err:%v\", s.config.filename, err)\n\t\treturn\n\t}\n\tdefer file.Close()\n\n\tscanner := newBinFileScanner(file, s.config.readBufferSize)\n\n\toffset := 0\n\tpreviousKeyOffset := 0\n\n\tfor scanner.Scan() {\n\t\tentry, _ := entry.NewDBEntry(scanner.Bytes())\n\n\t\tif s.index.Size() == 0 || offset-previousKeyOffset > s.config.readBufferSize {\n\t\t\ts.index.Put(entry.Key, offset)\n\t\t\tpreviousKeyOffset = offset\n\t\t}\n\t\toffset += entry.Length()\n\t}\n}", "title": "" }, { "docid": "13a4ba6b8dcd96089ac44e571c6b1299", "score": "0.4489577", "text": "func NewSparseGraph(n int, directed bool) *SparseGraph {\n\tg := make([]List, n)\n\tfor i := 0; i < n; i++ {\n\t\tg[i] = *NewList()\n\t}\n\treturn &SparseGraph{\n\t\tvertexNum: n,\n\t\tedgeNum: 0,\n\t\tdirected: directed,\n\t\tg: g,\n\t}\n}", "title": "" }, { "docid": "6bc8735b9fbfb754fdfaf4ddb08fac50", "score": "0.445133", "text": "func NewSparseIPInfo() *SparseIPInfo {\n\treturn 
&SparseIPInfo{}\n}", "title": "" }, { "docid": "112f41610f254f02b16a761b0cd144e1", "score": "0.4445938", "text": "func NewSparseCloudAccountCleaner() *SparseCloudAccountCleaner {\n\treturn &SparseCloudAccountCleaner{}\n}", "title": "" }, { "docid": "689e43f4181b1c67aaca969abfa3c581", "score": "0.44327402", "text": "func (o SparseProcessingUnitRefreshsList) Identity() elemental.Identity {\n\n\treturn ProcessingUnitRefreshIdentity\n}", "title": "" }, { "docid": "6033178044f7f06bb4a0ff2436e399d7", "score": "0.4422113", "text": "func (o *FileAccessReport) ToSparse(fields ...string) elemental.SparseIdentifiable {\n\n\tif len(fields) == 0 {\n\t\t// nolint: goimports\n\t\treturn &SparseFileAccessReport{\n\t\t\tID: &o.ID,\n\t\t\tAction: &o.Action,\n\t\t\tHost: &o.Host,\n\t\t\tMigrationsLog: &o.MigrationsLog,\n\t\t\tMode: &o.Mode,\n\t\t\tPath: &o.Path,\n\t\t\tProcessingUnitID: &o.ProcessingUnitID,\n\t\t\tProcessingUnitNamespace: &o.ProcessingUnitNamespace,\n\t\t\tTimestamp: &o.Timestamp,\n\t\t\tZHash: &o.ZHash,\n\t\t\tZone: &o.Zone,\n\t\t}\n\t}\n\n\tsp := &SparseFileAccessReport{}\n\tfor _, f := range fields {\n\t\tswitch f {\n\t\tcase \"ID\":\n\t\t\tsp.ID = &(o.ID)\n\t\tcase \"action\":\n\t\t\tsp.Action = &(o.Action)\n\t\tcase \"host\":\n\t\t\tsp.Host = &(o.Host)\n\t\tcase \"migrationsLog\":\n\t\t\tsp.MigrationsLog = &(o.MigrationsLog)\n\t\tcase \"mode\":\n\t\t\tsp.Mode = &(o.Mode)\n\t\tcase \"path\":\n\t\t\tsp.Path = &(o.Path)\n\t\tcase \"processingUnitID\":\n\t\t\tsp.ProcessingUnitID = &(o.ProcessingUnitID)\n\t\tcase \"processingUnitNamespace\":\n\t\t\tsp.ProcessingUnitNamespace = &(o.ProcessingUnitNamespace)\n\t\tcase \"timestamp\":\n\t\t\tsp.Timestamp = &(o.Timestamp)\n\t\tcase \"zHash\":\n\t\t\tsp.ZHash = &(o.ZHash)\n\t\tcase \"zone\":\n\t\t\tsp.Zone = &(o.Zone)\n\t\t}\n\t}\n\n\treturn sp\n}", "title": "" }, { "docid": "fca014a3fdaa540bcba70208478c9980", "score": "0.44036627", "text": "func (o SparseProcessingUnitRefreshsList) Version() int {\n\n\treturn 1\n}", "title": "" }, { "docid": "a22a9ca789a8b7bf51c29f6605fc7607", "score": "0.44028988", "text": "func NewSparseAccount() *SparseAccount {\n\treturn &SparseAccount{}\n}", "title": "" }, { "docid": "4ead702172b2d99b5eacd48932be8440", "score": "0.43791044", "text": "func newTestStorage() *Storage {\n\tstreamIDCache := workingsetcache.New(1024 * 1024)\n\tstreamFilterCache := workingsetcache.New(1024 * 1024)\n\treturn &Storage{\n\t\tflushInterval: time.Second,\n\t\tstreamIDCache: streamIDCache,\n\t\tstreamFilterCache: streamFilterCache,\n\t}\n}", "title": "" }, { "docid": "7f09b254926dd85fa8ea8a7926321996", "score": "0.43696564", "text": "func New(provider storage.Provider) (*Store, error) {\n\tstore, err := provider.OpenStore(namespace)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to open vc-status store: %w\", err)\n\t}\n\n\terr = provider.SetStoreConfig(namespace, storage.StoreConfiguration{TagNames: []string{index}})\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to set store configuration: %w\", err)\n\t}\n\n\treturn &Store{\n\t\tstore: store,\n\t}, nil\n}", "title": "" }, { "docid": "4b57ed51bdcb4f5f396a29f00ed4cade", "score": "0.43349293", "text": "func NewSparseAuditReport() *SparseAuditReport {\n\treturn &SparseAuditReport{}\n}", "title": "" }, { "docid": "4b57ed51bdcb4f5f396a29f00ed4cade", "score": "0.43349293", "text": "func NewSparseAuditReport() *SparseAuditReport {\n\treturn &SparseAuditReport{}\n}", "title": "" }, { "docid": "70e2abb09b8deaae7c52b406017cbec4", "score": "0.4326083", "text": "func (o *LocalCA) 
ToSparse(fields ...string) elemental.SparseIdentifiable {\n\n\tif len(fields) == 0 {\n\t\t// nolint: goimports\n\t\treturn &SparseLocalCA{\n\t\t\tSSHCertificate: &o.SSHCertificate,\n\t\t\tSSHCertificateRenew: &o.SSHCertificateRenew,\n\t\t\tCertificate: &o.Certificate,\n\t\t\tCertificateRenew: &o.CertificateRenew,\n\t\t}\n\t}\n\n\tsp := &SparseLocalCA{}\n\tfor _, f := range fields {\n\t\tswitch f {\n\t\tcase \"SSHCertificate\":\n\t\t\tsp.SSHCertificate = &(o.SSHCertificate)\n\t\tcase \"SSHCertificateRenew\":\n\t\t\tsp.SSHCertificateRenew = &(o.SSHCertificateRenew)\n\t\tcase \"certificate\":\n\t\t\tsp.Certificate = &(o.Certificate)\n\t\tcase \"certificateRenew\":\n\t\t\tsp.CertificateRenew = &(o.CertificateRenew)\n\t\t}\n\t}\n\n\treturn sp\n}", "title": "" }, { "docid": "a367ab4025629ebb1b4b2e816f1796ab", "score": "0.43169355", "text": "func NewSparseBitSet() *SparseBitSet {\n\tb := &SparseBitSet{\n\t\tdata: make(map[int]uint64),\n\t}\n\treturn b\n}", "title": "" }, { "docid": "e93de742d8f81bb44deff9b1b949dd85", "score": "0.43097642", "text": "func (o SparseProcessingUnitRefreshsList) Append(objects ...elemental.Identifiable) elemental.Identifiables {\n\n\tout := append(SparseProcessingUnitRefreshsList{}, o...)\n\tfor _, obj := range objects {\n\t\tout = append(out, obj.(*SparseProcessingUnitRefresh))\n\t}\n\n\treturn out\n}", "title": "" }, { "docid": "0ae32e9cb9f08d4c0a360725a27ccb2a", "score": "0.4258812", "text": "func NewSecretSparse(id string, name string) *SecretSparse {\n\tthis := SecretSparse{}\n\tthis.Id = id\n\tthis.Name = name\n\treturn &this\n}", "title": "" }, { "docid": "c23a8e532096ac4885b362ba854112c3", "score": "0.42549974", "text": "func (o *AWSRegister) ToSparse(fields ...string) elemental.SparseIdentifiable {\n\n\tif len(fields) == 0 {\n\t\t// nolint: goimports\n\t\treturn &SparseAWSRegister{\n\t\t\tID: &o.ID,\n\t\t\tCreateTime: &o.CreateTime,\n\t\t\tProvider: &o.Provider,\n\t\t\tUpdateTime: &o.UpdateTime,\n\t\t}\n\t}\n\n\tsp := &SparseAWSRegister{}\n\tfor _, f := range fields {\n\t\tswitch f {\n\t\tcase \"ID\":\n\t\t\tsp.ID = &(o.ID)\n\t\tcase \"createTime\":\n\t\t\tsp.CreateTime = &(o.CreateTime)\n\t\tcase \"provider\":\n\t\t\tsp.Provider = &(o.Provider)\n\t\tcase \"updateTime\":\n\t\t\tsp.UpdateTime = &(o.UpdateTime)\n\t\t}\n\t}\n\n\treturn sp\n}", "title": "" }, { "docid": "c3408a1dda2b3e67abe77b5d32ce9d8a", "score": "0.42506453", "text": "func (dm *DagModifier) expandSparse(size int64) error {\n\tr := io.LimitReader(zeroReader{}, size)\n\tspl := chunker.NewSizeSplitter(r, 4096)\n\tnnode, err := dm.appendData(dm.curNode, spl)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = dm.dagserv.Add(dm.ctx, nnode)\n\treturn err\n}", "title": "" }, { "docid": "7f4d8b52a923300dad6b7184c3a6decf", "score": "0.42305508", "text": "func New(gpid uint64) *Snowflake {\n\tgpid &= 0xff\n\tsf := &Snowflake{\n\t\tstatems: 0,\n\t\tgpid: gpid,\n\t}\n\treturn sf\n}", "title": "" }, { "docid": "dbc2f4d9b0e80b36746881f8608d0171", "score": "0.42186573", "text": "func SparseUnionOf(fields []Field, typeCodes []UnionTypeCode) *SparseUnionType {\n\tret := &SparseUnionType{}\n\tif err := ret.validate(fields, typeCodes, ret.Mode()); err != nil {\n\t\tpanic(err)\n\t}\n\tret.init(fields, typeCodes)\n\treturn ret\n}", "title": "" }, { "docid": "73a2b115cdfe3e825282ad1d90a1d47c", "score": "0.41974258", "text": "func NewSystemLister(indexer cache.Indexer) SystemLister {\n\treturn &systemLister{indexer: indexer}\n}", "title": "" }, { "docid": "957d06ddd468527054f2fa6eb8579d36", "score": "0.41953924", "text": 
"func NewSparseR1CS(r1cs compiled.SparseR1CS, coefficients []big.Int) *SparseR1CS {\n\tcs := SparseR1CS{\n\t\tr1cs,\n\t\tmake([]fr.Element, len(coefficients)),\n\t}\n\tfor i := 0; i < len(coefficients); i++ {\n\t\tcs.Coefficients[i].SetBigInt(&coefficients[i])\n\t}\n\treturn &cs\n}", "title": "" }, { "docid": "f28abdb64cd17e44a5856839b9f18e8a", "score": "0.4195306", "text": "func (o *AccessReport) ToSparse(fields ...string) elemental.SparseIdentifiable {\n\n\tif len(fields) == 0 {\n\t\t// nolint: goimports\n\t\treturn &SparseAccessReport{\n\t\t\tID: &o.ID,\n\t\t\tAction: &o.Action,\n\t\t\tClaimHash: &o.ClaimHash,\n\t\t\tEnforcerID: &o.EnforcerID,\n\t\t\tEnforcerNamespace: &o.EnforcerNamespace,\n\t\t\tMigrationsLog: &o.MigrationsLog,\n\t\t\tProcessingUnitID: &o.ProcessingUnitID,\n\t\t\tProcessingUnitName: &o.ProcessingUnitName,\n\t\t\tProcessingUnitNamespace: &o.ProcessingUnitNamespace,\n\t\t\tReason: &o.Reason,\n\t\t\tTimestamp: &o.Timestamp,\n\t\t\tType: &o.Type,\n\t\t\tZHash: &o.ZHash,\n\t\t\tZone: &o.Zone,\n\t\t}\n\t}\n\n\tsp := &SparseAccessReport{}\n\tfor _, f := range fields {\n\t\tswitch f {\n\t\tcase \"ID\":\n\t\t\tsp.ID = &(o.ID)\n\t\tcase \"action\":\n\t\t\tsp.Action = &(o.Action)\n\t\tcase \"claimHash\":\n\t\t\tsp.ClaimHash = &(o.ClaimHash)\n\t\tcase \"enforcerID\":\n\t\t\tsp.EnforcerID = &(o.EnforcerID)\n\t\tcase \"enforcerNamespace\":\n\t\t\tsp.EnforcerNamespace = &(o.EnforcerNamespace)\n\t\tcase \"migrationsLog\":\n\t\t\tsp.MigrationsLog = &(o.MigrationsLog)\n\t\tcase \"processingUnitID\":\n\t\t\tsp.ProcessingUnitID = &(o.ProcessingUnitID)\n\t\tcase \"processingUnitName\":\n\t\t\tsp.ProcessingUnitName = &(o.ProcessingUnitName)\n\t\tcase \"processingUnitNamespace\":\n\t\t\tsp.ProcessingUnitNamespace = &(o.ProcessingUnitNamespace)\n\t\tcase \"reason\":\n\t\t\tsp.Reason = &(o.Reason)\n\t\tcase \"timestamp\":\n\t\t\tsp.Timestamp = &(o.Timestamp)\n\t\tcase \"type\":\n\t\t\tsp.Type = &(o.Type)\n\t\tcase \"zHash\":\n\t\t\tsp.ZHash = &(o.ZHash)\n\t\tcase \"zone\":\n\t\t\tsp.Zone = &(o.Zone)\n\t\t}\n\t}\n\n\treturn sp\n}", "title": "" }, { "docid": "80156d78849c3187bfacf7ede3650ee0", "score": "0.4186197", "text": "func newStationsFullView(res *StationsFull) *stationviews.StationsFullView {\n\tvres := &stationviews.StationsFullView{}\n\tif res.Stations != nil {\n\t\tvres.Stations = newStationFullCollectionView(res.Stations)\n\t}\n\treturn vres\n}", "title": "" }, { "docid": "831fbe08285c03b814a61aed8c780379", "score": "0.4176328", "text": "func New(num int) IOS {\n\tios := &ios{\n\t\tmpath2disks: make(map[string]FsDisks, num),\n\t\tdisk2mpath: make(cos.StrKVs, num),\n\t\tdisk2sysfn: make(cos.StrKVs, num),\n\t}\n\tfor i := 0; i < len(ios.cacheHst); i++ {\n\t\tios.cacheHst[i] = newCache(num)\n\t}\n\tios._put(ios.cacheHst[0])\n\tios.cacheIdx = 0\n\tios.busy.Store(false) // redundant on purpose\n\n\t// once (cleared via Clblk)\n\tif res := lsblk(\"new-ios\", true); res != nil {\n\t\tios.lsblk.Store(unsafe.Pointer(res))\n\t}\n\treturn ios\n}", "title": "" }, { "docid": "fe1c4e1431c61f9dca0013eefdf7de7e", "score": "0.41741276", "text": "func RunSparseArray() {\n\ttmpDir := temp(\"sparse_array\")\n\tdefer cleanup(tmpDir)\n\n\tcreateSparseArray(tmpDir)\n\twriteSparseArray(tmpDir)\n\treadSparseArray(tmpDir)\n}", "title": "" }, { "docid": "7afa191b19a8bfe1e525a5d97b4dd452", "score": "0.41740015", "text": "func newStationFullCollection(vres stationviews.StationFullCollectionView) StationFullCollection {\n\tres := make(StationFullCollection, len(vres))\n\tfor i, n := range vres {\n\t\tres[i] = 
newStationFull(n)\n\t}\n\treturn res\n}", "title": "" }, { "docid": "e8a4d2db48cf678ebccd9c8a4058b225", "score": "0.4166269", "text": "func NewSensor() *Sensor {\n\treturn &Sensor{\n\t\tData: make(map[string]int64),\n\t\tM: &sync.RWMutex{},\n\t}\n}", "title": "" }, { "docid": "540207ac9a5df775cf80167a13fbebf1", "score": "0.41628757", "text": "func (array sparseCooF64Matrix) SparseCoo() Matrix {\n\treturn array.copy()\n}", "title": "" }, { "docid": "183d04bcb0f9ab1548e363370a216ddc", "score": "0.41576296", "text": "func New(initial ...interface{}) Squeue {\n\tn := len(initial)\n\thead, cache := make([]interface{}, 2*max(n, 10)), make([]*Cached, 6)\n\n\tcopy(head, initial)\n\tcache[0] = &Cached{&head, 0}\n\n\treturn Squeue{head, nil, cache, 0, n, 0, 0, 0, 1, 0}\n}", "title": "" }, { "docid": "ca3828978f66f7ee2b1817e4b6f2b963", "score": "0.41532624", "text": "func New(path string) (solomonkey.CausetStorage, error) {\n\treturn newStoreWithRetry(path, soliton.DefaultMaxRetries)\n}", "title": "" }, { "docid": "3369da8ecfe9dbd76f166cda99b65fc2", "score": "0.41482165", "text": "func newCESManagerFcfs(workQueue workqueue.RateLimitingInterface, maxCEPsInCES int) operations {\n\treturn &cesManagerFcfs{\n\t\tcesMgr{\n\t\t\tdesiredCESs: newDesiredCESMap(),\n\t\t\tqueue: workQueue,\n\t\t\tmaxCEPsInCES: maxCEPsInCES,\n\t\t},\n\t}\n}", "title": "" }, { "docid": "8d04b0b890e3aefdb8b0cf7a63cfcc50", "score": "0.41474307", "text": "func (o *CloudAccountCleaner) ToSparse(fields ...string) elemental.SparseIdentifiable {\n\n\tif len(fields) == 0 {\n\t\t// nolint: goimports\n\t\treturn &SparseCloudAccountCleaner{\n\t\t\tDate: &o.Date,\n\t\t}\n\t}\n\n\tsp := &SparseCloudAccountCleaner{}\n\tfor _, f := range fields {\n\t\tswitch f {\n\t\tcase \"date\":\n\t\t\tsp.Date = &(o.Date)\n\t\t}\n\t}\n\n\treturn sp\n}", "title": "" }, { "docid": "c8f5f75ed785a7bc3821e4b1cf522a20", "score": "0.4141667", "text": "func (array denseF64Array) SparseCoo() Matrix {\n\tm := SparseCoo(array.shape[0], array.shape[1])\n\tarray.VisitNonzero(func(pos []int, value float64) bool {\n\t\tm.ItemSet(value, pos[0], pos[1])\n\t\treturn true\n\t})\n\treturn m\n}", "title": "" }, { "docid": "8667632fc06fd8916addc61e21e1508b", "score": "0.41347554", "text": "func NewSparseMaxLoss(x Operand) *SparseMaxLoss {\n\treturn &SparseMaxLoss{x: x}\n}", "title": "" }, { "docid": "c28227dc3f8488e26dc341d9ae23f83c", "score": "0.41163886", "text": "func NewMonitoredSALShortFormBasicMode(counts byte, bridgeCount *uint8, networkNumber *uint8, noCounts *byte, application ApplicationIdContainer, salData SALData, salType byte, cBusOptions CBusOptions) *_MonitoredSALShortFormBasicMode {\n\t_result := &_MonitoredSALShortFormBasicMode{\n\t\tCounts: counts,\n\t\tBridgeCount: bridgeCount,\n\t\tNetworkNumber: networkNumber,\n\t\tNoCounts: noCounts,\n\t\tApplication: application,\n\t\tSalData: salData,\n\t\t_MonitoredSAL: NewMonitoredSAL(salType, cBusOptions),\n\t}\n\t_result._MonitoredSAL._MonitoredSALChildRequirements = _result\n\treturn _result\n}", "title": "" }, { "docid": "0e89906ac6f4c6616746bec4d6b0ff24", "score": "0.41078788", "text": "func (o *Activate) ToSparse(fields ...string) elemental.SparseIdentifiable {\n\n\tif len(fields) == 0 {\n\t\t// nolint: goimports\n\t\treturn &SparseActivate{\n\t\t\tToken: &o.Token,\n\t\t}\n\t}\n\n\tsp := &SparseActivate{}\n\tfor _, f := range fields {\n\t\tswitch f {\n\t\tcase \"token\":\n\t\t\tsp.Token = &(o.Token)\n\t\t}\n\t}\n\n\treturn sp\n}", "title": "" }, { "docid": "a86c7b60a9b04164cff8cac5a620e6c1", "score": "0.41046926", 
"text": "func (c *Cache) NewFetches() uint64 {\n\treturn atomic.LoadUint64(&c.newFetches)\n}", "title": "" }, { "docid": "12589c251ddfea664f7b60e1adb7610e", "score": "0.4104384", "text": "func New(network string, coreURL string, core, horizon *db.Session, config Config) *System {\n\ti := &System{\n\t\tConfig: config,\n\t\tNetwork: network,\n\t\tStellarCoreURL: coreURL,\n\t\tHorizonDB: horizon,\n\t\tCoreDB: core,\n\t}\n\n\ti.Metrics.ClearLedgerTimer = metrics.NewTimer()\n\ti.Metrics.IngestLedgerTimer = metrics.NewTimer()\n\ti.Metrics.LoadLedgerTimer = metrics.NewTimer()\n\treturn i\n}", "title": "" }, { "docid": "cc0a5b20de1cf3f4f4a202da131308a4", "score": "0.40929842", "text": "func (evaluator *Evaluator) SquareNew(ct0 CkksElement, evakey *EvaluationKey) (ct1 CkksElement, err error) {\n\tct1 = evaluator.ckkscontext.NewCiphertext(1, ct0.Level(), ct0.Scale())\n\tif err = evaluator.Square(ct0, evakey, ct1); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn ct1, nil\n}", "title": "" }, { "docid": "48e3d15914343526132087d1fec77e32", "score": "0.4087239", "text": "func NewSparseBitArray() BitArray {\n\treturn newSparseBitArray()\n}", "title": "" }, { "docid": "7441d65ec971d5d92d8fa9f78f09e119", "score": "0.40830666", "text": "func NewRefreshQty(val decimal.Decimal, scale int32) RefreshQtyField {\n\treturn RefreshQtyField{quickfix.FIXDecimal{Decimal: val, Scale: scale}}\n}", "title": "" }, { "docid": "52fd7bdf6b5d3f6e43898454ac5b6e94", "score": "0.4079237", "text": "func New(cfg *config.Cluster, parentLogger zerolog.Logger, check *circonus.Check) (*KSM, error) {\n\tif cfg == nil {\n\t\treturn nil, errors.New(\"invalid cluster config (nil)\")\n\t}\n\tif check == nil {\n\t\treturn nil, errors.New(\"invalid check (nil)\")\n\t}\n\n\tksm := &KSM{\n\t\tconfig: cfg,\n\t\tcheck: check,\n\t\tlog: parentLogger.With().Str(\"collector\", \"kube-state-metrics\").Logger(),\n\t}\n\n\tif cfg.APITimelimit != \"\" {\n\t\tv, err := time.ParseDuration(cfg.APITimelimit)\n\t\tif err != nil {\n\t\t\tksm.log.Error().Err(err).Msg(\"parsing api timelimit, using default\")\n\t\t} else {\n\t\t\tksm.apiTimelimit = v\n\t\t}\n\t}\n\n\tif ksm.apiTimelimit == time.Duration(0) {\n\t\tv, err := time.ParseDuration(defaults.K8SAPITimelimit)\n\t\tif err != nil {\n\t\t\tksm.log.Fatal().Err(err).Msg(\"parsing DEFAULT api timelimit\")\n\t\t}\n\t\tksm.apiTimelimit = v\n\t}\n\n\treturn ksm, nil\n}", "title": "" }, { "docid": "c9e672454b36d3ce258fac47e65e873a", "score": "0.40792075", "text": "func NewSSD() *storage.Storage {\n\tssd := &storage.Storage{\n\t\tDriver: storage.Storage_SSD,\n\t\tMemory: &memory.Memory{\n\t\t\tValue: uint64(generator.RandomInt(128, 1024)),\n\t\t\tUnit: memory.Memory_GIGABYTE,\n\t\t},\n\t}\n\n\treturn ssd\n}", "title": "" }, { "docid": "5490b854e7b8ed4e8c0060d3d4ab0e77", "score": "0.4077961", "text": "func (t *System) NewCpu(Index System_Cpu_Index_Union) (*System_Cpu, error) {\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Cpu == nil {\n\t\tt.Cpu = make(map[System_Cpu_Index_Union]*System_Cpu)\n\t}\n\n\tkey := Index\n\n\t// Ensure that this key has not already been used in the\n\t// list. 
Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Cpu[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Cpu\", key)\n\t}\n\n\tt.Cpu[key] = &System_Cpu{\n\t\tIndex: Index,\n\t}\n\n\treturn t.Cpu[key], nil\n}", "title": "" }, { "docid": "5490b854e7b8ed4e8c0060d3d4ab0e77", "score": "0.40779123", "text": "func (t *System) NewCpu(Index System_Cpu_Index_Union) (*System_Cpu, error) {\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Cpu == nil {\n\t\tt.Cpu = make(map[System_Cpu_Index_Union]*System_Cpu)\n\t}\n\n\tkey := Index\n\n\t// Ensure that this key has not already been used in the\n\t// list. Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Cpu[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Cpu\", key)\n\t}\n\n\tt.Cpu[key] = &System_Cpu{\n\t\tIndex: Index,\n\t}\n\n\treturn t.Cpu[key], nil\n}", "title": "" }, { "docid": "5490b854e7b8ed4e8c0060d3d4ab0e77", "score": "0.40779123", "text": "func (t *System) NewCpu(Index System_Cpu_Index_Union) (*System_Cpu, error) {\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Cpu == nil {\n\t\tt.Cpu = make(map[System_Cpu_Index_Union]*System_Cpu)\n\t}\n\n\tkey := Index\n\n\t// Ensure that this key has not already been used in the\n\t// list. Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Cpu[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Cpu\", key)\n\t}\n\n\tt.Cpu[key] = &System_Cpu{\n\t\tIndex: Index,\n\t}\n\n\treturn t.Cpu[key], nil\n}", "title": "" }, { "docid": "5490b854e7b8ed4e8c0060d3d4ab0e77", "score": "0.40779123", "text": "func (t *System) NewCpu(Index System_Cpu_Index_Union) (*System_Cpu, error) {\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Cpu == nil {\n\t\tt.Cpu = make(map[System_Cpu_Index_Union]*System_Cpu)\n\t}\n\n\tkey := Index\n\n\t// Ensure that this key has not already been used in the\n\t// list. Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Cpu[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Cpu\", key)\n\t}\n\n\tt.Cpu[key] = &System_Cpu{\n\t\tIndex: Index,\n\t}\n\n\treturn t.Cpu[key], nil\n}", "title": "" }, { "docid": "5490b854e7b8ed4e8c0060d3d4ab0e77", "score": "0.40779123", "text": "func (t *System) NewCpu(Index System_Cpu_Index_Union) (*System_Cpu, error) {\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Cpu == nil {\n\t\tt.Cpu = make(map[System_Cpu_Index_Union]*System_Cpu)\n\t}\n\n\tkey := Index\n\n\t// Ensure that this key has not already been used in the\n\t// list. 
Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Cpu[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Cpu\", key)\n\t}\n\n\tt.Cpu[key] = &System_Cpu{\n\t\tIndex: Index,\n\t}\n\n\treturn t.Cpu[key], nil\n}", "title": "" }, { "docid": "520bfefa79330cdbef443dbbfddf8031", "score": "0.40671438", "text": "func NewSSD() *pb.Storage {\n\tmemory := &pb.Memory{\n\t\tValue: uint32(randomInt(128, 1024)),\n\t\tUnit: pb.Memory_GIGABYTE,\n\t}\n\treturn &pb.Storage{\n\t\tDriver: pb.Storage_SSD,\n\t\tMemory: memory,\n\t}\n}", "title": "" }, { "docid": "74792b24db7ca5a2b4fff4fbda3fe2d8", "score": "0.40631264", "text": "func (o *FlowReport) ToSparse(fields ...string) elemental.SparseIdentifiable {\n\n\tif len(fields) == 0 {\n\t\t// nolint: goimports\n\t\treturn &SparseFlowReport{\n\t\t\tID: &o.ID,\n\t\t\tAction: &o.Action,\n\t\t\tDestinationController: &o.DestinationController,\n\t\t\tDestinationID: &o.DestinationID,\n\t\t\tDestinationIP: &o.DestinationIP,\n\t\t\tDestinationNamespace: &o.DestinationNamespace,\n\t\t\tDestinationPlatform: &o.DestinationPlatform,\n\t\t\tDestinationPort: &o.DestinationPort,\n\t\t\tDestinationType: &o.DestinationType,\n\t\t\tDropReason: &o.DropReason,\n\t\t\tEncrypted: &o.Encrypted,\n\t\t\tEnforcerID: &o.EnforcerID,\n\t\t\tMigrationsLog: &o.MigrationsLog,\n\t\t\tNamespace: &o.Namespace,\n\t\t\tObserved: &o.Observed,\n\t\t\tObservedAction: &o.ObservedAction,\n\t\t\tObservedDropReason: &o.ObservedDropReason,\n\t\t\tObservedEncrypted: &o.ObservedEncrypted,\n\t\t\tObservedPolicyID: &o.ObservedPolicyID,\n\t\t\tObservedPolicyNamespace: &o.ObservedPolicyNamespace,\n\t\t\tPolicyID: &o.PolicyID,\n\t\t\tPolicyNamespace: &o.PolicyNamespace,\n\t\t\tProtocol: &o.Protocol,\n\t\t\tRemoteNamespace: &o.RemoteNamespace,\n\t\t\tRemotePolicyID: &o.RemotePolicyID,\n\t\t\tRuleName: &o.RuleName,\n\t\t\tServiceClaimHash: &o.ServiceClaimHash,\n\t\t\tServiceID: &o.ServiceID,\n\t\t\tServiceNamespace: &o.ServiceNamespace,\n\t\t\tServiceType: &o.ServiceType,\n\t\t\tServiceURL: &o.ServiceURL,\n\t\t\tSourceController: &o.SourceController,\n\t\t\tSourceID: &o.SourceID,\n\t\t\tSourceIP: &o.SourceIP,\n\t\t\tSourceNamespace: &o.SourceNamespace,\n\t\t\tSourcePlatform: &o.SourcePlatform,\n\t\t\tSourceType: &o.SourceType,\n\t\t\tTimestamp: &o.Timestamp,\n\t\t\tValue: &o.Value,\n\t\t\tZHash: &o.ZHash,\n\t\t\tZone: &o.Zone,\n\t\t}\n\t}\n\n\tsp := &SparseFlowReport{}\n\tfor _, f := range fields {\n\t\tswitch f {\n\t\tcase \"ID\":\n\t\t\tsp.ID = &(o.ID)\n\t\tcase \"action\":\n\t\t\tsp.Action = &(o.Action)\n\t\tcase \"destinationController\":\n\t\t\tsp.DestinationController = &(o.DestinationController)\n\t\tcase \"destinationID\":\n\t\t\tsp.DestinationID = &(o.DestinationID)\n\t\tcase \"destinationIP\":\n\t\t\tsp.DestinationIP = &(o.DestinationIP)\n\t\tcase \"destinationNamespace\":\n\t\t\tsp.DestinationNamespace = &(o.DestinationNamespace)\n\t\tcase \"destinationPlatform\":\n\t\t\tsp.DestinationPlatform = &(o.DestinationPlatform)\n\t\tcase \"destinationPort\":\n\t\t\tsp.DestinationPort = &(o.DestinationPort)\n\t\tcase \"destinationType\":\n\t\t\tsp.DestinationType = &(o.DestinationType)\n\t\tcase \"dropReason\":\n\t\t\tsp.DropReason = &(o.DropReason)\n\t\tcase \"encrypted\":\n\t\t\tsp.Encrypted = &(o.Encrypted)\n\t\tcase \"enforcerID\":\n\t\t\tsp.EnforcerID = &(o.EnforcerID)\n\t\tcase \"migrationsLog\":\n\t\t\tsp.MigrationsLog = &(o.MigrationsLog)\n\t\tcase \"namespace\":\n\t\t\tsp.Namespace = &(o.Namespace)\n\t\tcase \"observed\":\n\t\t\tsp.Observed = 
&(o.Observed)\n\t\tcase \"observedAction\":\n\t\t\tsp.ObservedAction = &(o.ObservedAction)\n\t\tcase \"observedDropReason\":\n\t\t\tsp.ObservedDropReason = &(o.ObservedDropReason)\n\t\tcase \"observedEncrypted\":\n\t\t\tsp.ObservedEncrypted = &(o.ObservedEncrypted)\n\t\tcase \"observedPolicyID\":\n\t\t\tsp.ObservedPolicyID = &(o.ObservedPolicyID)\n\t\tcase \"observedPolicyNamespace\":\n\t\t\tsp.ObservedPolicyNamespace = &(o.ObservedPolicyNamespace)\n\t\tcase \"policyID\":\n\t\t\tsp.PolicyID = &(o.PolicyID)\n\t\tcase \"policyNamespace\":\n\t\t\tsp.PolicyNamespace = &(o.PolicyNamespace)\n\t\tcase \"protocol\":\n\t\t\tsp.Protocol = &(o.Protocol)\n\t\tcase \"remoteNamespace\":\n\t\t\tsp.RemoteNamespace = &(o.RemoteNamespace)\n\t\tcase \"remotePolicyID\":\n\t\t\tsp.RemotePolicyID = &(o.RemotePolicyID)\n\t\tcase \"ruleName\":\n\t\t\tsp.RuleName = &(o.RuleName)\n\t\tcase \"serviceClaimHash\":\n\t\t\tsp.ServiceClaimHash = &(o.ServiceClaimHash)\n\t\tcase \"serviceID\":\n\t\t\tsp.ServiceID = &(o.ServiceID)\n\t\tcase \"serviceNamespace\":\n\t\t\tsp.ServiceNamespace = &(o.ServiceNamespace)\n\t\tcase \"serviceType\":\n\t\t\tsp.ServiceType = &(o.ServiceType)\n\t\tcase \"serviceURL\":\n\t\t\tsp.ServiceURL = &(o.ServiceURL)\n\t\tcase \"sourceController\":\n\t\t\tsp.SourceController = &(o.SourceController)\n\t\tcase \"sourceID\":\n\t\t\tsp.SourceID = &(o.SourceID)\n\t\tcase \"sourceIP\":\n\t\t\tsp.SourceIP = &(o.SourceIP)\n\t\tcase \"sourceNamespace\":\n\t\t\tsp.SourceNamespace = &(o.SourceNamespace)\n\t\tcase \"sourcePlatform\":\n\t\t\tsp.SourcePlatform = &(o.SourcePlatform)\n\t\tcase \"sourceType\":\n\t\t\tsp.SourceType = &(o.SourceType)\n\t\tcase \"timestamp\":\n\t\t\tsp.Timestamp = &(o.Timestamp)\n\t\tcase \"value\":\n\t\t\tsp.Value = &(o.Value)\n\t\tcase \"zHash\":\n\t\t\tsp.ZHash = &(o.ZHash)\n\t\tcase \"zone\":\n\t\t\tsp.Zone = &(o.Zone)\n\t\t}\n\t}\n\n\treturn sp\n}", "title": "" }, { "docid": "5b95cfd0b438f9b7b32f472b60a3f7f6", "score": "0.4057909", "text": "func NewStorage() *Storage {\n stock := &Storage {\n Crates: make(StorageMap),\n load: make(chan StoreMessage),\n unload: make(chan GetMessage),\n destroy: nil,\n }\n go storageWorker(stock)\n return stock\n}", "title": "" }, { "docid": "61ae96eb6e9172a8deb38ea117cd1fa9", "score": "0.4054768", "text": "func New(remoteAPI dsb.DSBFacade) StationsService {\n\treturn &stationService{remoteAPI, 0}\n}", "title": "" }, { "docid": "5247ea9cc94c3f390bae54d11415f735", "score": "0.404828", "text": "func NewSimple() Simple {\n\tq := &simpleQueue{\n\t\tqueue: make([]interface{}, 0),\n\t}\n\tq.available = sync.NewCond(&q.mu)\n\treturn q\n}", "title": "" }, { "docid": "fc4bb34d36a5b084c5b05a85e2a345e8", "score": "0.4047576", "text": "func New(old *Stable) *Stable {\n\treturn &Stable{\n\t\ttable: make(map[string]*NodeInfo),\n\t\tup: old,\n\t}\n}", "title": "" }, { "docid": "031a6af4c1dd3b19253fa4e11943c674", "score": "0.40408993", "text": "func newStationFullView(res *StationFull) *stationviews.StationFullView {\n\tvres := &stationviews.StationFullView{\n\t\tID: &res.ID,\n\t\tName: &res.Name,\n\t\tDeviceID: &res.DeviceID,\n\t\tReadOnly: &res.ReadOnly,\n\t\tBattery: res.Battery,\n\t\tRecordingStartedAt: res.RecordingStartedAt,\n\t\tMemoryUsed: res.MemoryUsed,\n\t\tMemoryAvailable: res.MemoryAvailable,\n\t\tFirmwareNumber: res.FirmwareNumber,\n\t\tFirmwareTime: res.FirmwareTime,\n\t\tUpdated: &res.Updated,\n\t\tLocationName: res.LocationName,\n\t}\n\tif res.Owner != nil {\n\t\tvres.Owner = 
transformStationOwnerToStationviewsStationOwnerView(res.Owner)\n\t}\n\tif res.Uploads != nil {\n\t\tvres.Uploads = make([]*stationviews.StationUploadView, len(res.Uploads))\n\t\tfor i, val := range res.Uploads {\n\t\t\tvres.Uploads[i] = transformStationUploadToStationviewsStationUploadView(val)\n\t\t}\n\t}\n\tif res.Images != nil {\n\t\tvres.Images = make([]*stationviews.ImageRefView, len(res.Images))\n\t\tfor i, val := range res.Images {\n\t\t\tvres.Images[i] = transformImageRefToStationviewsImageRefView(val)\n\t\t}\n\t}\n\tif res.Photos != nil {\n\t\tvres.Photos = transformStationPhotosToStationviewsStationPhotosView(res.Photos)\n\t}\n\tif res.Configurations != nil {\n\t\tvres.Configurations = transformStationConfigurationsToStationviewsStationConfigurationsView(res.Configurations)\n\t}\n\tif res.Location != nil {\n\t\tvres.Location = transformStationLocationToStationviewsStationLocationView(res.Location)\n\t}\n\treturn vres\n}", "title": "" }, { "docid": "a10ab5fe331784be45ac8f90153a1fdf", "score": "0.4036268", "text": "func NewRefreshSource(plugctx *plugin.Context, proj *workspace.Project, target *Target, dryRun bool) Source {\n\treturn &refreshSource{\n\t\tplugctx: plugctx,\n\t\tproj: proj,\n\t\ttarget: target,\n\t\tdryRun: dryRun,\n\t}\n}", "title": "" }, { "docid": "8973c174b1fad05ec77a7ffa0316cdf0", "score": "0.40346313", "text": "func (o ServicePublicationsList) ToSparse(fields ...string) elemental.Identifiables {\n\n\tout := make(SparseServicePublicationsList, len(o))\n\tfor i := 0; i < len(o); i++ {\n\t\tout[i] = o[i].ToSparse(fields...).(*SparseServicePublication)\n\t}\n\n\treturn out\n}", "title": "" }, { "docid": "39cfee98d2fac413ca0f1e3740626a15", "score": "0.4032219", "text": "func (o *NamespaceRenderer) ToSparse(fields ...string) elemental.SparseIdentifiable {\n\n\tif len(fields) == 0 {\n\t\t// nolint: goimports\n\t\treturn &SparseNamespaceRenderer{\n\t\t\tNamespace: &o.Namespace,\n\t\t\tTags: &o.Tags,\n\t\t}\n\t}\n\n\tsp := &SparseNamespaceRenderer{}\n\tfor _, f := range fields {\n\t\tswitch f {\n\t\tcase \"namespace\":\n\t\t\tsp.Namespace = &(o.Namespace)\n\t\tcase \"tags\":\n\t\t\tsp.Tags = &(o.Tags)\n\t\t}\n\t}\n\n\treturn sp\n}", "title": "" }, { "docid": "d1e805663192aac199835a275ffbdd04", "score": "0.40241072", "text": "func New() Service {\n\treturn &service{\n\t\tsdcMap: map[string]string{},\n\t\tspCache: map[string]string{},\n\t}\n}", "title": "" }, { "docid": "513e0fe62bf8a53c5a25ac080f1bf548", "score": "0.4014804", "text": "func NewSystemdCollector() (Collector, error) {\n\tconst subsystem = \"systemd\"\n\n\tunitDesc := prometheus.NewDesc(\n\t\tprometheus.BuildFQName(Namespace, subsystem, \"unit_state\"),\n\t\t\"Systemd unit\", []string{\"name\", \"state\"}, nil,\n\t)\n\tsystemRunningDesc := prometheus.NewDesc(\n\t\tprometheus.BuildFQName(Namespace, subsystem, \"system_running\"),\n\t\t\"Whether the system is operational (see 'systemctl is-system-running')\",\n\t\tnil, nil,\n\t)\n\n\treturn &systemdCollector{\n\t\tunitDesc: unitDesc,\n\t\tsystemRunningDesc: systemRunningDesc,\n\t}, nil\n}", "title": "" }, { "docid": "ff09fa42bd68a6bae8e9e11ed202ea23", "score": "0.40140265", "text": "func newSensorOperationCtx(s *v1alpha1.Sensor, controller *SensorController) *sOperationCtx {\n\treturn &sOperationCtx{\n\t\ts: s.DeepCopy(),\n\t\tupdated: false,\n\t\tlog: zlog.New(os.Stdout).With().Str(\"name\", s.Name).Str(\"namespace\", s.Namespace).Caller().Logger(),\n\t\tcontroller: controller,\n\t}\n}", "title": "" }, { "docid": "ce0bb798d6be36f1dacef0c55711dc60", "score": 
"0.40109047", "text": "func New(l *util.Layout) *Systemd {\n\treturn &Systemd{\n\t\twritten: map[string]struct{}{},\n\t\tLayout: l,\n\t\tctr: 60,\n\t}\n}", "title": "" }, { "docid": "8ce2e2aedfc0a8714e2ec123058d077c", "score": "0.40098718", "text": "func NewMutableFS(bundle *core.Bundle, opts ...Option) (*MutableFS, error) {\n\tif err := checkBundle(bundle); err != nil {\n\t\treturn nil, err\n\t}\n\n\tpathToStaging, err := localPath(bundle.ConsumableStore)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfs := defaultMutableFS(bundle, pathToStaging)\n\tfor _, bapply := range opts {\n\t\tbapply(fs)\n\t}\n\n\tfs.l = fs.l.With(zap.String(\"repo\", bundle.RepoID))\n\tif bundle.BundleID != \"\" {\n\t\tfs.l = fs.l.With(zap.String(\"bundle\", bundle.BundleID))\n\t}\n\n\tif fs.MetricsEnabled() {\n\t\tfs.m = fs.EnsureMetrics(\"fuse\", &M{}).(*M)\n\t}\n\n\tfs.l.Info(\"mutable mount staging storage\", zap.String(\"path\", pathToStaging))\n\n\terr = fs.initRoot()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &MutableFS{\n\t\tmfs: nil,\n\t\tfsInternal: fs,\n\t\tserver: fuseutil.NewFileSystemServer(fs),\n\t}, nil\n}", "title": "" }, { "docid": "ddb7758bb9669bb57630cb8c576c2bb6", "score": "0.4008757", "text": "func (bd *BlockDevice) IsSparse() bool {\n\treturn bd.Object.Spec.Details.DeviceType == string(apis.TypeBlockDeviceCPV)\n}", "title": "" }, { "docid": "8c3b55e8fd084bed2bac00c68a72f83e", "score": "0.40027156", "text": "func New(uri string) (s Store, err error) {\n\tparams := strings.SplitN(uri, \":\", 2)\n\tif len(params) == 0 {\n\t\treturn nil, fmt.Errorf(\"store: invalid uri %s\", uri)\n\t}\n\tname := params[0]\n\tspi, ok := registry.Load(name)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"store: %s not registered\", name)\n\t}\n\n\tsp := spi.(Supplier)\n\treturn sp(uri)\n}", "title": "" }, { "docid": "aade5afa99864fd7c2ed05e749b79760", "score": "0.39932203", "text": "func (o *Plan) ToSparse(fields ...string) elemental.SparseIdentifiable {\n\n\tif len(fields) == 0 {\n\t\t// nolint: goimports\n\t\treturn &SparsePlan{\n\t\t\tDescription: &o.Description,\n\t\t\tKey: &o.Key,\n\t\t\tName: &o.Name,\n\t\t\tQuotas: &o.Quotas,\n\t\t\tRequireAdminValidation: &o.RequireAdminValidation,\n\t\t\tRoles: &o.Roles,\n\t\t}\n\t}\n\n\tsp := &SparsePlan{}\n\tfor _, f := range fields {\n\t\tswitch f {\n\t\tcase \"description\":\n\t\t\tsp.Description = &(o.Description)\n\t\tcase \"key\":\n\t\t\tsp.Key = &(o.Key)\n\t\tcase \"name\":\n\t\t\tsp.Name = &(o.Name)\n\t\tcase \"quotas\":\n\t\t\tsp.Quotas = &(o.Quotas)\n\t\tcase \"requireAdminValidation\":\n\t\t\tsp.RequireAdminValidation = &(o.RequireAdminValidation)\n\t\tcase \"roles\":\n\t\t\tsp.Roles = &(o.Roles)\n\t\t}\n\t}\n\n\treturn sp\n}", "title": "" }, { "docid": "c50801bb9249dcc882ea332397a9c4bd", "score": "0.39876494", "text": "func (s *Store) newStorer(dataType interface{}) Storer {\n\tstr, ok := dataType.(Storer)\n\n\tif ok {\n\t\treturn str\n\t}\n\n\ttp := reflect.TypeOf(dataType)\n\n\tfor tp.Kind() == reflect.Ptr {\n\t\ttp = tp.Elem()\n\t}\n\n\tstorer := &anonStorer{\n\t\trType: tp,\n\t\tindexes: make(map[string]Index),\n\t\tsliceIndexes: make(map[string]SliceIndex),\n\t}\n\n\tif storer.rType.Name() == \"\" {\n\t\tpanic(\"Invalid Type for Storer. Type is unnamed\")\n\t}\n\n\tif storer.rType.Kind() != reflect.Struct {\n\t\tpanic(\"Invalid Type for Storer. 
BoltHold only works with structs\")\n\t}\n\n\tfor i := 0; i < storer.rType.NumField(); i++ {\n\t\tstorer.addIndex(storer.rType.Field(i), s)\n\t}\n\n\treturn storer\n}", "title": "" }, { "docid": "5f367505ed09dd83daa6f6fe07820433", "score": "0.3979503", "text": "func NewGCSStore(gcs *storage.Client, bucket, cachepath string, pagesize int) (*GcsFS, error) {\n\terr := os.MkdirAll(path.Dir(cachepath), 0775)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to create path. path=%s err=%v\", cachepath, err)\n\t}\n\n\tuid := uuid.NewUUID().String()\n\tuid = strings.Replace(uid, \"-\", \"\", -1)\n\n\treturn &GcsFS{\n\t\tgcs: gcs,\n\t\tbucket: bucket,\n\t\tcachepath: cachepath,\n\t\tId: uid,\n\t\tPageSize: pagesize,\n\t}, nil\n}", "title": "" } ]
a16347cee77d2e080e7b4068d5a519d9
GetMessageType provides a mock function with given fields:
[ { "docid": "723c6b7095fc295f2bdab0907a4bb6d9", "score": "0.70905906", "text": "func (_m *Message) GetMessageType() types.MessageType {\n\tret := _m.Called()\n\n\tvar r0 types.MessageType\n\tif rf, ok := ret.Get(0).(func() types.MessageType); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(types.MessageType)\n\t}\n\n\treturn r0\n}", "title": "" } ]
[ { "docid": "b59c37065de5dd9d6d89f3a22be74916", "score": "0.6142157", "text": "func (_m *MockSessionService) GetMessage(ctx context.Context, lastMessageId int64) (*RunnerScaleSetMessage, error) {\n\tret := _m.Called(ctx, lastMessageId)\n\n\tvar r0 *RunnerScaleSetMessage\n\tif rf, ok := ret.Get(0).(func(context.Context, int64) *RunnerScaleSetMessage); ok {\n\t\tr0 = rf(ctx, lastMessageId)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*RunnerScaleSetMessage)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, int64) error); ok {\n\t\tr1 = rf(ctx, lastMessageId)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "6419370214125ac1def152f153c67813", "score": "0.61185443", "text": "func (_m *mock_PDU) GetMessage() spi.Message {\n\tret := _m.Called()\n\n\tvar r0 spi.Message\n\tif rf, ok := ret.Get(0).(func() spi.Message); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(spi.Message)\n\t\t}\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "65461a8252038dfb986152b5bcf597bb", "score": "0.5822765", "text": "func GetMockMessage(userResponse string) []byte {\n\tresponse := \"Hello world\"\n\tif userResponse != \"\" {\n\t\tresponse = userResponse\n\t}\n\n\tmessageStr := fmt.Sprintf(`{\n\t\t\"update_id\": 123,\n\t\t\"message\": {\n\t\t\t\"message_id\": 123,\n\t\t\t\"date\": 20180314,\n\t\t\t\"text\": \"%s\",\n\t\t\t\"from\": {\n\t\t\t\t\"id\": 12345,\n\t\t\t\t\"first_name\": \"Jane\",\n\t\t\t\t\"last_name\": \"Doe\",\n\t\t\t\t\"username\": \"janedoe\"\n\t\t\t},\n\t\t\t\"chat\": {\n\t\t\t\t\"id\": 456,\n\t\t\t\t\"first_name\": \"Jane\",\n\t\t\t\t\"last_name\": \"Doe\",\n\t\t\t\t\"username\": \"janedoe\"\n\t\t\t}\n\t\t}\n\t}`, response)\n\tmessage := []byte(messageStr)\n\treturn message\n}", "title": "" }, { "docid": "ae7f488b72349cf444bf52bfb89f2c83", "score": "0.5793016", "text": "func (a *AuthenticationResponseMessageIdentity) GetMessageType() (messageType uint8) {}", "title": "" }, { "docid": "b1a8259815dfe3c5ada512bfec578ecc", "score": "0.5761829", "text": "func generateMessageTypeGetter(w io.Writer, structName string, msgType messageType) {\n\tfmt.Fprintln(w, \"func (*\"+structName+\") GetMessageType() api.MessageType {\")\n\tif msgType == requestMessage {\n\t\tfmt.Fprintln(w, \"\\treturn api.RequestMessage\")\n\t} else if msgType == replyMessage {\n\t\tfmt.Fprintln(w, \"\\treturn api.ReplyMessage\")\n\t} else if msgType == eventMessage {\n\t\tfmt.Fprintln(w, \"\\treturn api.EventMessage\")\n\t} else {\n\t\tfmt.Fprintln(w, \"\\treturn api.OtherMessage\")\n\t}\n\tfmt.Fprintln(w, \"}\")\n}", "title": "" }, { "docid": "208555514b1ede1e77c04858aa6e2f05", "score": "0.57438534", "text": "func (_m *MockGoSNMPWrapper) GetMsgFlags() gosnmp.SnmpV3MsgFlags {\n\tret := _m.Called()\n\n\tvar r0 gosnmp.SnmpV3MsgFlags\n\tif rf, ok := ret.Get(0).(func() gosnmp.SnmpV3MsgFlags); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(gosnmp.SnmpV3MsgFlags)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "b255d90c04e16c82a19dc10b2d3655d1", "score": "0.5584375", "text": "func (_m *QueryFactory) BuildMessage(_a0 string, _a1 uint) apifarm.Query {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 apifarm.Query\n\tif rf, ok := ret.Get(0).(func(string, uint) apifarm.Query); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Get(0).(apifarm.Query)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "536e9c96f728124ac904584418f06c7b", "score": "0.5580491", "text": "func (_m *MockDatabase) GetMessageByID(_a0 
string, _a1 string) (burner.Message, error) {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 burner.Message\n\tif rf, ok := ret.Get(0).(func(string, string) burner.Message); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Get(0).(burner.Message)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string, string) error); ok {\n\t\tr1 = rf(_a0, _a1)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "22e18b13462156f7545057712a0b8875", "score": "0.5578456", "text": "func TestGetMessage(t *testing.T) {\n\tmessage := GetMessage()\n\n\tif message.Message != \"Hello World\" {\n\t\tt.Fail()\n\t}\n}", "title": "" }, { "docid": "14102756256f5f5b442c8aec78270f35", "score": "0.54741055", "text": "func (_m *Repo_UploadServer) RecvMsg(m interface{}) error {\n\tret := _m.Called(m)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(interface{}) error); ok {\n\t\tr0 = rf(m)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "450cfdbebfc26c5d7e45ebe344128b72", "score": "0.54552233", "text": "func (m *MockContext) Message() interface{} {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Message\")\n\tret0, _ := ret[0].(interface{})\n\treturn ret0\n}", "title": "" }, { "docid": "eb1d806196d3eaa0dfa344493963c8fa", "score": "0.54351497", "text": "func mockMessage(ageHours int) *MockMessage {\n\tmsg := &MockMessage{}\n\tmsg.On(\"ID\").Return(fmt.Sprintf(\"MSG[age=%vh]\", ageHours))\n\tmsg.On(\"Date\").Return(time.Now().Add(time.Duration(ageHours*-1) * time.Hour))\n\tmsg.On(\"Delete\").Return(nil)\n\treturn msg\n}", "title": "" }, { "docid": "233a0c86ce86aeb3e2a70923e95db07b", "score": "0.54207444", "text": "func (_m *mockWhatsappClient) Send(msg interface{}) (string, error) {\n\tret := _m.Called(msg)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(interface{}) string); ok {\n\t\tr0 = rf(msg)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(interface{}) error); ok {\n\t\tr1 = rf(msg)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "2eb76f21d706da5621196c95f9a84510", "score": "0.540614", "text": "func (_m *IState) Message(channelID string, messageID string) (*discordgo.Message, error) {\n\tret := _m.Called(channelID, messageID)\n\n\tvar r0 *discordgo.Message\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(string, string) (*discordgo.Message, error)); ok {\n\t\treturn rf(channelID, messageID)\n\t}\n\tif rf, ok := ret.Get(0).(func(string, string) *discordgo.Message); ok {\n\t\tr0 = rf(channelID, messageID)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*discordgo.Message)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(string, string) error); ok {\n\t\tr1 = rf(channelID, messageID)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "dae9bb6db4d0453d7bfb6af050d868b1", "score": "0.538297", "text": "func (p VersionInfoRequest) GetMessageType() MessageType {\r\n\treturn MessageType((p.OpCode >> 8) & 0xF0)\r\n}", "title": "" }, { "docid": "87630b7a1d395446a4c5046549a17d97", "score": "0.5364427", "text": "func testMessage(i int, f float64) *starlarkproto.Message {\n\tmsg := testMessageType.Message()\n\tif err := msg.SetField(\"i\", starlark.MakeInt(i)); err != nil {\n\t\tpanic(err)\n\t}\n\tif err := msg.SetField(\"f\", starlark.Float(f)); err != nil {\n\t\tpanic(err)\n\t}\n\treturn msg\n}", "title": "" }, { "docid": "669825f43c95ed5f98f4fe89cd9b0f51", "score": "0.5350595", "text": "func (_m 
*MockSlackClient) UpdateMessage(channelID string, timestamp string, options ...slack.MsgOption) (string, string, string, error) {\n\t_va := make([]interface{}, len(options))\n\tfor _i := range options {\n\t\t_va[_i] = options[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, channelID, timestamp)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string, string, ...slack.MsgOption) string); ok {\n\t\tr0 = rf(channelID, timestamp, options...)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 string\n\tif rf, ok := ret.Get(1).(func(string, string, ...slack.MsgOption) string); ok {\n\t\tr1 = rf(channelID, timestamp, options...)\n\t} else {\n\t\tr1 = ret.Get(1).(string)\n\t}\n\n\tvar r2 string\n\tif rf, ok := ret.Get(2).(func(string, string, ...slack.MsgOption) string); ok {\n\t\tr2 = rf(channelID, timestamp, options...)\n\t} else {\n\t\tr2 = ret.Get(2).(string)\n\t}\n\n\tvar r3 error\n\tif rf, ok := ret.Get(3).(func(string, string, ...slack.MsgOption) error); ok {\n\t\tr3 = rf(channelID, timestamp, options...)\n\t} else {\n\t\tr3 = ret.Error(3)\n\t}\n\n\treturn r0, r1, r2, r3\n}", "title": "" }, { "docid": "daa57b1b1c89acb4cd84d4b0c00f6de8", "score": "0.5332153", "text": "func (m *FakeMethod) GetRequestType() apisrv.Message { return m.ReqMsg }", "title": "" }, { "docid": "ac3934e4a8bc9a9e7be54a28249d68a6", "score": "0.5299065", "text": "func (_m *SSMMDSAPI) GetMessages(_a0 *ssmmds.GetMessagesInput) (*ssmmds.GetMessagesOutput, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 *ssmmds.GetMessagesOutput\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(*ssmmds.GetMessagesInput) (*ssmmds.GetMessagesOutput, error)); ok {\n\t\treturn rf(_a0)\n\t}\n\tif rf, ok := ret.Get(0).(func(*ssmmds.GetMessagesInput) *ssmmds.GetMessagesOutput); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*ssmmds.GetMessagesOutput)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(*ssmmds.GetMessagesInput) error); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "fce4fc8260dcab375449815f84a46b1e", "score": "0.5298443", "text": "func (_m *SSMMDSAPI) GetMessagesRequest(_a0 *ssmmds.GetMessagesInput) (*request.Request, *ssmmds.GetMessagesOutput) {\n\tret := _m.Called(_a0)\n\n\tvar r0 *request.Request\n\tvar r1 *ssmmds.GetMessagesOutput\n\tif rf, ok := ret.Get(0).(func(*ssmmds.GetMessagesInput) (*request.Request, *ssmmds.GetMessagesOutput)); ok {\n\t\treturn rf(_a0)\n\t}\n\tif rf, ok := ret.Get(0).(func(*ssmmds.GetMessagesInput) *request.Request); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*request.Request)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(*ssmmds.GetMessagesInput) *ssmmds.GetMessagesOutput); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(*ssmmds.GetMessagesOutput)\n\t\t}\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "7fa3f6f066b85b0cad270dc7ceaf46b0", "score": "0.52914214", "text": "func (_m *MockRedisClient) Type(key string) *redis.StatusCmd {\n\tret := _m.Called(key)\n\n\tvar r0 *redis.StatusCmd\n\tif rf, ok := ret.Get(0).(func(string) *redis.StatusCmd); ok {\n\t\tr0 = rf(key)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*redis.StatusCmd)\n\t\t}\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "1ae4d05e2f6ed402165e158838f40e04", "score": "0.52744144", "text": "func (_m *Message) GetId() string {\n\tret := _m.Called()\n\n\tvar r0 
string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "b3ae67b3cf323f2a6fbb4771c001b41b", "score": "0.52627033", "text": "func (_m *Reader) ReadMessage(_a0 context.Context) (kafka.Message, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 kafka.Message\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(context.Context) (kafka.Message, error)); ok {\n\t\treturn rf(_a0)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context) kafka.Message); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Get(0).(kafka.Message)\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context) error); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "d2f3aba6e95a239c02b90226d476e770", "score": "0.5251437", "text": "func (_m *Starfriends_ListStarshipActionsServer) RecvMsg(m interface{}) error {\n\tret := _m.Called(m)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(interface{}) error); ok {\n\t\tr0 = rf(m)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "d35eb84bf6b0df3c27a5e05069a81c71", "score": "0.52390194", "text": "func (f *FakeServer) GetMessage(svc, kind string) apisrv.Message {\n\treturn nil\n}", "title": "" }, { "docid": "7ab6e027f5bd9e073e65d5b73237f54b", "score": "0.52141196", "text": "func (m *MockClient) GetOutMessage(to string, idx uint64) (*pb.IBTP, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetOutMessage\", to, idx)\n\tret0, _ := ret[0].(*pb.IBTP)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "202bc25545dfb9016a0b42efb603dac7", "score": "0.5205305", "text": "func (_m *QueueProtocolAPI) Query(driver string, funcname string, param types.Message) (types.Message, error) {\n\tret := _m.Called(driver, funcname, param)\n\n\tvar r0 types.Message\n\tif rf, ok := ret.Get(0).(func(string, string, types.Message) types.Message); ok {\n\t\tr0 = rf(driver, funcname, param)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(types.Message)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string, string, types.Message) error); ok {\n\t\tr1 = rf(driver, funcname, param)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "b24bd6e315bc2e9b3b66dbff779794ec", "score": "0.52026105", "text": "func (_m *Repo_UploadServer) SendMsg(m interface{}) error {\n\tret := _m.Called(m)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(interface{}) error); ok {\n\t\tr0 = rf(m)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "532e81b75e66d0b4ff5c96ccb85295d3", "score": "0.51669264", "text": "func TestMessageWith(t *testing.T) {\n\tMustGetAPIServer()\n\tf := mocks.NewFakeMessage(\"test.TestType1\", \"/test\", true).(*mocks.FakeMessage)\n\tm := NewMessage(\"TestType1\").WithValidate(f.ValidateFunc).WithNormalizer(f.NormalizerFunc)\n\tm = m.WithKvUpdater(f.KvUpdateFunc).WithKvGetter(f.KvGetFunc).WithKvDelFunc(f.KvDelFunc).WithObjectVersionWriter(f.ObjverwriteFunc)\n\tm = m.WithKvTxnUpdater(f.TxnUpdateFunc).WithKvTxnDelFunc(f.DelFromKvTxnFunc).WithSelfLinkWriter(f.SelfLinkWriterFunc)\n\tm = m.WithKvWatchFunc(f.KvwatchFunc).WithKvListFunc(f.KvListFunc).WithReplaceStatusFunction(f.GetUpdateStatusFunc())\n\tm = m.WithUUIDWriter(f.CreateUUID).WithReplaceSpecFunction(f.GetUpdateSpecFunc()).WithGetRuntimeObject(f.GetRuntimeObject)\n\tm = 
m.WithModTimeWriter(f.WriteModTime).WithCreationTimeWriter(f.WriteCreationTime).WithObjectVersionWriter(f.WriteObjVersion)\n\tm = m.WithReferencesGetter(f.GetReferencesFunc).WithKeyGenerator(f.KeyGeneratorFunc).WithUpdateMetaFunction(f.GetUpdateMetaFunc())\n\tstx := mocks.ObjStorageTransformer{}\n\tm = m.WithStorageTransformer(&stx)\n\tsingletonAPISrv.runstate.running = true\n\tm.Validate(nil, \"\", true, false)\n\tvar kv kvstore.Interface\n\tif f.ValidateCalled != 1 {\n\t\tt.Errorf(\"Expecting 1 validation found %d\", f.ValidateCalled)\n\t}\n\tm.Normalize(nil)\n\tif f.DefaultCalled != 1 {\n\t\tt.Errorf(\"Expecting 1 call to Defaulter function found %d\", f.DefaultCalled)\n\t}\n\n\tm.GetFromKv(context.TODO(), kv, \"testkey\")\n\tif f.Kvreads != 1 {\n\t\tt.Errorf(\"Expecting 1 call to KV read found %d\", f.Kvreads)\n\t}\n\n\tm.WriteToKv(context.TODO(), kv, nil, \"testprefix\", true, true)\n\tif f.Kvwrites != 1 {\n\t\tt.Errorf(\"Expecting 1 call to KV Write found %d\", f.Kvwrites)\n\t}\n\n\tm.WriteToKvTxn(context.TODO(), nil, nil, nil, \"testprefix\", true, false)\n\tif f.Txnwrites != 1 {\n\t\tt.Errorf(\"Expecting 1 call to Txn write found %d\", f.Txnwrites)\n\t}\n\tm.DelFromKv(context.TODO(), kv, \"testKey\")\n\tif f.Kvdels != 1 {\n\t\tt.Errorf(\"Expecting 1 call to KV Del found %d\", f.Kvdels)\n\t}\n\n\tm.DelFromKvTxn(context.TODO(), nil, \"testKey\")\n\tif f.Txndels != 1 {\n\t\tt.Errorf(\"Expecting 1 call to Txn Del found %d\", f.Txndels)\n\t}\n\tm.CreateUUID(nil)\n\tif f.Uuidwrite != 1 {\n\t\tt.Errorf(\"Expecting 1 call to CreateUUID found %d\", f.Uuidwrite)\n\t}\n\n\tm.UpdateSelfLink(\"\", \"\", \"\", nil)\n\tif f.SelfLinkWrites != 1 {\n\t\tt.Errorf(\"Expecgting 1 call to UpdateSelfLink found %d\", f.SelfLinkWrites)\n\t}\n\tctx := context.TODO()\n\tfn := m.GetUpdateSpecFunc()\n\tif fn == nil {\n\t\tt.Fatalf(\"UpdateSpecFunc returned nil\")\n\t}\n\tfn1 := m.GetUpdateStatusFunc()\n\tif fn1 == nil {\n\t\tt.Fatalf(\"UpdateStatusFunc returned nil\")\n\t}\n\tobj := TestType1{}\n\tf.RuntimeObj = &obj\n\trobj := m.GetRuntimeObject(obj)\n\tif _, ok := robj.(runtime.Object); !ok {\n\t\tt.Fatalf(\"failed to get runtime object\")\n\t}\n\tm.WriteCreationTime(obj)\n\tif f.CreateTimeWrites != 1 {\n\t\tt.Errorf(\"WriteCreation time failed\")\n\t}\n\tm.WriteModTime(obj)\n\tif f.ModTimeWrite != 1 {\n\t\tt.Errorf(\"WriteModTime time failed\")\n\t}\n\tm.WriteObjVersion(obj, \"123\")\n\tif f.ObjVerWrites != 1 {\n\t\tt.Errorf(\"WriteObjVersion time failed got %d\", f.Objverwrite)\n\t}\n\tm.TransformToStorage(ctx, apiintf.CreateOper, nil)\n\tif stx.TransformToStorageCalled != 1 {\n\t\tt.Errorf(\"Expecting 1 call to TransformToStorage, found %d\", stx.TransformToStorageCalled)\n\t}\n\tm.TransformFromStorage(ctx, apiintf.CreateOper, nil)\n\tif stx.TransformFromStorageCalled != 1 {\n\t\tt.Errorf(\"Expecting 1 call to TransformFromStorage, found %d\", stx.TransformFromStorageCalled)\n\t}\n\t// Add the same storage transformer a second time. 
Now each calls increments the counter by 2.\n\tm.WithStorageTransformer(&stx)\n\tm.TransformToStorage(ctx, apiintf.UpdateOper, nil)\n\tif stx.TransformToStorageCalled != 3 {\n\t\tt.Errorf(\"Expecting 3 calls to TransformToStorage, found %d\", stx.TransformToStorageCalled)\n\t}\n\tm.TransformFromStorage(ctx, apiintf.UpdateOper, nil)\n\tif stx.TransformFromStorageCalled != 3 {\n\t\tt.Errorf(\"Expecting 3 calls to TransformFromStorage, found %d\", stx.TransformFromStorageCalled)\n\t}\n\tif m.GetKind() != \"TestType1\" {\n\t\tt.Errorf(\"Expecting kind %s, found %s\", \"TestType1\", m.GetKind())\n\t}\n\tm.ListFromKv(ctx, nil, nil, \"\")\n\tif f.Kvlists != 1 {\n\t\tt.Errorf(\"expecting 1 list call got [%v]\", f.Kvlists)\n\t}\n\tm.GetKVKey(nil, \"\")\n\tif f.KeyGens != 1 {\n\t\tt.Errorf(\"expecting 1 call to generate key got [%v]\", f.KeyGens)\n\t}\n\n\tif m.GetUpdateMetaFunc() == nil {\n\t\tt.Errorf(\"expecting UpdateMetaFunc to be set\")\n\t}\n\n\tmd := metadata.Pairs(apisrv.RequestParamVersion, \"v1\",\n\t\tapisrv.RequestParamMethod, \"WATCH\")\n\tctx = metadata.NewIncomingContext(ctx, md)\n\tstream := fakeGrpcStream{ctx: ctx}\n\topts := api.ListWatchOptions{}\n\terr := m.WatchFromKv(&opts, stream, \"test\")\n\tif err != nil {\n\t\tt.Errorf(\"watch returned error (%s)\", err)\n\t}\n\n\tif m.GetUpdateMetaFunc() == nil {\n\t\tt.Errorf(\"expecting UpdateMetaFunc to be set\")\n\t}\n\trefMap := map[string]apiintf.ReferenceObj{\n\t\t\"/testmsg/path\": {RefType: apiintf.NamedReference, Refs: []string{\"/test/reference1\", \"/test/reference2\"}},\n\t}\n\tf.RefMap = refMap\n\trefs, err := m.GetReferences(nil)\n\tif err != nil {\n\t\tt.Errorf(\"failed to get references (%s)\", err)\n\t}\n\tif !reflect.DeepEqual(refMap, refs) {\n\t\tt.Errorf(\"returned references does not match got\\n[%+v\\nwant\\n[%+v]\", refs, refMap)\n\t}\n}", "title": "" }, { "docid": "b61ca3c6fe85bb1704893fb8fcaf595d", "score": "0.5157456", "text": "func (_m *Connection) ReceiveMessage(_a0 context.Context) (conn.ChannelID, []byte, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 conn.ChannelID\n\tif rf, ok := ret.Get(0).(func(context.Context) conn.ChannelID); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Get(0).(conn.ChannelID)\n\t}\n\n\tvar r1 []byte\n\tif rf, ok := ret.Get(1).(func(context.Context) []byte); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).([]byte)\n\t\t}\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(context.Context) error); ok {\n\t\tr2 = rf(_a0)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "title": "" }, { "docid": "461b9b83f6a46ae5daa5fd2d3057f1e1", "score": "0.51488876", "text": "func (m Rgetattr) MessageType() MessageType { return MessageRgetattr }", "title": "" }, { "docid": "ab42a33e811ce604c3ff49d7d1377eea", "score": "0.51475316", "text": "func (_HashFake *HashFakeCaller) MessageTest(opts *bind.CallOpts, d [32]byte, h [32]byte) ([32]byte, error) {\n\tvar out []interface{}\n\terr := _HashFake.contract.Call(opts, &out, \"messageTest\", d, h)\n\n\tif err != nil {\n\t\treturn *new([32]byte), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new([32]byte)).(*[32]byte)\n\n\treturn out0, err\n\n}", "title": "" }, { "docid": "52e7cf1af2d68ac8f04b2d721f9f3a91", "score": "0.5135321", "text": "func (test Message) Test(t testing.TB, mt protoreflect.MessageType) {\n\ttestType(t, mt)\n\n\tmd := mt.Descriptor()\n\tm1 := mt.New()\n\tfor i := 0; i < md.Fields().Len(); i++ {\n\t\tfd := md.Fields().Get(i)\n\t\ttestField(t, m1, fd)\n\t}\n\tif test.Resolver == 
nil {\n\t\ttest.Resolver = protoregistry.GlobalTypes\n\t}\n\tvar extTypes []protoreflect.ExtensionType\n\ttest.Resolver.RangeExtensionsByMessage(md.FullName(), func(e protoreflect.ExtensionType) bool {\n\t\textTypes = append(extTypes, e)\n\t\treturn true\n\t})\n\tfor _, xt := range extTypes {\n\t\ttestField(t, m1, xt.TypeDescriptor())\n\t}\n\tfor i := 0; i < md.Oneofs().Len(); i++ {\n\t\ttestOneof(t, m1, md.Oneofs().Get(i))\n\t}\n\ttestUnknown(t, m1)\n\n\t// Test round-trip marshal/unmarshal.\n\tm2 := mt.New().Interface()\n\tpopulateMessage(m2.ProtoReflect(), 1, nil)\n\tfor _, xt := range extTypes {\n\t\tm2.ProtoReflect().Set(xt.TypeDescriptor(), newValue(m2.ProtoReflect(), xt.TypeDescriptor(), 1, nil))\n\t}\n\tb, err := proto.MarshalOptions{\n\t\tAllowPartial: true,\n\t}.Marshal(m2)\n\tif err != nil {\n\t\tt.Errorf(\"Marshal() = %v, want nil\\n%v\", err, prototext.Format(m2))\n\t}\n\tm3 := mt.New().Interface()\n\tif err := (proto.UnmarshalOptions{\n\t\tAllowPartial: true,\n\t\tResolver: test.Resolver,\n\t}.Unmarshal(b, m3)); err != nil {\n\t\tt.Errorf(\"Unmarshal() = %v, want nil\\n%v\", err, prototext.Format(m2))\n\t}\n\tif !proto.Equal(m2, m3) {\n\t\tt.Errorf(\"round-trip marshal/unmarshal did not preserve message\\nOriginal:\\n%v\\nNew:\\n%v\", prototext.Format(m2), prototext.Format(m3))\n\t}\n}", "title": "" }, { "docid": "57cd443696f9ecfff494f56dfb51d12e", "score": "0.51293117", "text": "func (_m *QueueProtocolAPI) NewMessage(topic string, msgid int64, data interface{}) *queue.Message {\n\tret := _m.Called(topic, msgid, data)\n\n\tvar r0 *queue.Message\n\tif rf, ok := ret.Get(0).(func(string, int64, interface{}) *queue.Message); ok {\n\t\tr0 = rf(topic, msgid, data)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*queue.Message)\n\t\t}\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "828647f389b3a3ca91c8729562f31683", "score": "0.51243967", "text": "func (m *MockLogger) GetFields() map[string]interface{} {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetFields\")\n\tret0, _ := ret[0].(map[string]interface{})\n\treturn ret0\n}", "title": "" }, { "docid": "2e50f9f4e45ed3e56ecb9f3cec0a397d", "score": "0.51187533", "text": "func GetMessageType(path string) (proto.Message) {\n fmt.Println(\"HOLAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n fmt.Println(path)\n sensorType := strings.Split(path,\":\")\n fmt.Println(sensorType)\n switch sensorType[0] {\n case \"huawei-bfd\":\n return &huawei_bfd.Bfd{}\n\n case \"huawei-bgp\":\n switch sensorType[1] {\n case \"ESTABLISHED\":\n return &huawei_bgp.ESTABLISHED{}\n case \"BACKWARD\":\n return &huawei_bgp.BACKWARD{}\n }\n return &huawei_bgp.ESTABLISHED{}\n\n case \"huawei-devm\":\n return &huawei_devm.Devm{}\n\n case \"huawei-driver\":\n switch sensorType[1] {\n case \"hwEntityInvalid\":\n return &huawei_driver.HwEntityInvalid{}\n case \"hwEntityResume\":\n return &huawei_driver.HwEntityResume{}\n case \"hwOpticalInvalid\":\n return &huawei_driver.HwOpticalInvalid{}\n case \"hwOpticalInvalidResume\":\n return &huawei_driver.HwOpticalInvalidResume{}\n }\n return &huawei_driver.HwEntityInvalid{}\n\n case \"huawei-ifm\":\n return &huawei_ifm.Ifm{}\n\n case \"huawei-isis\":\n case \"huawei-isiscomm\":\n return &huawei_isiscomm.IsisAdjacencyChange{}\n\n case \"huawei-mpls\":\n return &huawei_mpls.Mpls{}\n\n case \"huawei-ospfv2\":\n switch sensorType[1] {\n case \"ospfNbrStateChange\":\n return &huawei_ospfv2.OspfNbrStateChange{}\n case 
\"ospfVirtNbrStateChange\":\n return &huawei_ospfv2.OspfVirtNbrStateChange{}\n }\n return &huawei_ospfv2.OspfNbrStateChange{}\n\n case \"huawei-ospfv3\":\n return &huawei_ospfv3.Ospfv3NbrStateChange{}\n\n case \"huawei-qos\":\n return &huawei_qos.Qos{}\n\n case \"huawei-sem\":\n switch sensorType[1] {\n case \"hwCPUUtilizationResume\":\n return &huawei_sem.HwStorageUtilizationResume{}\n case \"hwCPUUtilizationRisingAlarm\":\n return &huawei_sem.HwCPUUtilizationRisingAlarm{}\n case \"hwStorageUtilizationResume\":\n return &huawei_sem.HwStorageUtilizationResume{}\n case \"hwStorageUtilizationRisingAlarm\":\n return &huawei_sem.HwStorageUtilizationRisingAlarm{}\n }\n return &huawei_sem.HwStorageUtilizationResume{}\n\n case \"huawei-telmEmdi\":\n case \"huawei-emdi\":\n return &huawei_telemEmdi.TelemEmdi{}\n\n case \"huawei-trafficmng\":\n return &huawei_trafficmng.Trafficmng{}\n\n default:\n fmt.Println(\"Error Sensor Desconocido\")\n fmt.Println(\"HOLAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n fmt.Println(sensorType)\n return &huawei_devm.Devm{}\n }\n return &huawei_devm.Devm{}\n}", "title": "" }, { "docid": "3003dc15cb46529df9c26581179ad4fe", "score": "0.51093554", "text": "func (_m *Repository) TranslateOrderField(input string) string {\n\tret := _m.Called(input)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string) string); ok {\n\t\tr0 = rf(input)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "2cc7c83e6b915ef6fb96da47605f879a", "score": "0.50945413", "text": "func (_m *IState) Messages(channelID string, forceFetch ...bool) ([]*discordgo.Message, error) {\n\t_va := make([]interface{}, len(forceFetch))\n\tfor _i := range forceFetch {\n\t\t_va[_i] = forceFetch[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, channelID)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 []*discordgo.Message\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(string, ...bool) ([]*discordgo.Message, error)); ok {\n\t\treturn rf(channelID, forceFetch...)\n\t}\n\tif rf, ok := ret.Get(0).(func(string, ...bool) []*discordgo.Message); ok {\n\t\tr0 = rf(channelID, forceFetch...)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]*discordgo.Message)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(string, ...bool) error); ok {\n\t\tr1 = rf(channelID, forceFetch...)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "320b30aa32416549ad7086899d6cd778", "score": "0.5087472", "text": "func (_m *Hooks) MessageWillBeUpdated(c *plugin.Context, newPost *model.Post, oldPost *model.Post) (*model.Post, string) {\n\tret := _m.Called(c, newPost, oldPost)\n\n\tvar r0 *model.Post\n\tif rf, ok := ret.Get(0).(func(*plugin.Context, *model.Post, *model.Post) *model.Post); ok {\n\t\tr0 = rf(c, newPost, oldPost)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*model.Post)\n\t\t}\n\t}\n\n\tvar r1 string\n\tif rf, ok := ret.Get(1).(func(*plugin.Context, *model.Post, *model.Post) string); ok {\n\t\tr1 = rf(c, newPost, oldPost)\n\t} else {\n\t\tr1 = ret.Get(1).(string)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "8d1fd5a641e1351e113dc21fb232b7f8", "score": "0.50613767", "text": "func getMessageInfo(mt reflect.Type) *MessageInfo {\n\tm, ok := reflect.Zero(mt).Interface().(pref.ProtoMessage)\n\tif !ok {\n\t\treturn nil\n\t}\n\tmr, ok := m.ProtoReflect().(interface{ ProtoMessageInfo() *MessageInfo })\n\tif 
!ok {\n\t\treturn nil\n\t}\n\treturn mr.ProtoMessageInfo()\n}", "title": "" }, { "docid": "7e312c7da3686ce37211f622e9f15c4f", "score": "0.50597423", "text": "func (_m *MockCollectionConfigService_WaitForProbeConfigServer) RecvMsg(m interface{}) error {\n\tret := _m.Called(m)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(interface{}) error); ok {\n\t\tr0 = rf(m)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "871e8624cb6f0c79fc45a4a96969ab96", "score": "0.50572926", "text": "func get_message(stub shim.ChaincodeStubInterface, id string) (Message, error) {\n\tvar message Message\n\tmessageAsBytes, err := stub.GetState(id) //getState retreives a key/value from the ledger\n\tif err != nil { //this seems to always succeed, even if key didn't exist\n\t\treturn message, errors.New(\"Failed to find message - \" + id)\n\t}\n\tjson.Unmarshal(messageAsBytes, &message) //un stringify it aka JSON.parse()\n\n\tif message.Id != id { //test if message is actually here or just nil\n\t\treturn message, errors.New(\"Message does not exist - \" + id)\n\t}\n\n\treturn message, nil\n}", "title": "" }, { "docid": "1d0bd24c47fb7790a9f37a9be40f4995", "score": "0.50533175", "text": "func (_m *Connection) SendMessage(_a0 context.Context, _a1 conn.ChannelID, _a2 []byte) error {\n\tret := _m.Called(_a0, _a1, _a2)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, conn.ChannelID, []byte) error); ok {\n\t\tr0 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "0434b5763de3ab36acad7a067c2aac62", "score": "0.50388473", "text": "func MessageType(t *testing.T, message string) string {\n\tt.Helper()\n\tmsh := MSH(t, message)\n\treturn fmt.Sprintf(\"%s^%s\", msh.MessageType.MessageCode.String(), msh.MessageType.TriggerEvent.String())\n}", "title": "" }, { "docid": "53e97e563cbb41efa16b1d3d2715835b", "score": "0.5034128", "text": "func (_e *mock_PDU_Expecter) GetMessage() *mock_PDU_GetMessage_Call {\n\treturn &mock_PDU_GetMessage_Call{Call: _e.mock.On(\"GetMessage\")}\n}", "title": "" }, { "docid": "80309202e2edcdd46643795a9e73b0d3", "score": "0.5032397", "text": "func (o *CustomDeviceCreation) GetMessageTypeOk() (*string, bool) {\n\tif o == nil || o.MessageType == nil {\n\t\treturn nil, false\n\t}\n\treturn o.MessageType, true\n}", "title": "" }, { "docid": "c32f4dbeac660e7169a53d9a5f8e7e53", "score": "0.50261855", "text": "func (_m *FcmClient) Send(msg *fcm.Message) (*fcm.Response, error) {\n\tret := _m.Called(msg)\n\n\tvar r0 *fcm.Response\n\tif rf, ok := ret.Get(0).(func(*fcm.Message) *fcm.Response); ok {\n\t\tr0 = rf(msg)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*fcm.Response)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*fcm.Message) error); ok {\n\t\tr1 = rf(msg)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "623c18a7d68e3ca1e25b5a81444d90f4", "score": "0.501773", "text": "func (_m *MockMessage) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "fcc19eb0089b3d3cc863883f1350d1ef", "score": "0.50172776", "text": "func (_m *MessageProcessor) MessageReceived(ctx interfaces.AppFunctionContext, envelope types.MessageEnvelope, outputHandler interfaces.PipelineResponseHandler) error {\n\tret := _m.Called(ctx, envelope, outputHandler)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(interfaces.AppFunctionContext, types.MessageEnvelope, interfaces.PipelineResponseHandler) error); ok {\n\t\tr0 = rf(ctx, envelope, outputHandler)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "3dc4de8d1ed83cf12437371fe9e5c865", "score": "0.501695", "text": "func (_m *Hooks) MessageWillBePosted(c *plugin.Context, post *model.Post) (*model.Post, string) {\n\tret := _m.Called(c, post)\n\n\tvar r0 *model.Post\n\tif rf, ok := ret.Get(0).(func(*plugin.Context, *model.Post) *model.Post); ok {\n\t\tr0 = rf(c, post)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*model.Post)\n\t\t}\n\t}\n\n\tvar r1 string\n\tif rf, ok := ret.Get(1).(func(*plugin.Context, *model.Post) string); ok {\n\t\tr1 = rf(c, post)\n\t} else {\n\t\tr1 = ret.Get(1).(string)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "64fc9b6175274665de06ea410f49b60e", "score": "0.5014779", "text": "func (m *MockErrorWithFields) GetFields() map[string]interface{} {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetFields\")\n\tret0, _ := ret[0].(map[string]interface{})\n\treturn ret0\n}", "title": "" }, { "docid": "ee53fd7206f1da3ec19db84d9aab320c", "score": "0.5013303", "text": "func (_m *BitkubRepository) GetServerTime() (string, error) {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func() error); ok {\n\t\tr1 = rf()\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "6c5af2a44c32a8f1941490faeb775a3a", "score": "0.5007393", "text": "func (_m *IStreamMock) GetEventType() StreamName {\n\tret := _m.Called()\n\n\tvar r0 StreamName\n\tif rf, ok := ret.Get(0).(func() StreamName); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(StreamName)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "572844ac23702f2b3a2dcc15531679be", "score": "0.50030404", "text": "func (_m *MockPlcTag) GetDataType() readwritemodel.TransportSize {\n\tret := _m.Called()\n\n\tvar r0 readwritemodel.TransportSize\n\tif rf, ok := ret.Get(0).(func() readwritemodel.TransportSize); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(readwritemodel.TransportSize)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "3d0b0f676235e2669d4572d5d9bba3ed", "score": "0.49966842", "text": "func (m *MockClient) GetInMessage(from string, idx uint64) ([][]byte, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetInMessage\", from, idx)\n\tret0, _ := ret[0].([][]byte)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "0b1d36516628861de115bc337cc4121c", "score": "0.49900642", "text": "func TestMsgClaimHTLCType(t *testing.T) {\n\tmsg := types.NewMsgClaimHTLC(senderStr, idStr, secret.String())\n\trequire.Equal(t, \"claim_htlc\", msg.Type())\n}", "title": "" }, { "docid": "16bf7ffdb732a14d9ed0b17c9b1c1954", "score": "0.49831596", "text": "func (_m *SizeRestrictedOutput) GetMaxMessageSize() *int {\n\tret := _m.Called()\n\n\tvar r0 *int\n\tif rf, ok := ret.Get(0).(func() *int); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*int)\n\t\t}\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "f0e16c594ffd689512d1e09d984c36ab", "score": "0.49805027", "text": "func (m *MockClient) Type() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Type\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "title": "" }, { "docid": 
"b7cad0bc65cd75487e37c54783b5479f", "score": "0.4975519", "text": "func (_m *mockWsConnInterface) WriteMessage(_a0 int, _a1 []byte) error {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(int, []byte) error); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "ff68f74f5892e59b7de056a72902ffc6", "score": "0.4968589", "text": "func (_m *HistoryClient) GetReplicationMessages(\n\tctx context.Context,\n\trequest *replicator.GetReplicationMessagesRequest,\n\topts ...yarpc.CallOption,\n) (*replicator.GetReplicationMessagesResponse, error) {\n\tret := _m.Called(ctx, request)\n\n\tvar r0 *replicator.GetReplicationMessagesResponse\n\tif rf, ok := ret.Get(0).(func(context.Context, *replicator.GetReplicationMessagesRequest) *replicator.GetReplicationMessagesResponse); ok {\n\t\tr0 = rf(ctx, request)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*replicator.GetReplicationMessagesResponse)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *replicator.GetReplicationMessagesRequest) error); ok {\n\t\tr1 = rf(ctx, request)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "230e39840b319eaf9a57b398966c314d", "score": "0.49675784", "text": "func (_m *ClientInterface) GetFileStatus(projectID string, fileURI string) (*smartling.FileStatus, error) {\n\tret := _m.Called(projectID, fileURI)\n\n\tvar r0 *smartling.FileStatus\n\tif rf, ok := ret.Get(0).(func(string, string) *smartling.FileStatus); ok {\n\t\tr0 = rf(projectID, fileURI)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*smartling.FileStatus)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string, string) error); ok {\n\t\tr1 = rf(projectID, fileURI)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "64b1b2ffef98342f8e77816e1593e3f0", "score": "0.49622664", "text": "func (_m *FileInteractor) GetJsonFile(fileName string) ([]byte, error) {\n\tret := _m.Called(fileName)\n\n\tvar r0 []byte\n\tif rf, ok := ret.Get(0).(func(string) []byte); ok {\n\t\tr0 = rf(fileName)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]byte)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(fileName)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "1d0685bc2cfaab84184fac5ac7a9d3e8", "score": "0.49455053", "text": "func (m *FakeMessage) GetKind() string { return m.kind }", "title": "" }, { "docid": "9295d5155d466c382817d9a5fefef8c9", "score": "0.49431404", "text": "func (mock *Mock) GetMessages(sender, recipient, start, limit uint) ([]entities.Message, error) {\n\tinputHash := toHash(getInputForGetMessages(sender, recipient, start, limit))\n\tarrOutputForGet, exists := mock.patchGetMessagesMap[inputHash]\n\tif !exists || len(arrOutputForGet) == 0 {\n\t\tpanic(\"Mock not available for GetMessages\")\n\t}\n\n\toutput := arrOutputForGet[0]\n\tarrOutputForGet = arrOutputForGet[1:]\n\tmock.patchGetMessagesMap[inputHash] = arrOutputForGet\n\n\treturn output.messages, output.error\n}", "title": "" }, { "docid": "fe8d1f5260fe85116e9a2dde4052dfd0", "score": "0.49429196", "text": "func (_m *Repo_UploadServer) Recv() (*v1.UploadRequest, error) {\n\tret := _m.Called()\n\n\tvar r0 *v1.UploadRequest\n\tif rf, ok := ret.Get(0).(func() *v1.UploadRequest); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = 
ret.Get(0).(*v1.UploadRequest)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func() error); ok {\n\t\tr1 = rf()\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "440b526dbf83bee5d22471170a441ce4", "score": "0.49398735", "text": "func (_m *Reader) FetchMessage(_a0 context.Context) (kafka.Message, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 kafka.Message\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(context.Context) (kafka.Message, error)); ok {\n\t\treturn rf(_a0)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context) kafka.Message); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Get(0).(kafka.Message)\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context) error); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "6d6f66db3bdc04b2187778534831014c", "score": "0.4939734", "text": "func (_m *QueueProtocolAPI) GetHeaders(param *types.ReqBlocks) (*types.Headers, error) {\n\tret := _m.Called(param)\n\n\tvar r0 *types.Headers\n\tif rf, ok := ret.Get(0).(func(*types.ReqBlocks) *types.Headers); ok {\n\t\tr0 = rf(param)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*types.Headers)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*types.ReqBlocks) error); ok {\n\t\tr1 = rf(param)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "4d9ff038cc50e7d61abc3c4ef01b5ce0", "score": "0.49339604", "text": "func getType(bt []byte) (mType Type) {\n\t//fmt.Printf(\"GetMsgType % X\\n\", msg)\n\tif len(bt) == 0 {\n\t\treturn UnknownMsg\n\t}\n\tbyte1 := bt[0]\n\n\tswitch {\n\t// channel/Voice Category Status\n\tcase byte1 >= 0x80 && byte1 <= 0xEF:\n\t\treturn getChannelType(byte1)\n\tcase byte1 == 0xF0, byte1 == 0xF7:\n\t\t// TODO what about sysex start stop etc.\n\t\treturn SysExMsg\n\tcase byte1 == 0xFF:\n\t\t/*\n\t\t\tif byte2 > 0 {\n\t\t\t\treturn MetaMsgType\n\t\t\t}\n\t\t*/\n\t\treturn getRealtimeType(byte1)\n\tcase byte1 < 0xF7:\n\t\treturn getSysCommonType(byte1)\n\tcase byte1 > 0xF7:\n\t\treturn getRealtimeType(byte1)\n\tdefault:\n\t\treturn UnknownMsg\n\t}\n}", "title": "" }, { "docid": "b9c5af9433bd3e60fccfc19d33212737", "score": "0.4927734", "text": "func (_m *MockSlackClient) PostMessage(channelID string, options ...slack.MsgOption) (string, string, error) {\n\t_va := make([]interface{}, len(options))\n\tfor _i := range options {\n\t\t_va[_i] = options[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, channelID)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string, ...slack.MsgOption) string); ok {\n\t\tr0 = rf(channelID, options...)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 string\n\tif rf, ok := ret.Get(1).(func(string, ...slack.MsgOption) string); ok {\n\t\tr1 = rf(channelID, options...)\n\t} else {\n\t\tr1 = ret.Get(1).(string)\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(string, ...slack.MsgOption) error); ok {\n\t\tr2 = rf(channelID, options...)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "title": "" }, { "docid": "fd8918d5151b6432545f3435d8b174a4", "score": "0.49267283", "text": "func (m *MockConn) ReadMessage() (int, []byte, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadMessage\")\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].([]byte)\n\tret2, _ := ret[2].(error)\n\treturn ret0, ret1, ret2\n}", "title": "" }, { "docid": "29590d1882379a3b0daa1484f1c6560d", "score": 
"0.4922977", "text": "func (_m *Starfriends_ListStarshipActionsServer) SendMsg(m interface{}) error {\n\tret := _m.Called(m)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(interface{}) error); ok {\n\t\tr0 = rf(m)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "050c7af8278da64c69e99ac0806638f2", "score": "0.49088863", "text": "func TestUnmarshalMessage(t *testing.T) {\n\tmockSchemaRegistry, mockClient, schema, kafkaMessage, avroMessage := setupMockSchemaRegistry(t)\n\tmockClient.On(\"getSchema\", 77, mock.Anything).Return(schema, nil)\n\tmockClient.On(\"unmarshalKafkaMessageMap\", avroMessage).Return([]error{})\n\terrs := mockSchemaRegistry.unmarshalMessage(context.Background(), kafkaMessage, nil)\n\tassert.Empty(t, errs, \"there should be no errors unmarshaling\")\n\tcachedSchema, schemaInCache := mockSchemaRegistry.schemas.Load(uint32(77))\n\trequire.True(t, schemaInCache, \"schema should be in cache\")\n\tassert.Equal(t, schema, cachedSchema.(string))\n}", "title": "" }, { "docid": "040db62ab8d6f8f332d0ebd63bb93300", "score": "0.4907245", "text": "func (_m *MockDatabase) GetMessagesByInboxID(_a0 string) ([]burner.Message, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 []burner.Message\n\tif rf, ok := ret.Get(0).(func(string) []burner.Message); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]burner.Message)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "e3ea91f62b307cdbc43ef956bab349ae", "score": "0.49011812", "text": "func ShouldHaveReceived(iFace interface{}, args ...interface{}) string {\n\tt := new(fakeT)\n\tm, err := getMock(iFace)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\tvar methodName string\n\tif stringVal, ok := args[0].(string); ok {\n\t\tmethodName = stringVal\n\t} else {\n\t\ttyp := reflect.TypeOf(iFace)\n\t\treturn fmt.Sprintf(\"Argument 0 (methodName) should be a string. 
(was %s)\", typ.Name())\n\t}\n\n\tpassed := m.AssertCalled(t, methodName, args[1:]...)\n\n\tif !passed {\n\t\treturn t.error()\n\t}\n\n\treturn testPassedMessage\n}", "title": "" }, { "docid": "885435a16e879ee4cc4050d0dabcad34", "score": "0.4901026", "text": "func (m *MockMessage) MessageID() uint16 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"MessageID\")\n\tret0, _ := ret[0].(uint16)\n\treturn ret0\n}", "title": "" }, { "docid": "ce22a61c89f174c38022524268b53f8d", "score": "0.49010226", "text": "func (_m *QueueProtocolAPI) GetMempool(req *types.ReqGetMempool) (*types.ReplyTxList, error) {\n\tret := _m.Called(req)\n\n\tvar r0 *types.ReplyTxList\n\tif rf, ok := ret.Get(0).(func(*types.ReqGetMempool) *types.ReplyTxList); ok {\n\t\tr0 = rf(req)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*types.ReplyTxList)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*types.ReqGetMempool) error); ok {\n\t\tr1 = rf(req)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "242a3ce87295b1697c5e04fc15318764", "score": "0.49003828", "text": "func (m *Message) GetType() string { return m.Type }", "title": "" }, { "docid": "ce87505c69d087cae70120b1bbdf8441", "score": "0.48984674", "text": "func _TestSendMessage(t *testing.T) {\n\tf := func(test test_tools.Test) []interface{} {\n\t\tserver := NewChatServer(test[\"repo\"].(models.ConversationRepository), nil)\n\t\tresp, err := server.SendNewMessage(context.Background(), test[\"request\"].(*chat.SendMessageRequest))\n\t\treturn []interface{}{resp, err}\n\t}\n\n\tctrl := gomock.NewController(t)\n\tdefer ctrl.Finish()\n\n\tchat1 := &models.Conversation{\n\t\tModel: db.Model{ID: 1},\n\t\tName: \"Test chat\",\n\t}\n\tmember1 := &models.Member{Model: db.Model{ID: 1}}\n\tct := time.Now()\n\tmessage1 := &chat.Message{\n\t\tConversationId: 1,\n\t\tUserId: 1,\n\t\tParts: []*chat.MessagePart{\n\t\t\t{\n\t\t\t\tMimeType: \"text/plain\",\n\t\t\t\tContent: \"test\",\n\t\t\t},\n\t\t},\n\t\tUser: &chat.Member{Id: 1},\n\t\tCreatedAt: ct.Unix(),\n\t}\n\n\trepoSuccess := fixtures.NewMockConversationRepository(ctrl)\n\trepoSuccess.EXPECT().GetByID(gomock.Any()).Return(chat1, nil)\n\trepoSuccess.EXPECT().GetMember(chat1, gomock.Any()).Return(member1, nil)\n\trepoSuccess.EXPECT().AddMessages(chat1, gomock.Any()).Do(func(c *models.Conversation, m *models.Message) {\n\t\tm.ConversationID = uint(c.ID)\n\t\tm.CreatedAt = ct\n\t}).Return(nil)\n\n\trepoNotMember := fixtures.NewMockConversationRepository(ctrl)\n\trepoNotMember.EXPECT().GetByID(gomock.Any()).Return(chat1, nil)\n\trepoNotMember.EXPECT().GetMember(chat1, gomock.Any()).Return(nil, nil)\n\n\ttests := test_tools.Tests{\n\t\t//success\n\t\ttest_tools.Test{\n\t\t\t\"request\": &chat.SendMessageRequest{ConversationId: 1, Messages: []*chat.Message{message1}},\n\t\t\t\"reply\": &chat.SendMessageReply{Chat: &chat.Chat{Id: 1, Name: \"Test chat\"}, Messages: []*chat.Message{message1}},\n\t\t\t\"repo\": repoSuccess,\n\t\t\t\"error\": nil,\n\t\t},\n\t\t//not a member\n\t\ttest_tools.Test{\n\t\t\t\"request\": &chat.SendMessageRequest{1, []*chat.Message{message1}},\n\t\t\t\"reply\": &chat.SendMessageReply{Chat: &chat.Chat{Id: 1, Name: \"Test chat\"}, Error: &chat.Error{\n\t\t\t\tCode: chat.ErrorCode_FORBIDDEN,\n\t\t\t\tMessage: \"User isn't a member\",\n\t\t\t}},\n\t\t\t\"repo\": repoNotMember,\n\t\t\t\"error\": nil,\n\t\t},\n\t}\n\trules := []test_tools.Rule{\n\t\t{test_tools.RuleStr, \"reply\"},\n\t\t{test_tools.RuleStr, \"error\"},\n\t}\n\n\trunner := 
test_tools.NewRunner(tests, f, rules)\n\n\trunner.RunTests()\n\tif runner.HasErrors() {\n\t\tt.Error(runner.Errors)\n\t}\n}", "title": "" }, { "docid": "038cc32604cc072f4b7f258f52222942", "score": "0.48960403", "text": "func Test_GetMessages_Success(t *testing.T) {\n\tdummy, err := ioutil.ReadFile(\"testdata/getmessage.json\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// set up server to return 200 and message list response with three messages\n\tserver, client := setupTestServer(200, dummy, want(t, \"/messages/3423093\", \"GET\", nil))\n\tdefer server.Close()\n\n\tm, err := client.GetMessage(3423093)\n\tif err != nil {\n\t\tt.Fatal(\"got error:\", err)\n\t}\n\n\twant := Message{\n\t\tID: 3423093,\n\t\tStreamID: 48914,\n\t\tText: \"\",\n\t\tAuthor: User{\n\t\t\tID: 50654,\n\t\t\tName: \"Herman Schaaf\",\n\t\t\tAvatar: \"https://sqwiggle-assets.s3.amazonaws.com/assets/api/heart.png\",\n\t\t\tType: TypeUser,\n\t\t\tSupport: false,\n\t\t},\n\t\tAttachments: []Attachment{\n\t\t\t{\n\t\t\t\tID: 206099,\n\t\t\t\tType: TypeImage,\n\t\t\t\tURL: \"https://api.sqwiggle.com/attachments/206099/view\",\n\t\t\t\tTitle: \"gophercolor.png\",\n\t\t\t\tDescription: \"\",\n\t\t\t\tImage: \"https://sqwiggle-assets.s3.amazonaws.com/assets/api/lightning.png\",\n\t\t\t\tCreatedAt: time.Date(2015, time.February, 5, 13, 23, 8, 115000000, time.UTC),\n\t\t\t\tUpdatedAt: time.Date(2015, time.February, 5, 13, 23, 11, 163000000, time.UTC),\n\t\t\t\tAnimated: false,\n\t\t\t\tStatus: \"uploaded\",\n\t\t\t\tWidth: 3861,\n\t\t\t\tHeight: 3861,\n\t\t\t},\n\t\t},\n\t\tMentions: []Mention{},\n\t\tCreatedAt: time.Date(2015, time.February, 5, 13, 23, 8, 111000000, time.UTC),\n\t\tUpdatedAt: time.Date(2015, time.February, 5, 13, 23, 8, 111000000, time.UTC),\n\t}\n\n\tdiff, err := compare(m, want)\n\tif err != nil {\n\t\tt.Fatal(\"Failed to compare structs:\", err)\n\t}\n\tfor k, d := range diff {\n\t\tt.Errorf(\"%q: got %q, want %q\", k, d.a, d.b)\n\t}\n}", "title": "" }, { "docid": "aca8d0b59b18ffe5a889f30b51a36ae4", "score": "0.48918974", "text": "func (f *Frame) messageType() messageType {\n\treturn f.Header.messageType\n}", "title": "" }, { "docid": "915079222f833213f5c7f38da36732db", "score": "0.48913148", "text": "func (_m *Client) Get(_a0 string) (string, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string) string); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "e181de1936a23504e2eec093157cd67f", "score": "0.48855063", "text": "func (msg MsgSendMessage) Type() string { return TypeMsgSendMessage }", "title": "" }, { "docid": "7e5559c9f28421ce271560079fca64d0", "score": "0.48791087", "text": "func (_m *Support) ProcessConfigMsg(env *common.Envelope) (*common.Envelope, uint64, error) {\n\tret := _m.Called(env)\n\n\tvar r0 *common.Envelope\n\tif rf, ok := ret.Get(0).(func(*common.Envelope) *common.Envelope); ok {\n\t\tr0 = rf(env)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*common.Envelope)\n\t\t}\n\t}\n\n\tvar r1 uint64\n\tif rf, ok := ret.Get(1).(func(*common.Envelope) uint64); ok {\n\t\tr1 = rf(env)\n\t} else {\n\t\tr1 = ret.Get(1).(uint64)\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(*common.Envelope) error); ok {\n\t\tr2 = rf(env)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "title": "" }, { "docid": 
"3c2b3b465665834818a06333b57cb9c6", "score": "0.48783755", "text": "func (_m *Requester2) Get(path string) error {\n\tret := _m.Called(path)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string) error); ok {\n\t\tr0 = rf(path)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "47a78be0418daa1f474dc5e1000fa9dc", "score": "0.48769155", "text": "func Mock(args []string, db *bolt.DB, s *dgo.Session, e interface{}) error {\n\tm := e.(*dgo.MessageCreate)\n\tif len(m.Message.Mentions) == 0 {\n\t\ts.ChannelMessageSend(m.ChannelID, \"You didn't mention anyone.\")\n\t\treturn ErrIncorrectArgs\n\t}\n\n\ttarget := m.Message.Mentions[0].ID\n\n\tvar targetMsg string\n\tdb.View(func(tx *bolt.Tx) error {\n\t\tb := tx.Bucket([]byte(m.GuildID))\n\t\tmessages := b.Bucket([]byte(\"messages\"))\n\n\t\ttargetMsg = string(messages.Get([]byte(target)))\n\n\t\treturn nil\n\t})\n\n\t_, err := s.ChannelMessageSend(m.ChannelID, trash.Mock(targetMsg))\n\n\treturn err\n}", "title": "" }, { "docid": "7b354155b0541301aa8b755f72a58a8b", "score": "0.48744637", "text": "func TestNewMapTypeResolution(t *testing.T) {\n\tcaseCode := `\npackage TEST\n\ntype NestedMessageC struct {\n\tA int64\n}\ntype MsgWithMap struct {\n\tBeta map[int64]*NestedMessageC\n}\n`\n\tsd, err := New(map[string]io.Reader{\"/tmp/notreal\": strings.NewReader(caseCode)}, nil)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// findMsg defined here for brevity\n\tfindMsg := func(name string) *Message {\n\t\tfor _, m := range sd.Messages {\n\t\t\tif m.Name == name {\n\t\t\t\treturn m\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\n\tmsg := findMsg(\"MsgWithMap\")\n\tif msg == nil {\n\t\tt.Fatal(\"Couldn't find message 'MsgWithMap'\")\n\t}\n\texpected := findMsg(\"NestedMessageC\")\n\tif expected == nil {\n\t\tt.Fatal(\"Couldn't find message 'NestedMessageC'\")\n\t}\n\n\tbeta := msg.Fields[0].Type.Map\n\n\tif beta.ValueType.Message != expected {\n\t\tt.Fatalf(\"Expected beta ValueType to be 'NestedMessageC', is %q\", beta.ValueType.Message.Name)\n\t}\n\n}", "title": "" }, { "docid": "8e14c5eb6017371ae1c0f55df2da0f3c", "score": "0.48665562", "text": "func TestSQSReaderImpl_GetMessages(t *testing.T) {\n\t// Get a handle on the Mocked SQS client interface\n\tclient := MockedReceiveMsgs{}\n\n\t// Initialise our SQSReaderImpl with a fake URL\n\tq := SQSReaderImpl{\n\t\tClient: client,\n\t\tURL: \"http://fake.url\",\n\t}\n\n\t// Get the list of messages (should have length of 1)\n\tmessages, err := q.GetMessages(20, 10)\n\tmessage := messages[0]\n\n\t// Assertions\n\tConvey(\"Given valid input parameters\", t, func() {\n\t\tSo(q, ShouldNotBeNil)\n\t\tSo(messages, ShouldNotBeNil)\n\t\tSo(err, ShouldBeNil)\n\n\t\tSo(len(messages), ShouldEqual, 1)\n\t\tSo(message.Created, ShouldEqual, \"Now\")\n\t\tSo(message.Url, ShouldEqual, \"/test/url\")\n\t\tSo(message.Term, ShouldEqual, \"test_term\")\n\t\tSo(message.ListType, ShouldEqual, \"test_list_type\")\n\t\tSo(message.GaID, ShouldEqual, \"testgaID\")\n\t\tSo(message.GID, ShouldEqual, \"testgID\")\n\t\tSo(message.PageIndex, ShouldEqual, 0)\n\t\tSo(message.LinkIndex, ShouldEqual, 1)\n\t\tSo(message.PageSize, ShouldEqual, 2)\n\t\tSo(message.ReceiptHandle(), ShouldEqual, \"testHandle\")\n\t})\n}", "title": "" }, { "docid": "6652bdceec290d783f269acff437d886", "score": "0.4855978", "text": "func (_m *MockChatUsecase) Message(_param0 models.User, _param1 models.Message) error {\n\tret := _m.ctrl.Call(_m, \"Message\", _param0, _param1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, 
{ "docid": "0b56ec192a4292b45457ee97f246c112", "score": "0.48553634", "text": "func (o *CustomDeviceCreation) GetMessageType() string {\n\tif o == nil || o.MessageType == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.MessageType\n}", "title": "" }, { "docid": "e3b6575e7920b4e3ede6bca42ef93a2c", "score": "0.48527288", "text": "func (_m *TriggerServiceBinding) ProcessMessage(appContext *appfunction.Context, envelope types.MessageEnvelope, pipeline *interfaces.FunctionPipeline) *runtime.MessageError {\n\tret := _m.Called(appContext, envelope, pipeline)\n\n\tvar r0 *runtime.MessageError\n\tif rf, ok := ret.Get(0).(func(*appfunction.Context, types.MessageEnvelope, *interfaces.FunctionPipeline) *runtime.MessageError); ok {\n\t\tr0 = rf(appContext, envelope, pipeline)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*runtime.MessageError)\n\t\t}\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "7734d60bea5b7e2128e0509fc55b7eb5", "score": "0.4842827", "text": "func (m_2 *MockService_MigrateClient) RecvMsg(m interface{}) error {\n\tm_2.ctrl.T.Helper()\n\tret := m_2.ctrl.Call(m_2, \"RecvMsg\", m)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "1184ea8f0cc3c91aa34f3dcf4f6f66e0", "score": "0.48323414", "text": "func (_m *HTTPCloudEventConverter) Read(t reflect.Type, req *http.Request) (cloudevents.Event, error) {\n\tret := _m.Called(t, req)\n\n\tvar r0 cloudevents.Event\n\tif rf, ok := ret.Get(0).(func(reflect.Type, *http.Request) cloudevents.Event); ok {\n\t\tr0 = rf(t, req)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(cloudevents.Event)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(reflect.Type, *http.Request) error); ok {\n\t\tr1 = rf(t, req)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "title": "" }, { "docid": "9a379d8a53b18112bd551264e88236b8", "score": "0.48251596", "text": "func (_m *MockCall) ToProto() (proto.Message, error) {\n\tret := _m.ctrl.Call(_m, \"ToProto\")\n\tret0, _ := ret[0].(proto.Message)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "4ec6eee2c86bf56e599854189af5b092", "score": "0.48251286", "text": "func (m *_NLMRequestMasterKey) GetMessageType() uint8 {\n\treturn 0x10\n}", "title": "" }, { "docid": "47f17a0c5b76b632a23cd4995a89a8e5", "score": "0.4821857", "text": "func GetMessageType(msg []byte) (id int8, err error) {\n\tdecoder := msgpack.NewDecoder(bytes.NewReader(msg))\n\tid, _, err = decoder.DecodeExtHeader()\n\n\treturn\n}", "title": "" }, { "docid": "51286b431018b6088944daa2420a0690", "score": "0.48210296", "text": "func (m *MockEvent) GetMessageByMessageID(messageID string) (*discordgo.Message, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetMessageByMessageID\", messageID)\n\tret0, _ := ret[0].(*discordgo.Message)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "5eb4707eba55cdcfc551ded403dfee36", "score": "0.4811857", "text": "func testMessageProto(body string) proto.Message {\n\tmsg, err := starlarkproto.FromTextPB(testMessageType, []byte(body), false)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn msg.ToProto()\n}", "title": "" }, { "docid": "d0a9e24723258c4cea8dbbb982af7967", "score": "0.48044556", "text": "func (_m *MockCollectionConfigService_WaitForProbeConfigServer) SendMsg(m interface{}) error {\n\tret := _m.Called(m)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(interface{}) error); ok {\n\t\tr0 = rf(m)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", 
"title": "" }, { "docid": "362dff58833bd5ddaa5da91da151fb7c", "score": "0.48042902", "text": "func (m_2 *MockService_MigrateServer) RecvMsg(m interface{}) error {\n\tm_2.ctrl.T.Helper()\n\tret := m_2.ctrl.Call(m_2, \"RecvMsg\", m)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "fc7772bcb8ddc6270fdc526eda30958e", "score": "0.4801974", "text": "func (m *MessageService_MessageClientMock) Recv() (r *message.Msg, r1 error) {\n\tatomic.AddUint64(&m.RecvPreCounter, 1)\n\tdefer atomic.AddUint64(&m.RecvCounter, 1)\n\n\tif m.RecvFunc == nil {\n\t\tm.t.Fatal(\"Unexpected call to MessageService_MessageClientMock.Recv\")\n\t\treturn\n\t}\n\n\treturn m.RecvFunc()\n}", "title": "" } ]
50beb42bb2b80b8a7876bdd2d84bef23
decodeStringRequest decode request params to struct
[ { "docid": "217890e99fa96f575f371d008d7ae59d", "score": "0.5394539", "text": "func decodeHotPlayMoviesrRequest(ctx context.Context, r *http.Request) (interface{}, error) {\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tfmt.Printf(\"read body err, %v\\n\", err)\n\t\treturn nil, err\n\t}\n\tprintln(\"json-request:\", string(body))\n\n\tvar rhe endpoint.MoviesListRequest // 请求参数解析后放在结构体中\n\tif err = json.Unmarshal(body, &rhe); err != nil {\n\t\tfmt.Printf(\"Unmarshal err, %v\\n\", err)\n\t\treturn nil, err\n\t}\n\tfmt.Println(\"request\", rhe)\n\n\treturn &rhe, nil\n}", "title": "" } ]
[ { "docid": "861e5183f2ab9898159fd072a6c77ff2", "score": "0.7416616", "text": "func decodeStringRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tvars := mux.Vars(r)\n\trequestType, ok := vars[\"type\"]\n\tif !ok {\n\t\treturn nil, ErrorBadRequest\n\t}\n\n\tpa, ok := vars[\"a\"]\n\tif !ok {\n\t\treturn nil, ErrorBadRequest\n\t}\n\n\tpb, ok := vars[\"b\"]\n\tif !ok {\n\t\treturn nil, ErrorBadRequest\n\t}\n\n\treturn endpoint.StringRequest{\n\t\tRequestType: requestType,\n\t\tA: pa,\n\t\tB: pb,\n\t}, nil\n}", "title": "" }, { "docid": "01eb3f2151dd39cccab5b97501692319", "score": "0.63759947", "text": "func DecodeRequest(body []byte, validFields []string, objPtr interface{}) error {\n\trv := reflect.ValueOf(objPtr)\n\tif rv.Kind() != reflect.Ptr || rv.IsNil() {\n\t\tpanic(errors.New(\"obj should be a pointer to a struct\"))\n\t}\n\n\tobjType := rv.Elem().Type()\n\tfor _, fieldName := range validFields {\n\t\t_, ok := objType.FieldByName(fieldName)\n\t\tif !ok {\n\t\t\tpanic(fmt.Sprintf(\"%s is not a part of %s\", fieldName, objType.String()))\n\t\t}\n\t}\n\n\trequestValues := make(map[string]json.RawMessage)\n\terr := json.Unmarshal(body, &requestValues)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfieldMap := getFieldsMappedToJsonTag(objType)\n\n\terrs := make([]string, 0)\n\tfor jsonKey := range requestValues {\n\t\tif _, ok := fieldMap[jsonKey]; !ok {\n\t\t\terrs = append(errs, fmt.Sprintf(\"%s: This property does not exist.\", jsonKey))\n\t\t\tcontinue\n\t\t}\n\n\t\t// don't add an additional validation checks if the array is empty\n\t\tif len(validFields) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tfield := fieldMap[jsonKey]\n\t\tcanBeSet := false\n\t\tfor _, f := range validFields {\n\t\t\tif f == field {\n\t\t\t\tcanBeSet = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\tif !canBeSet {\n\t\t\terrs = append(errs, fmt.Sprintf(\"%s: This property is not allowed to be set.\", jsonKey))\n\t\t}\n\t}\n\n\tif len(errs) > 0 {\n\t\treturn errors.New(strings.Join(errs, ERROR_DELIMITER))\n\t}\n\n\t// decode each key in the request individually\n\tfor jsonKey, rawJson := range requestValues {\n\t\tfield := rv.Elem().FieldByName(fieldMap[jsonKey])\n\t\tfieldValuePtr := reflect.New(field.Type()).Interface()\n\n\t\terr := json.Unmarshal(rawJson, fieldValuePtr)\n\t\tif err != nil {\n\t\t\terrs = append(errs, err.Error())\n\t\t} else {\n\t\t\tfield.Set(reflect.ValueOf(fieldValuePtr).Elem())\n\t\t}\n\t}\n\n\tif len(errs) > 0 {\n\t\treturn errors.New(strings.Join(errs, ERROR_DELIMITER))\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "16a1a915485184c5c8c3ffa819a25f2d", "score": "0.6285147", "text": "func deserializeRequestStruct(ret []byte) *requestStruct {\n\trq := new(requestStruct)\n\trq.RequestType = ret[0]\n\trq.length = binary.LittleEndian.Uint64(ret[1:9])\n\tcopy(rq.Requester[0:rsautil.KeySize], ret[9:9+rsautil.KeySize])\n\trq.data = make([]byte, rq.length)\n\n\tcopy(rq.data[0:rq.length], ret[9+rsautil.KeySize:9+rsautil.KeySize+rq.length])\n\treturn rq\n}", "title": "" }, { "docid": "6ddc327ea8a8aa71efa9e3d778494a09", "score": "0.61025715", "text": "func decodeRequestQuery(r *http.Request, v interface{}) error {\n\tif err := schema.NewDecoder().Decode(v, r.URL.Query()); err != nil {\n\t\tlog.WithField(\"err\", err).Info(\"Invalid request query\")\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "06fe551084ba185a277832123ad3e1cf", "score": "0.6087386", "text": "func UnmarshalWithParams(b []byte, val interface{}, params string) (rest []byte, err error) {}", "title": "" 
}, { "docid": "7fd2ced16c7edb5c8891ec865f53e413", "score": "0.60840607", "text": "func (r *Request) UnmarshalParams(v interface{}) error { return json.Unmarshal(r.params, v) }", "title": "" }, { "docid": "d5a11d9217063ed36663a8d7ae8fa773", "score": "0.6076518", "text": "func decodeCalculatorRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tvars := mux.Vars(r)\n\n\ta, ok := vars[\"a\"]\n\tif !ok {\n\t\treturn nil, ErrBadRouting\n\t}\n\n\tb, ok := vars[\"b\"]\n\tif !ok {\n\t\treturn nil, ErrBadRouting\n\t}\n\n\taint, _ := strconv.Atoi(a)\n\tbint, _ := strconv.Atoi(b)\n\treturn CalculatorRequest{\n\t\tA: aint,\n\t\tB: bint,\n\t}, nil\n}", "title": "" }, { "docid": "fa3df3d8d41f278455cbeec346980410", "score": "0.6073206", "text": "func DecodeRequestString(r *http.Request) (string, error) {\n\tif r.Body == http.NoBody || r.Body == nil {\n\t\treturn \"\", errdefs.InvalidParameter(errors.New(\"http body is required\"))\n\t}\n\n\tb, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\treturn \"\", errdefs.InvalidParameter(fmt.Errorf(\"failed to decode request body: %w\", err))\n\t}\n\n\treturn string(b), nil\n}", "title": "" }, { "docid": "6a78c16712d7da029a4ec777df531539", "score": "0.5986136", "text": "func DecodeQuestionRequest(ctx context.Context, r *http.Request) (interface{}, error) {\n var req QuestionRequest\n err := json.NewDecoder(r.Body).Decode(&req)\n if err != nil {\n return nil, err\n }\n return req, nil\n}", "title": "" }, { "docid": "dc2b441329e2b68c5c71309e9507c855", "score": "0.59854585", "text": "func DecodeParams(val string, r *http.Request) string {\n\tparam := mux.Vars(r)\n\treturn param[val]\n}", "title": "" }, { "docid": "255fa7c0d1349b698b1b77206fb35503", "score": "0.5954431", "text": "func decodeGetTagRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\treq := endpoint.GetTagRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "e7ddbbe8dfead85c7a04d86d6e14462d", "score": "0.5882746", "text": "func decodeVerifyRequest(_ context.Context, r interface{}) (interface{}, error) {\n\trq := r.(*pb.VerifyRequest)\n\n\treturn endpoint.VerifyRequest{\n\t\tToken: rq.Token,\n\t\tType: rq.Type,\n\t\tCode: rq.Code,\n\t}, nil\n}", "title": "" }, { "docid": "1201275d585ed0ba50139fc13ce2096c", "score": "0.5876347", "text": "func decodeUpdateTagRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\treq := endpoint.UpdateTagRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "9963b2701725f65488073ed7c9f56848", "score": "0.5870464", "text": "func decodeGetDealByStateRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\tvals := r.URL.Query()\n\tstate := \"\"\n\tstates, okk := vals[\"state\"]\n\tif okk {\n\t\tstate = states[0]\n\t}\n\treq := endpoint.GetDealByStateRequest{\n\t\tState: state,\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "d647b840b842589a0ba8689edd5532db", "score": "0.58484125", "text": "func parseFieldParameters(str string) (params fieldParameters) {}", "title": "" }, { "docid": "aba850b08057f75097f0837bb069c409", "score": "0.58477515", "text": "func DecodeUserRequest(ctx context.Context, r *http.Request) (interface{}, error) {\n var req UserRequest\n req.User = mux.Vars(r)[\"user\"]\n return req, nil\n}", "title": "" }, { "docid": "6b22cd290033d984ce8496b89c4ea6d3", "score": "0.5844706", "text": "func decodeGetUserRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\treq := 
endpoint.GetUserRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "7ebef0d6b0410ca7260515119ce7e568", "score": "0.579933", "text": "func parseRequest(str string, req *Request) error {\n\tchunks := strings.Split(str, \" \")\n\tif len(chunks) != 3 {\n\t\treturn fmt.Errorf(\"invalid request format\")\n\t}\n\n\treq.Method = chunks[0]\n\treq.Proto = chunks[2]\n\n\tif uri, err := url.Parse(chunks[1]); err == nil {\n\t\treq.Host = uri.Host\n\t\treq.Path = uri.Path\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "c7c48b0d0e7e17cc4dabf183f79dedf4", "score": "0.57871217", "text": "func DecodeRequest[I any](ctx context.Context, r *http.Request) (in I, err error) {\n\tswitch r.Method {\n\tcase \"POST\", \"PUT\", \"PATCH\":\n\t\terr = magic.Decode(r, &in,\n\t\t\tmagic.JSON,\n\t\t\tmagic.ChiRouter,\n\t\t)\n\tcase \"GET\", \"DELETE\":\n\t\terr = magic.Decode(r, &in,\n\t\t\tmagic.QueryParams,\n\t\t\tmagic.ChiRouter,\n\t\t)\n\tdefault:\n\t\terr = errors.Errorf(\"method %s not supported\", r.Method)\n\t}\n\n\tif err == io.EOF {\n\t\terr = errors.New(\"empty body\")\n\t}\n\n\treturn in, errors.E(err, \"can not unmarshal request\", errors.Unmarshal)\n}", "title": "" }, { "docid": "bea392b5d7eedfe9b6ebc7ec9571c8f1", "score": "0.5784374", "text": "func decodeDeleteTagRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\treq := endpoint.DeleteTagRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "cfa98787c0e36bc6c91c903ab22b3d34", "score": "0.5746106", "text": "func decodeCreateTagRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\treq := endpoint.CreateTagRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "ddb7a032eb83431e0e671e7dd2f18767", "score": "0.57316273", "text": "func decodeGetRequest(ctx context.Context, r *http.Request) (interface{}, error) {\n\tvar req getRequest\n\tsymbol := mux.Vars(r)[\"symbol\"]\n\treq.symbol = symbol\n\treturn req, nil\n}", "title": "" }, { "docid": "32f3e3c06c2b4198788d51d8d0d51947", "score": "0.57234067", "text": "func decodeSubRequest(ctx context.Context, r *http.Request) (interface{}, error) {\n\tvar req subRequest\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "f51eb25af067dd604d5f68ff5d2f7099", "score": "0.5712689", "text": "func decodeRequest(_ context.Context, r *http.Request) (request interface{}, err error) {\n\tdefer r.Body.Close()\n\treturn nil, nil\n}", "title": "" }, { "docid": "1415a035cf3c3a1cd97e824c49692056", "score": "0.5711706", "text": "func decodePutDealStateRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\treq := endpoint.PutDealStateRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "daae0383e021b45321fe499f6cd7a8c9", "score": "0.568516", "text": "func decodeListOrderRequest(_ context.Context, r *stdhttp.Request) (interface{}, error) {\n\tqp := processBasicQP(r)\n\treturn qp, nil\n}", "title": "" }, { "docid": "466eb0284edf0d314a31f3cf537049d8", "score": "0.5684304", "text": "func DecodeIDRequest(ctx context.Context, r *http.Request) (interface{}, error) {\n var req IDRequest\n req.Id = mux.Vars(r)[\"id\"]\n return req, nil\n}", "title": "" }, { "docid": "37f7472308a3d75b99e3fb458534dc29", "score": "0.5679625", "text": "func decodeGetPostRequest(_ context.Context, r *http.Request) 
(interface{}, error) {\n\treq := endpoint.GetPostRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "3cde9161e8e7c27e508e38abf9183085", "score": "0.567473", "text": "func decodeCreateUserRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\treq := endpoint.CreateUserRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "60452626cb6f389f48e23233ea486abf", "score": "0.5666416", "text": "func decodeUpdatePostRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\treq := endpoint.UpdatePostRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "04f9d801b955fc82597c836e93df4b20", "score": "0.5662553", "text": "func decodeGetByCreteriaRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\n\tvars := mux.Vars(r)\n\tname, ok := vars[\"name\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"not a valid creteria\")\n\t}\n\treq := endpoint.GetByCreteriaRequest{\n\t\tCreteria: name,\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "3ec7b3f08b7879665621652cd68f6472", "score": "0.5650108", "text": "func DecodeRemoveRequest(_ context.Context, r *http.Request) (req interface{}, err error) {\n\treq = endpoints.RemoveRequest{Id: mux.Vars(r)[\"id\"]}\n\t//err = json.NewDecoder(r.Body).Decode(&r)\n\treturn req, err\n}", "title": "" }, { "docid": "69f919e251b75ea1e942b2ec7457d42e", "score": "0.5649946", "text": "func decodeDeletePostRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\treq := endpoint.DeletePostRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "9017f940f31906a0f73179dffb3f1a15", "score": "0.56487226", "text": "func decodeGetByMultiCriteriaRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\treq := endpoint.GetByMultiCriteriaRequest{\n\t\tUrlMap: r.URL.String(),\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "420d87b19d70db2be72299fa994ab710", "score": "0.5615043", "text": "func extractStruct(r *http.Request, inputStructPtr interface{}) error {\n\tb, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"can't read input data: %v, err: %v\", string(b), err)\n\t}\n\n\t// decode input data\n\tif err = json.Unmarshal(b, inputStructPtr); err != nil {\n\t\treturn fmt.Errorf(\"can't decode input data: %v, err: %v\", string(b), err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "592f4255dda905f12922890fec1004a3", "score": "0.56017834", "text": "func decodeDeleteRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\tvars := mux.Vars(r)\n\tid, ok := vars[\"id\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"not a valid ID\")\n\t}\n\treq := endpoint.DeleteRequest{\n\t\tId: id,\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "592f4255dda905f12922890fec1004a3", "score": "0.56017834", "text": "func decodeDeleteRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\tvars := mux.Vars(r)\n\tid, ok := vars[\"id\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"not a valid ID\")\n\t}\n\treq := endpoint.DeleteRequest{\n\t\tId: id,\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "3090360df546d8f42c633a7b5229ad93", "score": "0.557434", "text": "func DecodeRequest(r BytesReader) ([]string, error) {\n\t// Decode the value\n\tval, err := Decode(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Must be an array\n\tar, ok := val.(Array)\n\tif !ok 
{\n\t\treturn nil, ErrNotAnArray\n\t}\n\n\t// Must have at least one element\n\tif len(ar) < 1 {\n\t\treturn nil, ErrInvalidRequest\n\t}\n\n\t// Must have only strings\n\tstrs := make([]string, len(ar))\n\tfor i, v := range ar {\n\t\tv, ok := v.(string)\n\t\tif !ok {\n\t\t\treturn nil, ErrInvalidRequest\n\t\t}\n\t\tstrs[i] = v\n\t}\n\treturn strs, nil\n}", "title": "" }, { "docid": "22eb7faaf4ab82ce47af862e9c6f4479", "score": "0.5574205", "text": "func decodeGetRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\treq := endpoint.GetRequest{}\n\treturn req, nil\n}", "title": "" }, { "docid": "22eb7faaf4ab82ce47af862e9c6f4479", "score": "0.5574205", "text": "func decodeGetRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\treq := endpoint.GetRequest{}\n\treturn req, nil\n}", "title": "" }, { "docid": "8be73305af33ec0d9a44b740b27df257", "score": "0.55634236", "text": "func decodeRegisterRequest(_ context.Context, r interface{}) (interface{}, error) {\n\treq := r.(*pb.RegisterRequest)\n\t//进行数据的转换\n\tvar user = service.UserInfo{\n\t\tId:req.User.Id,\n\t\tPhone:req.User.Phone,\n\t\tPassword:req.User.Password,\n\t\tAge:req.User.Age,\n\n\t}\n\treturn endpoint.RegisterRequest{\n\t\tUser:user,\n\t},nil\n}", "title": "" }, { "docid": "644d21fc1fd72dabdfadc95d6bc0fab0", "score": "0.5548539", "text": "func unpackRequestBody(decoder *hessian.Decoder, reqObj interface{}) error {\n\tif decoder == nil {\n\t\treturn perrors.Errorf(\"@decoder is nil\")\n\t}\n\n\treq, ok := reqObj.([]interface{})\n\tif !ok {\n\t\treturn perrors.Errorf(\"@reqObj is not of type: []interface{}\")\n\t}\n\tif len(req) < 7 {\n\t\treturn perrors.New(\"length of @reqObj should be 7\")\n\t}\n\n\tvar (\n\t\terr error\n\t\tdubboVersion, target, serviceVersion, method, argsTypes interface{}\n\t\targs []interface{}\n\t)\n\n\tdubboVersion, err = decoder.Decode()\n\tif err != nil {\n\t\treturn perrors.WithStack(err)\n\t}\n\treq[0] = dubboVersion\n\n\ttarget, err = decoder.Decode()\n\tif err != nil {\n\t\treturn perrors.WithStack(err)\n\t}\n\treq[1] = target\n\n\tserviceVersion, err = decoder.Decode()\n\tif err != nil {\n\t\treturn perrors.WithStack(err)\n\t}\n\treq[2] = serviceVersion\n\n\tmethod, err = decoder.Decode()\n\tif err != nil {\n\t\treturn perrors.WithStack(err)\n\t}\n\treq[3] = method\n\n\targsTypes, err = decoder.Decode()\n\tif err != nil {\n\t\treturn perrors.WithStack(err)\n\t}\n\treq[4] = argsTypes\n\n\tats := DescRegex.FindAllString(argsTypes.(string), -1)\n\tvar arg interface{}\n\tfor i := 0; i < len(ats); i++ {\n\t\targ, err = decoder.Decode()\n\t\tif err != nil {\n\t\t\treturn perrors.WithStack(err)\n\t\t}\n\t\targs = append(args, arg)\n\t}\n\treq[5] = args\n\n\tattachments, err := decoder.Decode()\n\tif err != nil {\n\t\treturn perrors.WithStack(err)\n\t}\n\tif v, ok := attachments.(map[interface{}]interface{}); ok {\n\t\tv[DUBBO_VERSION_KEY] = dubboVersion\n\t\treq[6] = ToMapStringInterface(v)\n\t\treturn nil\n\t}\n\n\treturn perrors.Errorf(\"get wrong attachments: %+v\", attachments)\n}", "title": "" }, { "docid": "2169eb6a20b9f3c273b0921289f44a71", "score": "0.55472624", "text": "func (r *Request) Decode(v interface{}, tag string) error {\n\tvar (\n\t\terr error\n\t\tct = r.RequestCtx.Request.Header.ContentType()\n\t)\n\n\t// Validate compulsory fields in JSON body. 
The struct to be unmarshaled into needs a struct tag with required=true for enforcing presence.\n\tif bytes.Contains(ct, constJSON) {\n\t\terr = json.Unmarshal(r.RequestCtx.PostBody(), &v)\n\t} else if bytes.Contains(ct, constXML) {\n\t\terr = xml.Unmarshal(r.RequestCtx.PostBody(), &v)\n\t} else {\n\t\t_, err = ScanArgs(r.RequestCtx.PostArgs(), v, tag)\n\t}\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error decoding request: %v\", err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c9e4cf88a3a7dda6e93afec666914bd0", "score": "0.55413496", "text": "func decodePostAcceptDealRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\treq := endpoint.PostAcceptDealRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "b95b5386a5fa8aef0b972420708315ca", "score": "0.55404013", "text": "func (recorder *ReqRecorder) Unmarshal(params []byte) (interface{}, error) {\n\treturn params, nil\n}", "title": "" }, { "docid": "90f1893e98d0fdc16993fb88ba6004cd", "score": "0.553442", "text": "func decodeCreatePostRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\treq := endpoint.CreatePostRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "2081827fd354d0ace1848b5767539197", "score": "0.5527696", "text": "func decodeRegisterRequest(_ context.Context, r interface{}) (interface{}, error) {\n\trq := r.(*pb.RegisterRequest)\n\treturn endpoint.RegisterRequest{\n\t\tUsername: rq.Username,\n\t\tPassword: rq.Password,\n\t\tName: rq.Name,\n\t\tLastName: rq.LastName,\n\t\tPhone: rq.Phone,\n\t\tEmail: rq.Email,\n\t}, nil\n}", "title": "" }, { "docid": "1efc2ae83380b40867880c12363f8868", "score": "0.55244565", "text": "func DecodeUserDetailsRequest(_ context.Context, r interface{}) (interface{}, error) {\n\treq := r.(*pb.UserDetailsRequest)\n\treturn &UserDetailsRequest{\n\t\tJwt: req.Jwt,\n\t}, nil\n}", "title": "" }, { "docid": "a38028f2065bf521cd59c1934e274310", "score": "0.551018", "text": "func Unpack(req *http.Request, ptr interface{}) error {\n\tif err := req.ParseForm(); err != nil {\n\t\treturn err\n\t}\n\n\t// Build map of fields keyed by effective name.\n\tfields := make(map[string]reflect.Value)\n\tv := reflect.ValueOf(ptr).Elem() // the struct variable\n\tfor i := 0; i < v.NumField(); i++ {\n\t\tfieldInfo := v.Type().Field(i) // a reflect.StructField\n\t\ttag := fieldInfo.Tag // a reflect.StructTag\n\t\tname := tag.Get(\"http\")\n\t\tif name == \"\" {\n\t\t\tname = strings.ToLower(fieldInfo.Name)\n\t\t}\n\t\tfields[name] = v.Field(i)\n\t}\n\n\t// Update struct field for each parameter in the request.\n\tfor name, values := range req.Form {\n\t\tf := fields[name]\n\t\tif !f.IsValid() {\n\t\t\tcontinue // ignore unrecognized HTTP parameters\n\t\t}\n\t\tfor _, value := range values {\n\t\t\tif f.Kind() == reflect.Slice {\n\t\t\t\telem := reflect.New(f.Type().Elem()).Elem()\n\t\t\t\tif err := populate(elem, value); err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"%s: %v\", name, err)\n\t\t\t\t}\n\t\t\t\tf.Set(reflect.Append(f, elem))\n\t\t\t} else {\n\t\t\t\tif err := populate(f, value); err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"%s: %v\", name, err)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "a38028f2065bf521cd59c1934e274310", "score": "0.551018", "text": "func Unpack(req *http.Request, ptr interface{}) error {\n\tif err := req.ParseForm(); err != nil {\n\t\treturn err\n\t}\n\n\t// Build map of fields keyed by effective name.\n\tfields := 
make(map[string]reflect.Value)\n\tv := reflect.ValueOf(ptr).Elem() // the struct variable\n\tfor i := 0; i < v.NumField(); i++ {\n\t\tfieldInfo := v.Type().Field(i) // a reflect.StructField\n\t\ttag := fieldInfo.Tag // a reflect.StructTag\n\t\tname := tag.Get(\"http\")\n\t\tif name == \"\" {\n\t\t\tname = strings.ToLower(fieldInfo.Name)\n\t\t}\n\t\tfields[name] = v.Field(i)\n\t}\n\n\t// Update struct field for each parameter in the request.\n\tfor name, values := range req.Form {\n\t\tf := fields[name]\n\t\tif !f.IsValid() {\n\t\t\tcontinue // ignore unrecognized HTTP parameters\n\t\t}\n\t\tfor _, value := range values {\n\t\t\tif f.Kind() == reflect.Slice {\n\t\t\t\telem := reflect.New(f.Type().Elem()).Elem()\n\t\t\t\tif err := populate(elem, value); err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"%s: %v\", name, err)\n\t\t\t\t}\n\t\t\t\tf.Set(reflect.Append(f, elem))\n\t\t\t} else {\n\t\t\t\tif err := populate(f, value); err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"%s: %v\", name, err)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "f0b264cce68e6f781f42dacf71f20af8", "score": "0.5505385", "text": "func decodeUploadRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\treq := endpoint.UploadRequest{}\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "c75a07f97e79327b0aef63c73baf3761", "score": "0.55036867", "text": "func decodeCreateOrderRequest(_ context.Context, r *stdhttp.Request) (interface{}, error) {\n\treq := dto.CreateOrderRequest{}\n\tdecoder := json.NewDecoder(r.Body)\n\tdecoder.DisallowUnknownFields()\n\terr := decoder.Decode(&req)\n\tif err != nil {\n\t\terr = ce.ErrInvalidReqBody\n\t}\n\treturn req, err\n}", "title": "" }, { "docid": "f0f4eabda8dce6834db86b8bbe5e66ca", "score": "0.5502936", "text": "func Decode(ctx context.Context, r *http.Request, val interface{}) error {\n\n\tif r.Method == http.MethodPost || r.Method == http.MethodPut || r.Method == http.MethodPatch || r.Method == http.MethodDelete {\n\t\tdecoder := json.NewDecoder(r.Body)\n\t\tdecoder.DisallowUnknownFields()\n\t\tif err := decoder.Decode(val); err != nil {\n\t\t\treturn weberror.NewErrorMessage(ctx, err, http.StatusBadRequest, \"decode request body failed\")\n\t\t}\n\t} else {\n\t\tdecoder := schema.NewDecoder()\n\t\tif err := decoder.Decode(val, r.URL.Query()); err != nil {\n\t\t\terr = errors.Wrap(err, \"decode request query failed\")\n\t\t\treturn weberror.NewErrorMessage(ctx, err, http.StatusBadRequest, \"decode request query failed\")\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "81f53873e7fc4a191dd3de5b60e3689b", "score": "0.5478044", "text": "func (r *Route) DecodeRequest(req interface{}) *Route {\n\tr.request = reflect.TypeOf(req)\n\tif r.request.Kind() != reflect.Ptr {\n\t\tpanic(\"request structure must be a pointer\")\n\t}\n\treturn r\n}", "title": "" }, { "docid": "cec92b163e2b81854d23516e5cf4c8b8", "score": "0.54757833", "text": "func decodeRequest(r io.Reader) *plugin.CodeGeneratorRequest {\n\tvar req plugin.CodeGeneratorRequest\n\tinput, err := ioutil.ReadAll(r)\n\tif err != nil {\n\t\tlog.Fatal(\"unable to read stdin: \" + err.Error())\n\t}\n\tif err := proto.Unmarshal(input, &req); err != nil {\n\t\tlog.Fatal(\"unable to marshal stdin as protobuf: \" + err.Error())\n\t}\n\treturn &req\n}", "title": "" }, { "docid": "bd9825578bc6774bd895c3fdb59fc932", "score": "0.54701513", "text": "func decodeRequestForm(r *http.Request, v interface{}) error {\n\tif err := r.ParseForm(); err != nil 
{\n\t\tlog.WithField(\"err\", err).Info(\"Invalid request form\")\n\t\treturn err\n\t}\n\tdecoder := schema.NewDecoder()\n\tdecoder.IgnoreUnknownKeys(true)\n\treturn decoder.Decode(v, r.PostForm)\n}", "title": "" }, { "docid": "bdb43e07c111c7e1ae2f513a15725e83", "score": "0.5469636", "text": "func decodeHelloRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tvar request helloRequest\n\tif err := json.NewDecoder(r.Body).Decode(&request); err != nil {\n\t\treturn nil, err\n\t}\n\treturn request, nil\n}", "title": "" }, { "docid": "77d3225953bb93e62670c81d26e7c07c", "score": "0.54398257", "text": "func decodeGetByIDRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\tvars := mux.Vars(r)\n\tid, ok := vars[\"id\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"not a valid ID\")\n\t}\n\treq := endpoint.GetByIDRequest{\n\t\tId: id,\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "77d3225953bb93e62670c81d26e7c07c", "score": "0.54398257", "text": "func decodeGetByIDRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\tvars := mux.Vars(r)\n\tid, ok := vars[\"id\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"not a valid ID\")\n\t}\n\treq := endpoint.GetByIDRequest{\n\t\tId: id,\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "5d4dcd4564c8a6c220486996d8aacc0f", "score": "0.54381144", "text": "func DecodeRequest(ctx context.Context, req *http.Request, pathParams map[string]string, queryParams map[string]string) (interface{}, error) {\n\treturn DecodeRequestWithHeaders(ctx, req, pathParams, queryParams, nil)\n}", "title": "" }, { "docid": "c1c92c828b81b82e46e59266c0e40222", "score": "0.5420812", "text": "func DecodeRequestWithHeaders(_ context.Context, req *http.Request, pathParams map[string]string, queryParams map[string]string, headers []string) (interface{}, error) {\n\tvar request = map[string]string{}\n\n\t// Fetch and validate path parameter such as realm, userID, ...\n\tvar m = mux.Vars(req)\n\tfor key, validationRegExp := range pathParams {\n\t\tif v, ok := m[key]; ok {\n\t\t\tif matched, _ := regexp.Match(validationRegExp, []byte(v)); !matched {\n\t\t\t\treturn nil, errorhandler.CreateInvalidPathParameterError(key)\n\t\t\t}\n\t\t\trequest[key] = m[key]\n\t\t}\n\t}\n\n\trequest[\"scheme\"] = getScheme(req)\n\trequest[\"host\"] = req.Host\n\n\tbuf := new(bytes.Buffer)\n\t_, _ = buf.ReadFrom(req.Body)\n\t// Input validation of body content should be performed once the content is unmarshalled (Endpoint layer)\n\trequest[\"body\"] = buf.String()\n\n\t// Fetch and validate query parameter such as email, firstName, ...\n\tfor key, validationRegExp := range queryParams {\n\t\tif value := req.URL.Query().Get(key); value != \"\" {\n\t\t\tif matched, _ := regexp.Match(validationRegExp, []byte(value)); !matched {\n\t\t\t\treturn nil, errorhandler.CreateInvalidQueryParameterError(key)\n\t\t\t}\n\n\t\t\trequest[key] = value\n\t\t}\n\t}\n\n\tfor _, headerKey := range headers {\n\t\trequest[headerKey] = req.Header.Get(headerKey)\n\t}\n\n\treturn request, nil\n}", "title": "" }, { "docid": "e216a022b6c925b64918533e2be9da97", "score": "0.5420152", "text": "func decodeGRPCEchoRequest(_ context.Context, grpcReq interface{}) (interface{}, error) {\n\treq := grpcReq.(*pb.EchoRequest)\n\treturn endpoints.EchoRequest{Word: req.Word}, nil\n}", "title": "" }, { "docid": "cf86b73af0caff6f9ee84d2879c2f8ac", "score": "0.5405533", "text": "func decodeLoginUPRequest(_ context.Context, r interface{}) (interface{}, error) {\n\trq := r.(*pb.LoginUPRequest)\n\treturn 
endpoint.LoginUPRequest{\n\t\tUsername: rq.Username,\n\t\tPassword: rq.Password,\n\t}, nil\n}", "title": "" }, { "docid": "a0b758582f63958f672c9cd5373c121c", "score": "0.53965837", "text": "func decodeReq(r *http.Request, to interface{}) error {\n\tif err := json.NewDecoder(r.Body).Decode(to); err != nil {\n\t\tif err != io.EOF {\n\t\t\treturn errors.WithStack(err)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "2d5f668965bbdcf2cbb90f6902a17677", "score": "0.53950995", "text": "func Unpack(req *http.Request, ptr interface{}) error {\n\tif err := req.ParseForm(); err != nil {\n\t\treturn err\n\t}\n\n\t// Build map of fields keyed by effective name.\n\tfields := make(map[string]*field)\n\tv := reflect.ValueOf(ptr).Elem() // the struct variable\n\tfor i := 0; i < v.NumField(); i++ {\n\t\tfieldInfo := v.Type().Field(i) // a reflect.StructField\n\t\ttag := fieldInfo.Tag // a reflect.StructTag\n\t\tname := tag.Get(\"http\")\n\t\tif name == \"\" {\n\t\t\tname = strings.ToLower(fieldInfo.Name)\n\t\t}\n\t\trules, err := parseValidateField(tag.Get(\"validate\"))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfields[name] = &field{v.Field(i), rules}\n\t}\n\n\t// Update struct field for each parameter in the request.\n\tfor name, values := range req.Form {\n\t\tf := fields[name]\n\t\tif !f.v.IsValid() {\n\t\t\tcontinue // ignore unrecognized HTTP parameters\n\t\t}\n\t\tfor _, value := range values {\n\t\t\tif err := validate(f.rules, name, value); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif f.v.Kind() == reflect.Slice {\n\t\t\t\telem := reflect.New(f.v.Type().Elem()).Elem()\n\t\t\t\tif err := populate(elem, value); err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"%s: %v\", name, err)\n\t\t\t\t}\n\t\t\t\tf.v.Set(reflect.Append(f.v, elem))\n\t\t\t} else {\n\t\t\t\tif err := populate(f.v, value); err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"%s: %v\", name, err)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "94a86624bf6ba3a0d5e62cfc40a4a5dc", "score": "0.5382957", "text": "func decodeGetUserDealByStateRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\tvars := mux.Vars(r)\n\tvals := r.URL.Query()\n\tstate := \"\"\n\tid, ok := vars[\"userId\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"not a valid userId\")\n\t}\n\tstates, okk := vals[\"state\"]\n\tif okk {\n\t\tstate = states[0]\n\t}\n\treq := endpoint.GetUserDealByStateRequest{\n\t\tId: id,\n\t\tState: state,\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "72cbf07764250b8aea8ccab12ecc6cfb", "score": "0.5376863", "text": "func decodeDeleteBookRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tbookId, err := parseBookId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Make request to delete book\n\tvar request deleteBookRequest\n\trequest = deleteBookRequest{\n\t\tBookId: bookId,\n\t}\n\n\treturn request, nil\n}", "title": "" }, { "docid": "931540f8e218dfd358303dc642d3cdfd", "score": "0.53720385", "text": "func DeSerializeQuery(bytes []byte) Query {\n if len(bytes) != 32 {\n fmt.Println(\"Error : bytes length is not 32. 
Its \", len(bytes))\n }\n\n return Query {\n action : bytes[0],\n empty : 0,\n replyIp : binary.BigEndian.Uint32(bytes[2:6]),\n replyPort : binary.BigEndian.Uint16(bytes[6:8]),\n key : binary.BigEndian.Uint64(bytes[8:16]),\n value : binary.BigEndian.Uint64(bytes[16:24]),\n timeToLive: binary.BigEndian.Uint32(bytes[24:28]),\n requestId : binary.BigEndian.Uint32(bytes[28:32]),\n }\n}", "title": "" }, { "docid": "df52fe43159c32cc91f03cd815a1a61d", "score": "0.537193", "text": "func (pkt *SubModRequest) Decode(bytes []byte) (err error) {\n\tvar used int\n\toffset := 4 // header\n\n\tpkt.XID, used, err = XdrGetUint32(bytes[offset:])\n\tif err != nil {\n\t\treturn err\n\t}\n\toffset += used\n\n\tpkt.SubID, used, err = XdrGetInt64(bytes[offset:])\n\tif err != nil {\n\t\treturn err\n\t}\n\toffset += used\n\n\tpkt.Expression, used, err = XdrGetString(bytes[offset:])\n\tif err != nil {\n\t\treturn err\n\t}\n\toffset += used\n\n\tpkt.AcceptInsecure, used, err = XdrGetBool(bytes[offset:])\n\tif err != nil {\n\t\treturn err\n\t}\n\toffset += used\n\n\tpkt.AddKeys, used, err = XdrGetKeys(bytes[offset:])\n\tif err != nil {\n\t\treturn err\n\t}\n\toffset += used\n\n\tpkt.DelKeys, used, err = XdrGetKeys(bytes[offset:])\n\tif err != nil {\n\t\treturn err\n\t}\n\toffset += used\n\n\treturn nil\n}", "title": "" }, { "docid": "00a69c96119cc0aa775088afff3938e7", "score": "0.5363948", "text": "func decodeCreateBookRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t// Get bearer from headers\n\tbearer := parseBearer(r)\n\n\t///////////////////\n\t// Parse body\n\tvar createBookRequest createBookRequest\n\tif err := json.NewDecoder(r.Body).Decode(&createBookRequest); err != nil {\n\t\tfmt.Println(\"Error decoding book request: \", err)\n\t\treturn nil, err\n\t}\n\n\tcreateBookRequest.Bearer = bearer\n\n\treturn createBookRequest, nil\n}", "title": "" }, { "docid": "c2b5918bcc359f3a0666bb405d4380db", "score": "0.5363328", "text": "func decodeUpdateBookRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tbookId, err := parseBookId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Get bearer from headers\n\tbearer := parseBearer(r)\n\n\t///////////////////\n\t// Parse body\n\tvar updateBook updateBookRequest\n\tif err := json.NewDecoder(r.Body).Decode(&updateBook); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Set bookid on update request\n\tupdateBook.BookId = bookId\n\n\tupdateBook.Bearer = bearer\n\n\treturn updateBook, nil\n}", "title": "" }, { "docid": "47b5af37a7675b3ae048f2c0d2f319d5", "score": "0.5348648", "text": "func decodeGetEventsRequest(_ context.Context, r interface{}) (interface{}, error) {\r\n\treturn nil, errors.New(\"'Events' Decoder is not impelemented\")\r\n}", "title": "" }, { "docid": "3000bfc899d739f76c324a475fc83090", "score": "0.5347236", "text": "func DecodeUnsealRequest(_ context.Context, grpcReq interface{}) (interface{}, error) {\n\treq := grpcReq.(*pb.UnsealRequest)\n\treturn &endpoints.UnsealRequest{Key: req.Key, Reset: req.Reset_}, nil\n}", "title": "" }, { "docid": "895b4bd6beb3554fd987f09419cf6ed6", "score": "0.5342158", "text": "func decodeGetAllKeyPersonsRequest(_ context.Context, request interface{}) (interface{}, error) {\n\treq := request.(*pb.GetAllKeyPersonsRequest)\n\tdecoded := endpoints.GetAllKeyPersonsRequest{\n\t\tID: req.Id,\n\t\tCompanyID: req.CompanyId,\n\t\tName: req.Name,\n\t\tContactNumber: req.ContactNumber,\n\t\tEmail: req.Email,\n\t\tJobTitle: req.JobTitle,\n\t}\n\treturn decoded, nil\n}", "title": "" }, { "docid": 
"b835923f50af49e06a9dfcf2882ef2ef", "score": "0.533756", "text": "func decodeUpdateKeyPersonRequest(_ context.Context, request interface{}) (interface{}, error) {\n\treq := request.(*pb.UpdateKeyPersonRequest)\n\treturn endpoints.UpdateKeyPersonRequest{ID: req.Id, KeyPerson: models.KeyPersonToORM(req.KeyPerson)}, nil\n}", "title": "" }, { "docid": "9c7164b54e2dfcd83f6fc2808764511b", "score": "0.53320974", "text": "func decodeAddRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\treq := endpoint.AddRequest{\n\t\tio.Department{\n\t\t\tDepartmentName: r.FormValue(\"DepartmentName\"),\n\t\t},\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "f3300ae5e4bede6b05063fd807192f48", "score": "0.5328638", "text": "func decodeAddRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\tzip, _ := strconv.Atoi(r.FormValue(\"ZipCode\"))\n\ttel, _ := strconv.Atoi(r.FormValue(\"EmployeeNumTel\"))\n\tEmergencyTel, _ := strconv.Atoi(r.FormValue(\"EmergencyContactTel\"))\n\tsalary, _ := strconv.ParseFloat(r.FormValue(\"EmployeeSalary\"), 32)\n\tiban, _ := strconv.Atoi(r.FormValue(\"EmployeeIban\"))\n\tbic, _ := strconv.Atoi(r.FormValue(\"EmployeeBic\"))\n\treq := endpoint.AddRequest{\n\t\tio.Employee{\n\t\t\tEmployeeName: r.FormValue(\"EmployeeName\"),\n\t\t\tEmployeeEmail: r.FormValue(\"EmployeeEmail\"),\n\t\t\tAddress: r.FormValue(\"Address\"),\n\t\t\tZipCode: zip,\n\t\t\tEmployeeBirthDate: r.FormValue(\"EmployeeBirthDate\"),\n\t\t\tEmployeeNumTel: tel,\n\t\t\tEmergencyContactName: r.FormValue(\"EmergencyContactName\"),\n\t\t\tEmergencyContactTel: EmergencyTel,\n\t\t\tEmployeeStartDate: r.FormValue(\"EmployeeStartDate\"),\n\t\t\tEmployeeSalary: salary,\n\t\t\tEmployeeIban: iban,\n\t\t\tEmployeeBic: bic,\n\t\t},\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "2220a7075acff555c207c02ee5a1424c", "score": "0.53261316", "text": "func DecodeListRequest(_ context.Context, r *http.Request) (req interface{}, err error) {\n\t//req = endpoints.ListRequest{}\n\t//err = json.NewDecoder(r.Body).Decode(&r)\n\treturn nil, nil\n}", "title": "" }, { "docid": "d4362a23383cd688e3d7c5d91c08e1ae", "score": "0.5325935", "text": "func decodeLoginPRequest(_ context.Context, r interface{}) (interface{}, error) {\n\trq := r.(*pb.LoginPRequest)\n\n\treturn endpoint.LoginPRequest{\n\t\tPhone: rq.Phone,\n\t}, nil\n}", "title": "" }, { "docid": "58c72468964ccf73b1332190228f8606", "score": "0.53228426", "text": "func decodeGetDealByDIDRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\tvars := mux.Vars(r)\n\tid, ok := vars[\"dId\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"not a valid dId\")\n\t}\n\treq := endpoint.GetDealByDIDRequest{\n\t\tId: id,\n\t}\n\treturn req, nil\n}", "title": "" }, { "docid": "72578c71ae0fee457815cf38e72ecc99", "score": "0.53104967", "text": "func decodeDeleteKeyPersonRequest(_ context.Context, request interface{}) (interface{}, error) {\n\treq := request.(*pb.DeleteKeyPersonRequest)\n\treturn endpoints.DeleteKeyPersonRequest{ID: req.Id}, nil\n}", "title": "" }, { "docid": "806912166562a8464bd40410af172580", "score": "0.52999383", "text": "func decodeUpdateJobPostRequest(_ context.Context, request interface{}) (interface{}, error) {\n\treq := request.(*pb.UpdateJobPostRequest)\n\treturn endpoints.UpdateJobPostRequest{ID: req.Id, JobPost: models.JobPostToORM(req.JobPost)}, nil\n}", "title": "" }, { "docid": "6e7d9ab73d5480793e798ea44a585f0b", "score": "0.52970326", "text": "func decodeGetJobPostByIDRequest(_ context.Context, request interface{}) 
(interface{}, error) {\n\treq := request.(*pb.GetJobPostByIDRequest)\n\treturn endpoints.GetJobPostByIDRequest{ID: req.Id}, nil\n}", "title": "" }, { "docid": "34b3feea58c2286dceea49dcc2fb01dd", "score": "0.52967036", "text": "func (r *KubeCover) decodeInput(req *http.Request, data interface{}) (string, error) {\n\t// step: read in the content payload\n\tcontent, err := ioutil.ReadAll(req.Body)\n\tif err != nil {\n\t\tglog.Errorf(\"unable to read in the content, error: %s\", err)\n\t\treturn \"\", err\n\t}\n\tdefer func() {\n\t\t// we need to set the content back\n\t\treq.Body = ioutil.NopCloser(bytes.NewReader(content))\n\t}()\n\n\trdr := strings.NewReader(string(content))\n\n\t// step: decode the json\n\terr = json.NewDecoder(rdr).Decode(data)\n\tif err != nil {\n\t\tglog.Errorf(\"unable to decode the request body, error: %s\", err)\n\t\treturn \"\", err\n\t}\n\n\treturn string(content), nil\n}", "title": "" }, { "docid": "522f37577e650402948042232129f17b", "score": "0.5294698", "text": "func (d Decoder) DecodeString(dst interface{}, src string) error {\n\tvs, err := url.ParseQuery(src)\n\tif err != nil {\n\t\treturn err\n\t}\n\tv := reflect.ValueOf(dst)\n\treturn d.decodeNode(v, parseValues(d.d, d.e, vs, canIndexOrdinally(v)))\n}", "title": "" }, { "docid": "b2c614dcef3078e4161cdf0b6378de8f", "score": "0.5294275", "text": "func parseKeyHookRequest(Request *http.Request) (keyHookRequest, error) {\n\tNewRequest := keyHookRequest{}\n\terr := json.NewDecoder(Request.Body).Decode(&NewRequest)\n\tif err != nil {\n\t\treturn keyHookRequest{}, err\n\t}\n\treturn NewRequest, nil\n}", "title": "" }, { "docid": "541c34783c923941f6bcb2ce51813189", "score": "0.5290188", "text": "func DecodeStartRequest(_ context.Context, r *http.Request) (req interface{}, err error) {\n\treq = endpoints.StartRequest{Id: mux.Vars(r)[\"id\"]}\n\t//err = json.NewDecoder(r.Body).Decode(&r)\n\treturn req, err\n}", "title": "" }, { "docid": "4479a45adaa86c0ac06c9686bf7234ae", "score": "0.52893245", "text": "func (req *pbRequest) Unmarshal(data []byte) error {\n\tvar length = uint64(len(data))\n\tvar offset uint64\n\tvar n uint64\n\tvar tag uint64\n\tvar fieldNumber int\n\tvar wireType uint8\n\tfor {\n\t\tif offset < length {\n\t\t\ttag = uint64(data[offset])\n\t\t\toffset++\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t\tfieldNumber = int(tag >> 3)\n\t\twireType = uint8(tag & 0x7)\n\t\tswitch fieldNumber {\n\t\tcase 1:\n\t\t\tif wireType != 0 {\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field Seq\", wireType)\n\t\t\t}\n\t\t\tn = code.DecodeVarint(data[offset:], &req.Seq)\n\t\t\toffset += n\n\t\tcase 2:\n\t\t\tif wireType != 2 {\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field Upgrade\", wireType)\n\t\t\t}\n\t\t\tn = code.DecodeBytes(data[offset:], &req.Upgrade)\n\t\t\toffset += n\n\t\tcase 3:\n\t\t\tif wireType != 2 {\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field ServiceMethod\", wireType)\n\t\t\t}\n\t\t\tn = code.DecodeString(data[offset:], &req.ServiceMethod)\n\t\t\toffset += n\n\t\tcase 4:\n\t\t\tif wireType != 2 {\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field Args\", wireType)\n\t\t\t}\n\t\t\tn = code.DecodeBytes(data[offset:], &req.Args)\n\t\t\toffset += n\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "5e435471afce13ef515e28861ba6844a", "score": "0.527609", "text": "func decodeHTTPNewJobRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tvar req endpoint.NewJobRequest\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn 
req, err\n}", "title": "" }, { "docid": "2e20490b1e8996795cc515047b2556d6", "score": "0.5266246", "text": "func DecodeString(in string, objs ...interface{}) (err error) {\n\tbbuf, err := base64.URLEncoding.DecodeString(in)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = DecodeBytes(bbuf, objs...)\n\treturn\n}", "title": "" }, { "docid": "2ad98939d93640e64045d87c64bfba3f", "score": "0.5257564", "text": "func decode(r *http.Request, v ok) error {\n\tif r.Body == nil {\n\t\treturn errors.New(\"Invalid Body\")\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(v); err != nil {\n\t\treturn err\n\t}\n\treturn v.OK()\n}", "title": "" }, { "docid": "92b197b0460e653102b50c865305291b", "score": "0.5254458", "text": "func parseParams(s string) (map[string]string, string, error) {\n if len(s) < 1 || s[0] != paramDelimOpen {\n return nil, \"\", syntaxError(fmt.Errorf(\"Invalid parameters; expected '%v', got '%v'\", string(paramDelimOpen), string(s[0])))\n }else{\n s = s[1:]\n }\n \n params := make(map[string]string)\n for len(s) > 0 {\n _, s = scan.White(s)\n \n if len(s) < 1 {\n return nil, \"\", syntaxError(fmt.Errorf(\"Unexpected end of parameters\"))\n }\n if s[0] == paramDelimClose {\n s = s[1:]\n break\n }\n if s[0] == paramDelimList {\n s = s[1:]\n continue\n }\n \n var k, v string\n var err error\n k, v, s, err = parseKeyValue(s)\n if err != nil {\n return nil, \"\", err\n }\n \n params[k] = v\n }\n \n return params, s, nil\n}", "title": "" }, { "docid": "c15a50012423753a8dc6e19b0ebc5f2d", "score": "0.52501065", "text": "func DecodeGetRequest(_ context.Context, r *http.Request) (req interface{}, err error) {\n\treq = endpoints.GetRequest{Id: mux.Vars(r)[\"id\"]}\n\t//err = json.NewDecoder(r.Body).Decode(&r)\n\treturn req, err\n}", "title": "" }, { "docid": "b493fb9eb1f725a91f29b47913c332db", "score": "0.5232928", "text": "func decodeReqIntoTeam(w http.ResponseWriter, req *http.Request) (team models.Team) {\n\tif err := json.NewDecoder(req.Body).Decode(&team); err != nil {\n\t\tutils.RespondWithAppError(w, err, \"Invalid team data\", 500)\n\t\treturn\n\t}\n\treturn\n}", "title": "" }, { "docid": "04441080dd7b4cf7354b4264d696ac51", "score": "0.5223733", "text": "func parseReqBodyParams(body []byte) (map[string]interface{}, error) {\n\n\tvar params map[string]interface{}\n\n\terr := json.Unmarshal(body, &params)\n\n\t// fmt.Println(params)\n\tif err != nil {\n\t\t// fmt.Println(err)\n\t\treturn params, errors.New(\"invalid JSON\")\n\t}\n\n\n\tfor key, _ := range params {\n\t\tif(!fieldNameIsValid(key)) {\n\t\t\treturn params, errors.New(fmt.Sprintf(\"invalid field name: %s\", key))\n\t\t}\n\n\t\tif key == \"updatedAt\" || key == \"createdAt\" || key == \"objectId\" {\n\t\t\treturn params, errors.New(fmt.Sprintf(\"%s is an invalid field name\", key))\n\t\t}\n }\n\t\n\n\treturn params, nil\n}", "title": "" }, { "docid": "7a0131abc85a70f19465b243cba4db9f", "score": "0.52173483", "text": "func parsePostRequest(r *http.Request) (models.BodyRequest, error) {\n\tbodyReq := models.BodyRequest{}\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\treturn models.BodyRequest{}, err\n\t}\n\terr = json.Unmarshal(body, &bodyReq)\n\tif err != nil {\n\t\treturn models.BodyRequest{}, err\n\t}\n\tbodyReq.PlacementID = r.URL.Query().Get(\"placement\")\n\treturn bodyReq, nil\n}", "title": "" }, { "docid": "8eacce88a0ef168b0119243a42827437", "score": "0.5206378", "text": "func ReadQueryIntoStruct(request *http.Request, dest interface{}, ignoreMissing bool) error {\n\tquery := request.URL.Query()\n\tif query == 
nil {\n\t\treturn errors.New(\"Request has no query parameters\")\n\t}\n\tfor k, v := range query {\n\t\tif len(v) == 1 {\n\t\t\tquery[k] = strings.Split(v[0], \",\")\n\t\t}\n\t}\n\treturn ReadMapIntoStruct(query, dest, ignoreMissing)\n}", "title": "" }, { "docid": "c0e88aca7083d8e557130f5b01552045", "score": "0.5189729", "text": "func decodeHTTPConcatRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tvar req endpoints.ConcatRequest\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "120dd45eb0852d70b529a9b09d1fff6a", "score": "0.5185403", "text": "func decodeHTTPSumRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tvar req endpoints.SumRequest\n\terr := json.NewDecoder(r.Body).Decode(&req)\n\treturn req, err\n}", "title": "" }, { "docid": "0fde13763a24c8d29629fddceb1b8093", "score": "0.5181364", "text": "func decodeCreateJobPostRequest(_ context.Context, request interface{}) (interface{}, error) {\n\treq := request.(*pb.CreateJobPostRequest)\n\treturn endpoints.CreateJobPostRequest{JobPost: models.JobPostToORM(req.JobPost)}, nil\n}", "title": "" } ]
9743150fbe2d5283327cfee82bb24ee0
Encode returns the binary representation of the walTransaction. 1. 8 Bytes: Timestamp 2. 8 Bytes: Heap ID 3. 8 Bytes: Value File ID 4. 2 Bytes: Number Of Changes 5. Repeated: walTransactionChange
[ { "docid": "6f2752f58732f8de81d74f2b9e11294d", "score": "0.8146103", "text": "func (t *walTransaction) Encode() []byte {\n\tbuf := buffers.NewBytesBuffer()\n\tbuf.AppendUint64(t.Timestamp)\n\tbuf.AppendUint64(t.HeapId)\n\tbuf.AppendUint64(t.ValueFileId)\n\tbuf.AppendUint16(uint16(len(t.Entries)))\n\tfor _, change := range t.Entries {\n\t\tbuf.Append(change.Encode()...)\n\t}\n\n\treturn buf.Bytes()\n}", "title": "" } ]
[ { "docid": "f9f9bfe4a1f5b5dbfa7191a5a5a78bab", "score": "0.71914625", "text": "func (c *walTransactionChange) Encode() []byte {\n\tbuf := buffers.NewBytesBuffer()\n\tbuf.AppendByte(byte(c.Type))\n\tbuf.Append(c.Key...)\n\n\tswitch c.Type {\n\t// Right now only a set type will need the actual value. There might\n\t// be others in the future that do or do not need the value stored.\n\tcase walTransactionChangeTypeSet:\n\t\tbuf.Append(c.Value...)\n\t}\n\n\treturn buf.Bytes()\n}", "title": "" }, { "docid": "3a422f511ad450958c8abf3bf31a55ad", "score": "0.6185129", "text": "func encodeTransaction(obj *Transaction) ([]byte, error) {\n\tn := encodeSizeTransaction(obj)\n\tbuf := make([]byte, n)\n\n\tif err := encodeTransactionToBuffer(buf, obj); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn buf, nil\n}", "title": "" }, { "docid": "d7dc095e347bf9221cc9aed228c7d12c", "score": "0.6145449", "text": "func (c changeView) EncodeBinary(w *io.BinWriter) {\n\tw.WriteU32LE(c.timestamp)\n}", "title": "" }, { "docid": "ef3df2ec937043ef0ebe7ec3ad42c1aa", "score": "0.6060446", "text": "func (c *changeView) EncodeBinary(w *io.BinWriter) {\n\tw.WriteU64LE(c.timestamp)\n\tw.WriteB(byte(c.reason))\n}", "title": "" }, { "docid": "0fb3632a3d95499a431084562f3d9d4c", "score": "0.6047068", "text": "func (tx Transaction) Serialize() []byte {\n var encoded bytes.Buffer\n\n enc := gob.NewEncoder(&encoded)\n err := enc.Encode(tx)\n if err != nil {\n log.Panic(err)\n }\n \n return encoded.Bytes()\n}", "title": "" }, { "docid": "5bf3d461dbb20cd67fad752d292cbc8a", "score": "0.60303766", "text": "func (txn *Transaction) Serialize() []byte {\n\treturn encoder.Serialize(*txn)\n}", "title": "" }, { "docid": "afbf3b4adbcd73e2f45039009c4eaaf7", "score": "0.59767354", "text": "func (tr *txnRecord) Encode(writer io.Writer) error {\n\t_, err := datastore.ToMsgpack(tr.Transaction).WriteTo(writer)\n\treturn err\n}", "title": "" }, { "docid": "d3c62426fc75c664ea5699602a4dfe31", "score": "0.58713275", "text": "func (tx Transaction) MarshalBinary() ([]byte, error) {\n\tif tx.marshalBinaryCache != nil {\n\t\treturn tx.marshalBinaryCache, nil\n\t}\n\n\tdata, err := tx.MarshalBinaryLedger()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor _, rcdSig := range tx.Signatures {\n\t\tdata = append(data, rcdSig.RCD...)\n\t\tdata = append(data, rcdSig.Signature...)\n\t}\n\n\treturn data, nil\n}", "title": "" }, { "docid": "d05bc9cba9e19b6899faf6a8668b8ade", "score": "0.58506614", "text": "func (tx Transaction) Serialize() []byte {\n\tdata, err := rlp.EncodeToBytes(tx)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\treturn data\n}", "title": "" }, { "docid": "8d490e19c816684384a89b716512abb3", "score": "0.5834153", "text": "func (transaction Transaction) MarshalBinary() ([]byte, error) {\n\treturn protobuf.Marshal(transaction.pb)\n}", "title": "" }, { "docid": "8d3c0c150d974da3f7c0d85d2f73fef8", "score": "0.58314574", "text": "func (tx Transaction) Serialize() []byte {\n\tvar encoded bytes.Buffer\n\n\t// gob.NewEncoder f func(w io.Writer) *gob.Encoder\n\tenc := gob.NewEncoder(&encoded)\n\terr := enc.Encode(tx)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\treturn encoded.Bytes()\n}", "title": "" }, { "docid": "88338f7140efbe96a97e8427fc6a0a60", "score": "0.5823243", "text": "func (tx Transaction) Serialize() []byte {\n\tvar encoded bytes.Buffer\n\n\tenc := gob.NewEncoder(&encoded)\n\terr := enc.Encode(tx)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\treturn encoded.Bytes()\n}", "title": "" }, { "docid": "88338f7140efbe96a97e8427fc6a0a60", 
"score": "0.5823243", "text": "func (tx Transaction) Serialize() []byte {\n\tvar encoded bytes.Buffer\n\n\tenc := gob.NewEncoder(&encoded)\n\terr := enc.Encode(tx)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\treturn encoded.Bytes()\n}", "title": "" }, { "docid": "88338f7140efbe96a97e8427fc6a0a60", "score": "0.5823243", "text": "func (tx Transaction) Serialize() []byte {\n\tvar encoded bytes.Buffer\n\n\tenc := gob.NewEncoder(&encoded)\n\terr := enc.Encode(tx)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\treturn encoded.Bytes()\n}", "title": "" }, { "docid": "e84f5a7fc21b3e0f04d894c9421eb03d", "score": "0.58107173", "text": "func (tx *Transaction) Serialize() []byte {\n\tvar buffer bytes.Buffer\n\tencoder := gob.NewEncoder(&buffer)\n\tutil.CheckAnxiety(encoder.Encode(tx))\n\treturn buffer.Bytes()\n}", "title": "" }, { "docid": "4c0753e32045dd4ce47fcc416345ed3c", "score": "0.5778274", "text": "func (tx Transaction) Serialize() []byte {\n\tvar data bytes.Buffer\n\tenc := gob.NewEncoder(&data)\n\n\tenc.Encode(Transaction{tx.ID, tx.Vin, tx.Vout})\n\n\treturn data.Bytes()\n}", "title": "" }, { "docid": "ad4eb84c0a3d207f4a640dec2fa278ee", "score": "0.5746122", "text": "func (t Transaction) MarshalBinary() ([]byte, error) {\n var out bytes.Buffer\n \n data, err := t.MarshalBinarySig()\n if err != nil {\n return nil, err\n }\n out.Write(data)\n \n for i, rcd := range t.rcds {\n \n // Write the RCD\n data, err := rcd.MarshalBinary()\n if err != nil {\n return nil, err\n }\n out.Write(data)\n \n // Then write its signature blocks. This needs to be\n // reworked so we use the information from the RCD block\n // to control the writing of the signatures. After all,\n // we don't want to restrict what might be required to\n // sign an input.\n if len(t.sigBlocks) <= i {\n t.sigBlocks = append(t.sigBlocks, new(SignatureBlock))\n }\n data, err = t.sigBlocks[i].MarshalBinary()\n if err != nil {\n return nil, err\n }\n out.Write(data)\n }\n \n for i := 0; i < len(t.inputs); i++ {\n }\n \n return out.Bytes(), nil\n}", "title": "" }, { "docid": "c72f3c8d56e281519644682b834eff1b", "score": "0.5688834", "text": "func (t *Tx) MarshalBinary() ([]byte, error) {\n\tbuf := new(bytes.Buffer)\n\tbuf.Grow(t.Tx.SerializeSize())\n\terr := t.Tx.Serialize(buf)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn buf.Bytes(), nil\n}", "title": "" }, { "docid": "72525d32881cb4ee1f01d942a8c5aad2", "score": "0.56314343", "text": "func _559sqlite3WalBeginWriteTransaction(tls crt.TLS, _pWal uintptr /* *TWal = SWal */) (r int32) {\n\tvar _rc int32\n\n\tif (*(*uint8)(unsafe.Pointer(_pWal + 66))) == 0 {\n\t\tgoto _1\n\t}\n\n\treturn int32(8)\n\n_1:\n\t_rc = _882walLockExclusive(tls, _pWal, int32(0), int32(1))\n\tif _rc == 0 {\n\t\tgoto _2\n\t}\n\n\treturn _rc\n\n_2:\n\t*(*uint8)(unsafe.Pointer(_pWal + 64)) = uint8(1)\n\tif crt.Xmemcmp(tls, _pWal+72, _883walIndexHdr(tls, _pWal), uint64(48)) == int32(0) {\n\t\tgoto _3\n\t}\n\n\t_884walUnlockExclusive(tls, _pWal, int32(0), int32(1))\n\t*(*uint8)(unsafe.Pointer(_pWal + 64)) = uint8(0)\n\t_rc = int32(517)\n_3:\n\treturn _rc\n}", "title": "" }, { "docid": "445e3786596e475dc058a1225f6540da", "score": "0.5539729", "text": "func (mtx *MinerTransaction) Serialize(bw *io.BinaryWriter) {\n\tmtx.SerializeUnsigned(bw)\n\tmtx.SerializeWitnesses(bw)\n}", "title": "" }, { "docid": "000c4eca46fc3debc569331b944f9754", "score": "0.5506339", "text": "func (d *GetBlockByIndex) EncodeBinary(bw *io.BinWriter) {\n\tbw.WriteU32LE(d.IndexStart)\n\tbw.WriteU16LE(uint16(d.Count))\n}", "title": "" }, { 
"docid": "d80f1501172cc91d3a0bffb4c04f39a8", "score": "0.5476847", "text": "func (w *Wallet) Encode() []byte {\n\treturn versionedBytes(1).\n\t\tAddData(uint32Bytes(w.AssetID)).\n\t\tAddData(config.Data(w.Settings)).\n\t\tAddData(w.EncryptedPW).\n\t\tAddData([]byte(w.Address)).\n\t\tAddData([]byte(w.Type))\n}", "title": "" }, { "docid": "4be336a65a38d92ed4581b42ff12738b", "score": "0.5443438", "text": "func (m *Transaction) Marshal() []byte {\n\twriter := jspb.NewWriter()\n\tm.MarshalToWriter(writer)\n\treturn writer.GetResult()\n}", "title": "" }, { "docid": "ff1fc09cbb664ac477b6f2c177e9266d", "score": "0.54383445", "text": "func (t txdata) MarshalJSON() ([]byte, error) {\n\ttype txdata struct {\n\t\tV *hexutil.Big `json:\"v\"\tgencodec:\"required\"`\n\t\tR *hexutil.Big `json:\"r\"\tgencodec:\"required\"`\n\t\tS *hexutil.Big `json:\"s\"\tgencodec:\"required\"`\n\t\tAbnormal uint64 `json:\"abnormal gencodec:\"required\"`\n\t\tSrcAddress string `json:\"srcAddress\"\tgencodec:\"required\"`\n\t\tDestAddress string `json:\"destAddress\"\tgencodec:\"required\"`\n\t\tSrcPort *big.Int `json:\"srcPort\" \tgencodec:\"required\"`\n\t\tDestPort *big.Int `json:\"destPort\"\tgencodec:\"required\"`\n\t\tProtocol *big.Int `json:\"protocol\"\tgencodec:\"required\"`\n\t\tStartTime *hexutil.Big `json:\"startTime\"\tgencodec:\"required\"`\n\t\tLastTime *hexutil.Big `json:\"lastTime\"\tgencodec:\"required\"`\n\t\tSize *hexutil.Big `json:\"size\"\t\tgencodec:\"required\"`\n\t\tPubKey []byte `json:\"pubKey\"\t\tgencodec:\"required\"`\n\t\tHash *common.Hash `json:\"hash\" rlp:\"-\"`\n\t}\n\tvar enc txdata\n\tenc.V = (*hexutil.Big)(t.V)\n\tenc.R = (*hexutil.Big)(t.R)\n\tenc.S = (*hexutil.Big)(t.S)\n\tenc.Abnormal = t.Abnormal\n\tenc.SrcAddress = t.SrcAddress\n\tenc.DestAddress = t.DestAddress\n\tenc.SrcPort = t.SrcPort\n\tenc.DestPort = t.DestPort\n\tenc.Protocol = t.Protocol\n\tenc.StartTime = (*hexutil.Big)(t.StartTime)\n\tenc.LastTime = (*hexutil.Big)(t.LastTime)\n\tenc.Size = (*hexutil.Big)(t.Size)\n\tenc.PubKey = t.PubKey\n\tenc.Hash = t.Hash\n\treturn json.Marshal(&enc)\n}", "title": "" }, { "docid": "e342a3e3b2395d1d85d522a7cfed8cca", "score": "0.54151237", "text": "func (m *TransactionToCommit) Marshal() []byte {\n\twriter := jspb.NewWriter()\n\tm.MarshalToWriter(writer)\n\treturn writer.GetResult()\n}", "title": "" }, { "docid": "54086da0ffd4a4b374d9fcac87bf34c0", "score": "0.54100883", "text": "func (tx *Transaction) MarshalBinary() ([]byte, error) {\n\tvar txBytes bytes.Buffer\n\t_, err := xdr.Marshal(&txBytes, tx.xdrEnvelope)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal XDR\")\n\t}\n\n\treturn txBytes.Bytes(), nil\n}", "title": "" }, { "docid": "d83069c8b908134765a646b767cc152d", "score": "0.54020125", "text": "func (trx *Transaction) Marshal() ([]byte, error) {\n\treturn json.Marshal(trx)\n}", "title": "" }, { "docid": "9a37185e9abb45c6d1d50ba2d6c73c5d", "score": "0.53801376", "text": "func (tx *LeaseV1) MarshalBinary() ([]byte, error) {\n\tb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal LeaseV1 transaction to bytes\")\n\t}\n\tbl := len(b)\n\tbuf := make([]byte, bl+crypto.SignatureSize)\n\tcopy(buf[0:], b)\n\tcopy(buf[bl:], tx.Signature[:])\n\treturn buf, nil\n}", "title": "" }, { "docid": "1094f35b0ebeaa2c52e6c9c028a93cb1", "score": "0.5361399", "text": "func _916sqlite3WalEndWriteTransaction(tls crt.TLS, _pWal uintptr /* *TWal = SWal */) (r int32) {\n\tif (*(*uint8)(unsafe.Pointer(_pWal + 64))) == 0 {\n\t\tgoto 
_1\n\t}\n\n\t_884walUnlockExclusive(tls, _pWal, int32(0), int32(1))\n\t*(*uint8)(unsafe.Pointer(_pWal + 64)) = uint8(0)\n\t*(*uint32)(unsafe.Pointer(_pWal + 124)) = uint32(0)\n\t*(*uint8)(unsafe.Pointer(_pWal + 67)) = uint8(0)\n_1:\n\treturn int32(0)\n}", "title": "" }, { "docid": "b672760e11476c25bf32dc131f8f63e2", "score": "0.53559715", "text": "func (tx *TransferV1) MarshalBinary() ([]byte, error) {\n\tsl := crypto.SignatureSize\n\tb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal TransferV1 transaction to bytes\")\n\t}\n\tbl := len(b)\n\tbuf := make([]byte, 1+sl+bl)\n\tbuf[0] = byte(tx.Type)\n\tcopy(buf[1:], tx.Signature[:])\n\tcopy(buf[1+sl:], b)\n\treturn buf, nil\n}", "title": "" }, { "docid": "a403de21a09d43747eedff0a256c8abb", "score": "0.5351977", "text": "func (t TransactionMeta) MarshalJSON() ([]byte, error) {\n\ttype TransactionMeta struct {\n\t\tL1BlockNumber *big.Int `json:\"l1BlockNumber\"`\n\t\tL1MessageSender *common.Address `json:\"l1MessageSender\" gencodec:\"required\"`\n\t\tSignatureHashType SignatureHashType `json:\"signatureHashType\" gencodec:\"required\"`\n\t\tQueueOrigin *big.Int `json:\"queueOrigin\" gencodec:\"required\"`\n\t\tIndex *uint64 `json:\"index\" gencodec:\"required\"`\n\t}\n\tvar enc TransactionMeta\n\tenc.L1BlockNumber = t.L1BlockNumber\n\tenc.L1MessageSender = t.L1MessageSender\n\tenc.SignatureHashType = t.SignatureHashType\n\tenc.QueueOrigin = t.QueueOrigin\n\tenc.Index = t.Index\n\treturn json.Marshal(&enc)\n}", "title": "" }, { "docid": "1543b8f7a9b79116b4dcfec5110e2353", "score": "0.532258", "text": "func (t *Transaction) MarshalBinarySig() ([]byte, error) {\n\tvar out bytes.Buffer\n \n// \t{ // limit the scope of tmp\n// var tmp bytes.Buffer\n// binary.Write(&tmp, binary.BigEndian, uint64(t.lockTime))\n// \t out.Write(tmp.Bytes()[3:])\n// }\n binary.Write(&out, binary.BigEndian, uint64(t.lockTime)) \n\tbinary.Write(&out, binary.BigEndian, uint16(len(t.inputs)))\n\tbinary.Write(&out, binary.BigEndian, uint16(len(t.outputs)))\n\tbinary.Write(&out, binary.BigEndian, uint16(len(t.outECs)))\n\n\tfor _, input := range t.inputs {\n\t\tdata, err := input.MarshalBinary()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tout.Write(data)\n\t}\n\n\tfor _, output := range t.outputs {\n\t\tdata, err := output.MarshalBinary()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tout.Write(data)\n\t}\n\n\tfor _, outEC := range t.outECs {\n\t\tdata, err := outEC.MarshalBinary()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tout.Write(data)\n\t}\n \n\treturn out.Bytes(), nil\n}", "title": "" }, { "docid": "fe1a2d0219a7e5af37ff6eba10a5c4b3", "score": "0.5281756", "text": "func (tx *ExchangeV1) MarshalBinary() ([]byte, error) {\n\tb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal ExchangeV1 transaction to bytes\")\n\t}\n\tbl := len(b)\n\tbuf := make([]byte, bl+crypto.SignatureSize)\n\tcopy(buf[0:], b)\n\tcopy(buf[bl:], tx.Signature[:])\n\treturn buf, nil\n}", "title": "" }, { "docid": "aac04bc5321de0f935fb476c70fbaabe", "score": "0.5266394", "text": "func (w *walSegment) Append(txn walTransaction) (err error) {\n\t// The header will always be 16 bytes and consists of a single 64 bit integer and two 32 bit\n\t// integers.\n\theader := make([]byte, 16)\n\n\t// Encode the transactions changes to be written to the file.\n\tdata := txn.Encode()\n\n\t// Allocate space for the item to be written to the WAL.\n\tok, headerOffset, dataOffset := 
w.Space.Allocate(header, data)\n\tif !ok {\n\t\treturn ErrInsufficientSpace\n\t}\n\n\t// The header will always be 16 bytes, it will contain the the TransactionId, and the start and\n\t// end offsets for the actual transaction changes within the file.\n\tbinary.BigEndian.PutUint64(header[0:8], txn.TransactionId)\n\tbinary.BigEndian.PutUint32(header[8:12], uint32(dataOffset))\n\tbinary.BigEndian.PutUint32(header[12:16], uint32(dataOffset+int64(len(data))))\n\n\t// Write the header to the file.\n\tif _, err = w.File.WriteAt(header, headerOffset); err != nil {\n\t\treturn err\n\t}\n\n\t// Write the actual transaction data.\n\tif _, err = w.File.WriteAt(data, dataOffset); err != nil {\n\t\treturn err\n\t}\n\n\t// Everything worked, we can return nil.\n\treturn nil\n}", "title": "" }, { "docid": "bf5506d52e6d2ec8e77814b1d7b727ad", "score": "0.5227502", "text": "func (b *Balance) Encode() []byte {\n\treturn versionedBytes(0).\n\t\tAddData(encodeAssetBalance(&b.Balance)).\n\t\tAddData(uint64Bytes(uint64(b.Stamp.UnixMilli())))\n}", "title": "" }, { "docid": "417294e97e9d366937ddc7c1d5eb3ec4", "score": "0.52235675", "text": "func encodeSizeTransaction(obj *Transaction) uint64 {\n\ti0 := uint64(0)\n\n\t// obj.Length\n\ti0 += 4\n\n\t// obj.Type\n\ti0++\n\n\t// obj.InnerHash\n\ti0 += 32\n\n\t// obj.Sigs\n\ti0 += 4\n\t{\n\t\ti1 := uint64(0)\n\n\t\t// x\n\t\ti1 += 65\n\n\t\ti0 += uint64(len(obj.Sigs)) * i1\n\t}\n\n\t// obj.In\n\ti0 += 4\n\t{\n\t\ti1 := uint64(0)\n\n\t\t// x\n\t\ti1 += 32\n\n\t\ti0 += uint64(len(obj.In)) * i1\n\t}\n\n\t// obj.Out\n\ti0 += 4\n\t{\n\t\ti1 := uint64(0)\n\n\t\t// x.Address.Version\n\t\ti1++\n\n\t\t// x.Address.Key\n\t\ti1 += 20\n\n\t\t// x.Coins\n\t\ti1 += 8\n\n\t\t// x.Hours\n\t\ti1 += 8\n\n\t\ti0 += uint64(len(obj.Out)) * i1\n\t}\n\n\treturn i0\n}", "title": "" }, { "docid": "898475333a48ace6dc58c55fb759d5e2", "score": "0.52046674", "text": "func (c Compression) MarshalBinary() ([]byte, error) {\n\treturn []byte{byte(c.format), byte(c.level)}, nil\n}", "title": "" }, { "docid": "a1740dff94863456035ca29606139432", "score": "0.52007383", "text": "func (m *SignedTransaction) Marshal() []byte {\n\twriter := jspb.NewWriter()\n\tm.MarshalToWriter(writer)\n\treturn writer.GetResult()\n}", "title": "" }, { "docid": "f0d0b9df6ab5784663a63bce77da762b", "score": "0.5193432", "text": "func (tx *SetScriptV1) MarshalBinary() ([]byte, error) {\n\tbb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal SetScriptV1 transaction to bytes\")\n\t}\n\tbl := len(bb)\n\tif tx.Proofs == nil {\n\t\treturn nil, errors.New(\"failed to marshal SetScriptV1 transaction to bytes: no proofs\")\n\t}\n\tpb, err := tx.Proofs.MarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal SetScriptV1 transaction to bytes\")\n\t}\n\tbuf := make([]byte, 1+bl+len(pb))\n\tcopy(buf[1:], bb)\n\tcopy(buf[1+bl:], pb)\n\treturn buf, nil\n}", "title": "" }, { "docid": "05ac8d1b374e2be9a1f7303a31f34aa1", "score": "0.51896596", "text": "func (tx *BurnV1) MarshalBinary() ([]byte, error) {\n\tb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal BurnV1 transaction to bytes\")\n\t}\n\tbuf := make([]byte, burnV1Len)\n\tcopy(buf, b)\n\tcopy(buf[burnV1BodyLen:], tx.Signature[:])\n\treturn buf, nil\n}", "title": "" }, { "docid": "8a6a29cb90fc6693c12565829f682082", "score": "0.51882786", "text": "func (tx Transaction) MarshalBinaryLen() int {\n\tif tx.marshalBinaryCache != nil {\n\t\treturn 
len(tx.marshalBinaryCache)\n\t}\n\n\tsize := TransactionHeaderSize +\n\t\t(len(tx.FCTInputs)+len(tx.FCTOutputs)+len(tx.ECOutputs))*32\n\n\tfor _, adrs := range [][]AddressAmount{\n\t\ttx.FCTInputs,\n\t\ttx.FCTOutputs, tx.ECOutputs,\n\t} {\n\t\tfor _, adr := range adrs {\n\t\t\tsize += varintf.BufLen(adr.Amount)\n\t\t}\n\t}\n\n\tfor _, rcdSig := range tx.Signatures {\n\t\tsize += rcdSig.Len()\n\t}\n\n\treturn size\n}", "title": "" }, { "docid": "76eb221cda1ee258a1ccc8fc68f1cd63", "score": "0.5169594", "text": "func (p *Payload) EncodeBinary(w *io.BinWriter) {\n\tp.encodeData()\n\tp.Extensible.EncodeBinary(w)\n}", "title": "" }, { "docid": "24f45bf79d609c44a86aa3f85f5b4912", "score": "0.5165243", "text": "func (txn Txn) Json() []byte {\n\tb, err := json.Marshal(txn)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tpanic(\"Could not marshal txn\")\n\t}\n\n\treturn b\n}", "title": "" }, { "docid": "de48f5f566db38fd75205162154e9ac9", "score": "0.5164835", "text": "func (tx *Tx) ToBytes() []byte {\r\n\treturn tx.toBytesHelper(0, nil)\r\n}", "title": "" }, { "docid": "e0968ecc5c62835b079ba0376f563abe", "score": "0.51538515", "text": "func (tx *ReissueV1) MarshalBinary() ([]byte, error) {\n\tsl := crypto.SignatureSize\n\tb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal ReissueV1 transaction to bytes\")\n\t}\n\tbl := len(b)\n\tbuf := make([]byte, 1+sl+bl)\n\tbuf[0] = byte(tx.Type)\n\tcopy(buf[1:], tx.Signature[:])\n\tcopy(buf[1+sl:], b)\n\treturn buf, nil\n}", "title": "" }, { "docid": "14df7de66d885d22e0cae25997a65096", "score": "0.51530045", "text": "func (t Transaction) MarshalJSON() ([]byte, error) {\n\ttype Alias Transaction\n\tv := &struct {\n\t\tMemo *string `json:\"memo,omitempty\"`\n\t\tMemoBytes *string `json:\"memo_bytes,omitempty\"`\n\t\t*Alias\n\t}{\n\t\tAlias: (*Alias)(&t),\n\t}\n\tif t.MemoType != \"none\" {\n\t\tv.Memo = &t.Memo\n\t}\n\n\tif t.MemoType == \"text\" {\n\t\tv.MemoBytes = &t.MemoBytes\n\t}\n\n\treturn json.Marshal(v)\n}", "title": "" }, { "docid": "408c3a74305510c96d574014e786ea95", "score": "0.51507884", "text": "func (tx *DataV1) MarshalBinary() ([]byte, error) {\n\tbb, err := tx.BodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal DataV1 transaction to bytes\")\n\t}\n\tbl := len(bb)\n\tif tx.Proofs == nil {\n\t\treturn nil, errors.New(\"failed to marshal DataV1 transaction to bytes: no proofs\")\n\t}\n\tpb, err := tx.Proofs.MarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal DataV1 transaction to bytes\")\n\t}\n\tpl := len(pb)\n\tbuf := make([]byte, 1+bl+pl)\n\tbuf[0] = 0\n\tcopy(buf[1:], bb)\n\tcopy(buf[1+bl:], pb)\n\treturn buf, nil\n}", "title": "" }, { "docid": "3259fac9763ea6d58fae224ea6fe1269", "score": "0.51506174", "text": "func (t txdata) MarshalJSON() ([]byte, error) {\n\ttype txdata struct {\n\t\tRecipient *bgmcommon.Address `json:\"to\" rlp:\"nil\"`\n\t\tAmount *hexutil.Big `json:\"value\" gencodec:\"required\"`\n\t\tPayload hexutil.Bytes `json:\"input\" gencodec:\"required\"`\n\t\tV *hexutil.Big `json:\"v\" gencodec:\"required\"`\n\t\tR *hexutil.Big `json:\"r\" gencodec:\"required\"`\n\t\tS *hexutil.Big `json:\"s\" gencodec:\"required\"`\n\t\tHash *bgmcommon.Hash `json:\"hash\" rlp:\"-\"`\n\t\tType TxType `json:\"type\" gencodec:\"required\"`\n\t\tAccountNonce hexutil.Uint64 `json:\"nonce\" gencodec:\"required\"`\n\t\tPrice *hexutil.Big `json:\"gasPrice\" gencodec:\"required\"`\n\t\tGasLimit *hexutil.Big `json:\"gas\" 
gencodec:\"required\"`\n\t\t\n\t}\n\tvar enc txdata\n\tencPtr.R = (*hexutil.Big)(tPtr.R)\n\tencPtr.S = (*hexutil.Big)(tPtr.S)\n\tencPtr.Hash = tPtr.Hash\n\treturn json.Marshal(&enc)\n\tencPtr.Type = tPtr.Type\n\tencPtr.AccountNonce = hexutil.Uint64(tPtr.AccountNonce)\n\tencPtr.Price = (*hexutil.Big)(tPtr.Price)\n\tencPtr.GasLimit = (*hexutil.Big)(tPtr.GasLimit)\n\tencPtr.Recipient = tPtr.Recipient\n\tencPtr.Amount = (*hexutil.Big)(tPtr.Amount)\n\tencPtr.Payload = tPtr.Payload\n\tencPtr.V = (*hexutil.Big)(tPtr.V)\n\t\n\t\n}", "title": "" }, { "docid": "ba3d47176c0b79cfe7e27d81a4628ba6", "score": "0.51490015", "text": "func encodeTransactionToBuffer(buf []byte, obj *Transaction) error {\n\tif uint64(len(buf)) < encodeSizeTransaction(obj) {\n\t\treturn encoder.ErrBufferUnderflow\n\t}\n\n\te := &encoder.Encoder{\n\t\tBuffer: buf[:],\n\t}\n\n\t// obj.Length\n\te.Uint32(obj.Length)\n\n\t// obj.Type\n\te.Uint8(obj.Type)\n\n\t// obj.InnerHash\n\te.CopyBytes(obj.InnerHash[:])\n\n\t// obj.Sigs maxlen check\n\tif len(obj.Sigs) > 65535 {\n\t\treturn encoder.ErrMaxLenExceeded\n\t}\n\n\t// obj.Sigs length check\n\tif uint64(len(obj.Sigs)) > math.MaxUint32 {\n\t\treturn errors.New(\"obj.Sigs length exceeds math.MaxUint32\")\n\t}\n\n\t// obj.Sigs length\n\te.Uint32(uint32(len(obj.Sigs)))\n\n\t// obj.Sigs\n\tfor _, x := range obj.Sigs {\n\n\t\t// x\n\t\te.CopyBytes(x[:])\n\n\t}\n\n\t// obj.In maxlen check\n\tif len(obj.In) > 65535 {\n\t\treturn encoder.ErrMaxLenExceeded\n\t}\n\n\t// obj.In length check\n\tif uint64(len(obj.In)) > math.MaxUint32 {\n\t\treturn errors.New(\"obj.In length exceeds math.MaxUint32\")\n\t}\n\n\t// obj.In length\n\te.Uint32(uint32(len(obj.In)))\n\n\t// obj.In\n\tfor _, x := range obj.In {\n\n\t\t// x\n\t\te.CopyBytes(x[:])\n\n\t}\n\n\t// obj.Out maxlen check\n\tif len(obj.Out) > 65535 {\n\t\treturn encoder.ErrMaxLenExceeded\n\t}\n\n\t// obj.Out length check\n\tif uint64(len(obj.Out)) > math.MaxUint32 {\n\t\treturn errors.New(\"obj.Out length exceeds math.MaxUint32\")\n\t}\n\n\t// obj.Out length\n\te.Uint32(uint32(len(obj.Out)))\n\n\t// obj.Out\n\tfor _, x := range obj.Out {\n\n\t\t// x.Address.Version\n\t\te.Uint8(x.Address.Version)\n\n\t\t// x.Address.Key\n\t\te.CopyBytes(x.Address.Key[:])\n\n\t\t// x.Coins\n\t\te.Uint64(x.Coins)\n\n\t\t// x.Hours\n\t\te.Uint64(x.Hours)\n\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "db44464b31c22063eb6a4ecd2b5cc56b", "score": "0.51155776", "text": "func (tx *Genesis) MarshalBinary() ([]byte, error) {\n\tb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal Genesis transaction to bytes\")\n\t}\n\treturn b, nil\n}", "title": "" }, { "docid": "dd7234bdddd59b6761548d4d8a351b72", "score": "0.5109968", "text": "func _1156walWriteToLog(tls crt.TLS, _p uintptr /* *TWalWriter = struct{XpWal *SWal...XsyncFlags int32;XszPage int32;} */, _pContent uintptr /* *void */, _iAmt int32, _iOffset int64) (r int32) {\n\tvar (\n\t\t_rc int32\n\t\t_iFirstAmt int32\n\t)\n\tif _iOffset >= (*(*int64)(unsafe.Pointer(_p + 16))) || (_iOffset+int64(_iAmt)) < (*(*int64)(unsafe.Pointer(_p + 16))) {\n\t\tgoto _1\n\t}\n\n\t_iFirstAmt = int32((*(*int64)(unsafe.Pointer(_p + 16))) - _iOffset)\n\t_rc = _163sqlite3OsWrite(tls, *(*uintptr)(unsafe.Pointer(_p + 8)), _pContent, _iFirstAmt, _iOffset)\n\tif _rc == 0 {\n\t\tgoto _2\n\t}\n\n\treturn _rc\n\n_2:\n\t_iOffset = int64(_iOffset + int64(_iFirstAmt))\n\t_iAmt = int32(_iAmt - _iFirstAmt)\n\t_pContent = uintptr(_iFirstAmt) + _pContent\n\n\t_rc = _354sqlite3OsSync(tls, 
*(*uintptr)(unsafe.Pointer(_p + 8)), (*(*int32)(unsafe.Pointer(_p + 24)))&int32(0x3))\n\tif _iAmt != int32(0) && _rc == 0 {\n\t\tgoto _3\n\t}\n\n\treturn _rc\n\n_3:\n_1:\n\t_rc = _163sqlite3OsWrite(tls, *(*uintptr)(unsafe.Pointer(_p + 8)), _pContent, _iAmt, _iOffset)\n\treturn _rc\n}", "title": "" }, { "docid": "28d09ec327fef897f6b5728bb12e1adc", "score": "0.5090814", "text": "func broadcastTransaction(rw *bufio.ReadWriter, t Token.Transaction) {\n\tbytes, err := json.Marshal(t)\n\tif err != nil {\n\t\tlogger.Warnf(\"Broadcast Transaction : %v\", err)\n\t}\n\n\t_, _ = rw.WriteString(fmt.Sprintf(\"%s\\n\", string(bytes)))\n\t_ = rw.Flush()\n}", "title": "" }, { "docid": "5ae3e20c1075b3b5dc61e12fa76d64fe", "score": "0.5084777", "text": "func _886sqlite3WalSavepoint(tls crt.TLS, _pWal uintptr /* *TWal = SWal */, _aWalData uintptr /* *Tu32 = uint32 */) {\n\t*(*uint32)(unsafe.Pointer(_aWalData)) = *(*uint32)(unsafe.Pointer((_pWal + 72) + 16))\n\t*(*uint32)(unsafe.Pointer(_aWalData + 4)) = *(*uint32)(unsafe.Pointer((_pWal + 72) + 24))\n\t*(*uint32)(unsafe.Pointer(_aWalData + 8)) = *(*uint32)(unsafe.Pointer(((_pWal + 72) + 24) + 4))\n\t*(*uint32)(unsafe.Pointer(_aWalData + 12)) = *(*uint32)(unsafe.Pointer(_pWal + 136))\n}", "title": "" }, { "docid": "0f048164637dd44cbf79a679df6ff3ad", "score": "0.5066641", "text": "func (m *message) EncodeBinary(w *io.BinWriter) {\n\tw.WriteB(byte(m.Type))\n\tw.WriteU32LE(m.BlockIndex)\n\tw.WriteB(m.ValidatorIndex)\n\tw.WriteB(m.ViewNumber)\n\tm.payload.EncodeBinary(w)\n}", "title": "" }, { "docid": "58350b14bf6a150b2b3fb33b6b4ebfaa", "score": "0.5063005", "text": "func (tx *IssueV1) MarshalBinary() ([]byte, error) {\n\tsl := crypto.SignatureSize\n\tb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal IssueV1 transaction to bytes\")\n\t}\n\tbl := len(b)\n\tbuf := make([]byte, 1+sl+bl)\n\tbuf[0] = byte(tx.Type)\n\tcopy(buf[1:], tx.Signature[:])\n\tcopy(buf[1+sl:], b)\n\treturn buf, nil\n}", "title": "" }, { "docid": "f78facb4e36235e1a893bb68902c8621", "score": "0.50602865", "text": "func (t Time) MarshalBinary() ([]byte, error) {\n\treturn t.Time.MarshalText()\n}", "title": "" }, { "docid": "960d85884caaa17638b814088c636476", "score": "0.50600564", "text": "func (m *TransactionWithProof) Marshal() []byte {\n\twriter := jspb.NewWriter()\n\tm.MarshalToWriter(writer)\n\treturn writer.GetResult()\n}", "title": "" }, { "docid": "4d564700d7d7016e817543116985176f", "score": "0.50509536", "text": "func (msg *CommitTransactionRequest) MarshalBinary() ([]byte, error) {\n\t// A simple encoding: plain text.\n\tvar b bytes.Buffer\n\t_, err := fmt.Fprintln(&b, msg.BufLength, msg.verbId, msg.sequenceNo, msg.timestamp,\n\t\tmsg.requestId, msg.authToken, msg.sessionId, msg.dataOffset, msg.isUpdatable, msg.addedList,\n\t\tmsg.updatedList, msg.removedList, msg.attrDescSet)\n\tif err != nil {\n\t\tlogger.Error(fmt.Sprintf(\"ERROR: Returning CommitTransactionRequest:MarshalBinary w/ Error: '%+v'\", err.Error()))\n\t\treturn nil, err\n\t}\n\treturn b.Bytes(), nil\n}", "title": "" }, { "docid": "a319b642aec9bee0a8302a52404d7030", "score": "0.5046327", "text": "func EncodeBinary(item Item, w *io.BinWriter) {\n\tdata, err := Serialize(item)\n\tif err != nil {\n\t\tw.Err = err\n\t\treturn\n\t}\n\tw.WriteBytes(data)\n}", "title": "" }, { "docid": "8c918cc768a906ab6c178489c92231ec", "score": "0.5020987", "text": "func (pkt Packet) EncodeBinary(writer io.Writer) error {\n\tvar err error\n\tbuf := bufio.NewWriter(writer)\n\tlength := 
VarInt(pkt.ID.Length() + len(pkt.Data))\n\tif err = length.EncodeBinary(buf); err != nil {\n\t\treturn err\n\t}\n\tif err = pkt.ID.EncodeBinary(buf); err != nil {\n\t\treturn err\n\t}\n\tif err = binary.Write(buf, binary.BigEndian, pkt.Data); err != nil {\n\t\treturn err\n\t}\n\treturn buf.Flush()\n}", "title": "" }, { "docid": "8fda6147c9b72ca825e75fc49d4f4a92", "score": "0.5004584", "text": "func (w *Withdrawal) Bytes() []byte {\n\tb, err := json.Marshal(w)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn b\n}", "title": "" }, { "docid": "84c24b7a649cbd224bb41d9c69ccf26b", "score": "0.4990727", "text": "func (s *relayState) encode() string {\n\tb, _ := json.Marshal(s)\n\treturn base64.StdEncoding.EncodeToString(b)\n}", "title": "" }, { "docid": "8776bfc129cd7b0e120248e7d0e4fa91", "score": "0.49831712", "text": "func _1155walEncodeFrame(tls crt.TLS, _pWal uintptr /* *TWal = SWal */, _iPage uint32, _nTruncate uint32, _aData uintptr /* *Tu8 = uint8 */, _aFrame uintptr /* *Tu8 = uint8 */) {\n\tvar (\n\t\t_nativeCksum int32\n\t\t_aCksum uintptr // *Tu32 = uint32\n\t)\n\t_aCksum = (_pWal + 72) + 24\n\n\t_329sqlite3Put4byte(tls, _aFrame, _iPage)\n\t_329sqlite3Put4byte(tls, _aFrame+4, _nTruncate)\n\tif (*(*uint32)(unsafe.Pointer(_pWal + 124))) != uint32(0) {\n\t\tgoto _1\n\t}\n\n\tcrt.Xmemcpy(tls, _aFrame+8, (_pWal+72)+32, uint64(8))\n\t_nativeCksum = bool2int(int32(*(*uint8)(unsafe.Pointer((_pWal + 72) + 13))) == int32(0))\n\t_901walChecksumBytes(tls, _nativeCksum, _aFrame, int32(8), _aCksum, _aCksum)\n\t_901walChecksumBytes(tls, _nativeCksum, _aData, int32(*(*uint32)(unsafe.Pointer(_pWal + 56))), _aCksum, _aCksum)\n\t_329sqlite3Put4byte(tls, _aFrame+16, *(*uint32)(unsafe.Pointer(_aCksum)))\n\t_329sqlite3Put4byte(tls, _aFrame+20, *(*uint32)(unsafe.Pointer(_aCksum + 4)))\n\tgoto _2\n\n_1:\n\tcrt.Xmemset(tls, _aFrame+8, int32(0), uint64(16))\n_2:\n}", "title": "" }, { "docid": "117fb99cee0933801079804ff0911e89", "score": "0.49774033", "text": "func (wal *baseWAL) Write(msg WALMessage) error {\n\tif wal == nil {\n\t\treturn nil\n\t}\n\n\tif err := wal.enc.Write(TimedWALMessage{tmtime.Now(), msg}); err != nil {\n\t\twal.Logger.Error(\"Error writing msg to consensus wal. 
WARNING: recover may not be possible for the current height\",\n\t\t\t\"err\", err, \"msg\", msg)\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "806f1c39fdc93c930918e9f249788ed0", "score": "0.49766517", "text": "func (tx *SetAssetScriptV1) MarshalBinary() ([]byte, error) {\n\tbb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal SetAssetScriptV1 transaction to bytes\")\n\t}\n\tbl := len(bb)\n\tif tx.Proofs == nil {\n\t\treturn nil, errors.New(\"failed to marshal SetAssetScriptV1 transaction to bytes: no proofs\")\n\t}\n\tpb, err := tx.Proofs.MarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal SetAssetScriptV1 transaction to bytes\")\n\t}\n\tbuf := make([]byte, 1+bl+len(pb))\n\tcopy(buf[1:], bb)\n\tcopy(buf[1+bl:], pb)\n\treturn buf, nil\n}", "title": "" }, { "docid": "7cf1344f66f2d685d2c615ede6c41754", "score": "0.49758652", "text": "func (msg *CommitTransactionResponse) MarshalBinary() ([]byte, error) {\n\t// A simple encoding: plain text.\n\tvar b bytes.Buffer\n\t_, err := fmt.Fprintln(&b, msg.BufLength, msg.verbId, msg.sequenceNo, msg.timestamp,\n\t\tmsg.requestId, msg.authToken, msg.sessionId, msg.dataOffset, msg.isUpdatable, msg.addedIdList, msg.addedCount,\n\t\tmsg.updatedIdList, msg.updatedCount, msg.removedIdList, msg.removedCount, msg.attrDescIdList, msg.attrDescCount,\n\t\tmsg.graphObjFact, msg.exception)\n\tif err != nil {\n\t\tlogger.Error(fmt.Sprintf(\"ERROR: Returning CommitTransactionResponse:MarshalBinary w/ Error: '%+v'\", err.Error()))\n\t\treturn nil, err\n\t}\n\treturn b.Bytes(), nil\n}", "title": "" }, { "docid": "cff9e7f1461b84927038aedf9fc6aefb", "score": "0.49716944", "text": "func (b *Block) MarshalBinary() ([]byte, error) {\n\tres := make([]byte, 1+8+64+4+8+32+4)\n\tres[0] = byte(b.Version)\n\tbinary.BigEndian.PutUint64(res[1:9], b.Timestamp)\n\tcopy(res[9:], b.Parent[:])\n\tbinary.BigEndian.PutUint32(res[73:77], b.ConsensusBlockLength)\n\tbinary.BigEndian.PutUint64(res[77:85], b.BaseTarget)\n\tcopy(res[85:117], b.GenSignature[:])\n\tbinary.BigEndian.PutUint32(res[117:121], b.TransactionBlockLength)\n\tif b.Version >= NgBlockVersion {\n\t\t// Add tx count.\n\t\tbuf := make([]byte, 4)\n\t\tbinary.BigEndian.PutUint32(buf, uint32(b.TransactionCount))\n\t\tres = append(res, buf...)\n\t\tres = append(res, b.Transactions...)\n\t\tbinary.BigEndian.PutUint32(buf, uint32(b.FeaturesCount))\n\t\tres = append(res, buf...)\n\t\t// Add features.\n\t\tfb, err := featuresToBinary(b.Features)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tres = append(res, fb...)\n\t} else {\n\t\tres = append(res, byte(b.TransactionCount))\n\t\tres = append(res, b.Transactions...)\n\t}\n\tres = append(res, b.GenPublicKey[:]...)\n\tres = append(res, b.BlockSignature[:]...)\n\n\treturn res, nil\n}", "title": "" }, { "docid": "185d3458fcb7949bdd2156d2ee979ef7", "score": "0.4971036", "text": "func MarshalTransaction(r *bytes.Buffer, f *Transaction) error {\n\tif err := encoding.WriteUint32LE(r, f.Version); err != nil {\n\t\treturn err\n\t}\n\n\tif err := encoding.WriteUint32LE(r, uint32(f.TxType)); err != nil {\n\t\treturn err\n\t}\n\n\treturn MarshalTransactionPayload(r, f.Payload)\n}", "title": "" }, { "docid": "1e09e7359198b140a9e8cfdcf096edc0", "score": "0.49703276", "text": "func TimeMarshalBinary(t time.Time,) ([]byte, error)", "title": "" }, { "docid": "36e836859628d602764bc96211602990", "score": "0.4969306", "text": "func (transactionBuilder *TransactionBuilder) 
ToBytesEncoded() ([]byte, error) {\n\ttransactionBuilder.MakeDefaultFee()\n\tvar txBytes bytes.Buffer\n\t_, err := xdr.Marshal(&txBytes, transactionBuilder.TransactionXDR)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"marshal xdr failed\")\n\t}\n\n\treturn txBytes.Bytes(), nil\n}", "title": "" }, { "docid": "e8fd9dff761a59ed0eefaf60479ba75f", "score": "0.49686617", "text": "func (e *Encoder) Encode(index uint64, p []byte) (int, error) {\n\tif index == 0 {\n\t\treturn 0, fmt.Errorf(\"index 0 is invalid as a starting index for a write-ahead log\")\n\t}\n\tif e.nextIndex != 0 && index != e.nextIndex {\n\t\treturn 0, fmt.Errorf(\"expected next index in sequence %d, got %d\", e.nextIndex, index)\n\t}\n\te.nextIndex = index + 1\n\n\tcrc := crc32.Checksum(p, CRCTable)\n\tentry := make([]byte, len(p)+12)\n\tbinary.LittleEndian.PutUint64(entry[0:8], index)\n\tbinary.LittleEndian.PutUint32(entry[8:12], crc)\n\tcopy(entry[12:], p)\n\n\tn, err := e.dest.Encode(entry)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"wal append: %w\", err)\n\t}\n\treturn n, nil\n}", "title": "" }, { "docid": "6c92f0f0851647065b76fd3503787fe7", "score": "0.4957165", "text": "func (ptn *PatriciaTrieNode) MarshalBinary() (_ []byte, err error) {\n\tvar buf bytes.Buffer\n\tenc := gob.NewEncoder(&buf)\n\n\t// reportURI\n\tenc.Encode(ptn.reportURI)\n\n\t// Filter\n\thasFilter := ptn.filterObject != nil\n\tenc.Encode(hasFilter)\n\tif hasFilter {\n\t\terr = enc.Encode(ptn.filterObject)\n\t}\n\n\t// One\n\thasOne := ptn.one != nil\n\tenc.Encode(hasOne)\n\tif hasOne {\n\t\terr = enc.Encode(ptn.one)\n\t}\n\n\t// Zero\n\thasZero := ptn.zero != nil\n\tenc.Encode(hasZero)\n\tif hasZero {\n\t\terr = enc.Encode(ptn.zero)\n\t}\n\n\treturn buf.Bytes(), err\n}", "title": "" }, { "docid": "ef43c8ffcf76bef66d4b0db0af0e416c", "score": "0.4953492", "text": "func (e *LogEntry) encode(w io.Writer) (int, error) {\n\tdefer e.log.pBuffer.Reset()\n\te.log.pLogEntry.Index = proto.Uint64(e.Index)\n\te.log.pLogEntry.Term = proto.Uint64(e.Term)\n\te.log.pLogEntry.CommandName = proto.String(e.CommandName)\n\te.log.pLogEntry.Command = e.Command\n\n\terr := e.log.pBuffer.Marshal(e.log.pLogEntry)\n\tif err != nil {\n\t\treturn -1, err\n\t}\n\n\tif _, err = fmt.Fprintf(w, \"%8x\\n\", len(e.log.pBuffer.Bytes())); err != nil {\n\t\treturn -1, err\n\t}\n\n\treturn w.Write(e.log.pBuffer.Bytes())\n}", "title": "" }, { "docid": "e99b288c4c5c185871eabaf3b99b19d6", "score": "0.49437916", "text": "func (v *Version) EncodeBinary(w *io.BinWriter) {\n\tw.WriteU32LE(uint32(v.Major))\n\tw.WriteU32LE(uint32(v.Minor))\n\tw.WriteU32LE(uint32(v.Build))\n\tw.WriteU32LE(uint32(v.Revision))\n}", "title": "" }, { "docid": "414359aed92d4cdd1c7f3a45606d4527", "score": "0.49421644", "text": "func (msg *HCMsgTx) encodeWitness(w io.Writer, pver uint32) error {\n\tcount := uint64(len(msg.HCTxIn))\n\terr := WriteVarInt(w, pver, count)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor _, ti := range msg.HCTxIn {\n\t\terr = writeHCTxInWitness(w, pver, msg.Version, ti)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "3abba937580e4eb1cf1fa1b597d60290", "score": "0.494179", "text": "func (tx Tx) String() string { return fmt.Sprintf(\"Tx{%X}\", []byte(tx)) }", "title": "" }, { "docid": "001a86b7ea0de3251dc1193a726b19b6", "score": "0.4931715", "text": "func (tx *LeaseCancelV1) MarshalBinary() ([]byte, error) {\n\tb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal LeaseCancelV1 
transaction to bytes\")\n\t}\n\tbl := len(b)\n\tbuf := make([]byte, bl+crypto.SignatureSize)\n\tcopy(buf[0:], b)\n\tcopy(buf[bl:], tx.Signature[:])\n\treturn buf, nil\n}", "title": "" }, { "docid": "24e3bb03658fe920756fb06eaa73397a", "score": "0.49250758", "text": "func (tx *Tx) ToString() string {\r\n\treturn hex.EncodeToString(tx.ToBytes())\r\n}", "title": "" }, { "docid": "3dc5d7ce7500cb9ebeed445108b0f0a1", "score": "0.49224928", "text": "func (t *TLSH) Binary() []byte {\n\treturn append([]byte{swapByte(t.checksum), swapByte(t.lValue), t.qRatio}, t.code[:]...)\n}", "title": "" }, { "docid": "89041a8ca60bd875c243c63ae3d45b1c", "score": "0.49153367", "text": "func (arr *BitArray) Encode() string {\n return b64.StdEncoding.EncodeToString(arr.data)\n}", "title": "" }, { "docid": "89041a8ca60bd875c243c63ae3d45b1c", "score": "0.49153367", "text": "func (arr *BitArray) Encode() string {\n return b64.StdEncoding.EncodeToString(arr.data)\n}", "title": "" }, { "docid": "a65a2a4044421ddc331cff52f553ce0d", "score": "0.48874512", "text": "func (m *TransactionListWithProof) Marshal() []byte {\n\twriter := jspb.NewWriter()\n\tm.MarshalToWriter(writer)\n\treturn writer.GetResult()\n}", "title": "" }, { "docid": "505558908e33f3f0c704c5208c073a6d", "score": "0.48853865", "text": "func (w *byteBufferWAL) Write(m WALMessage) error {\n\tif w.stopped {\n\t\tw.logger.Debug(\"WAL already stopped. Not writing message\", \"msg\", m)\n\t\treturn nil\n\t}\n\n\tif endMsg, ok := m.(EndHeightMessage); ok {\n\t\tw.logger.Debug(\"WAL write end height message\", \"height\", endMsg.Height, \"stopHeight\", w.heightToStop)\n\t\tif endMsg.Height == w.heightToStop {\n\t\t\tw.logger.Debug(\"Stopping WAL at height\", \"height\", endMsg.Height)\n\t\t\tw.signalWhenStopsTo <- struct{}{}\n\t\t\tw.stopped = true\n\t\t\treturn nil\n\t\t}\n\t}\n\n\tw.logger.Debug(\"WAL Write Message\", \"msg\", m)\n\terr := w.enc.Encode(&TimedWALMessage{fixedTime, m})\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"failed to encode the msg %v\", m))\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "77c45d0ab7b8f01b34910eb6cb24bf71", "score": "0.48849854", "text": "func (n *Node) MarshalBinary() ([]byte, error) {\n\tb := make([]byte, nodeMarshaledSize)\n\n\tl := binary.LittleEndian\n\tl.PutUint64(b[0:8], n.Key)\n\tl.PutUint64(b[8:16], n.TransactionID)\n\tl.PutUint64(b[16:24], n.RawKey.Addr)\n\tl.PutUint64(b[24:32], n.RawKey.Size)\n\tl.PutUint64(b[32:40], n.RawValue.Addr)\n\tl.PutUint64(b[40:48], n.RawValue.Size)\n\tfor i := 0; i < width; i++ {\n\t\tl.PutUint64(b[48+(i*8):56+(i*8)], n.Children[i].Addr)\n\t\tl.PutUint64(b[56+(i*8):64+(i*8)], n.Children[i].Size)\n\t}\n\n\treturn b, nil\n}", "title": "" }, { "docid": "92cd317f44a5c590b14f7fca3c5402ed", "score": "0.48842758", "text": "func EncodeBinary(b []byte, i []byte) []byte {\n\tb = append(b, orderedEncodingBinary)\n\ts := uint(1)\n\tt := byte(0)\n\tfor _, v := range i {\n\t\tb = append(b, byte(0x80|t|((v&0xff)>>s)))\n\t\tif s < 7 {\n\t\t\tt = v << (7 - s)\n\t\t\ts++\n\t\t} else {\n\t\t\tb = append(b, byte(0x80|v))\n\t\t\ts = 1\n\t\t\tt = 0\n\t\t}\n\t}\n\tif s > 1 {\n\t\tb = append(b, byte(0x80|t))\n\t}\n\treturn append(b, orderedEncodingTerminator)\n}", "title": "" }, { "docid": "f37198ed39179e000e53594d5c7e7fb4", "score": "0.48818332", "text": "func (rf *Raft) persist() {\n // Your code here (2C).\n // Example:\n // w := new(bytes.Buffer)\n // e := labgob.NewEncoder(w)\n // e.Encode(rf.xxx)\n // e.Encode(rf.yyy)\n // data := w.Bytes()\n // rf.persister.SaveRaftState(data)\n\n 
labgob.Register(logContent{})\n w := new(bytes.Buffer)\n e := labgob.NewEncoder(w)\n e.Encode(rf.currentTerm)\n e.Encode(rf.voteFor)\n e.Encode(rf.commitIndex)\n e.Encode(rf.lastIncludedTerm)\n e.Encode(rf.lastIncludedIndex)\n\n for i := 1 + rf.lastIncludedIndex; i <= rf.commitIndex; i++ {\n e.Encode(rf.log[i-rf.lastIncludedIndex])\n }\n data := w.Bytes()\n rf.persister.SaveRaftState(data)\n}", "title": "" }, { "docid": "e2bbbf6dcf7558aa654492427385a858", "score": "0.48797262", "text": "func (cp *ChainProperties) MarshalTransaction(encoder *transaction.Encoder) error {\n\tenc := transaction.NewRollingEncoder(encoder)\n\tenc.Encode(cp.AccountCreationFee)\n\tenc.Encode(cp.MaximumBlockSize)\n\tenc.Encode(cp.SBDInterestRate)\n\tenc.Encode(cp.CreateAccountMinGolosFee)\n\tenc.Encode(cp.CreateAccountMinDelegation)\n\tenc.Encode(cp.CreateAccountDelegationTime)\n\tenc.Encode(cp.MinDelegation)\n\tenc.Encode(cp.MaxReferralInterestRate)\n\tenc.Encode(cp.MaxReferralTermSec)\n\tenc.Encode(cp.MinReferralBreakFee)\n\tenc.Encode(cp.MaxReferralBreakFee)\n\tenc.Encode(cp.PostsWindow)\n\tenc.Encode(cp.PostsPerWindow)\n\tenc.Encode(cp.CommentsWindow)\n\tenc.Encode(cp.CommentsPerWindow)\n\tenc.Encode(cp.VotesWindow)\n\tenc.Encode(cp.VotesPerWindow)\n\tenc.Encode(cp.AuctionWindowSize)\n\tenc.Encode(cp.MaxDelegatedVestingInterestRate)\n\tenc.Encode(cp.CustomOpsBandwidthMultiplier)\n\tenc.Encode(cp.MinCurationPercent)\n\tenc.Encode(cp.MaxCurationPercent)\n\tenc.Encode(cp.CurationRewardCurve)\n\tenc.Encode(cp.AllowDistributeAuctionReward)\n\tenc.Encode(cp.AllowReturnAuctionRewardToFund)\n\treturn enc.Err()\n}", "title": "" }, { "docid": "9d13ee95e0c383fd1af84e7065a22973", "score": "0.4875586", "text": "func (obj *T3) MarshalJSONBinary() ([]byte, error) {\n\treturn json.Marshal(obj)\n}", "title": "" }, { "docid": "2e9ee3f1dc465b0cbbbea1f8494e4bd6", "score": "0.48726803", "text": "func (t *TxDetail) Bytes() []byte {\n\treturn util.ToBytes(t)\n}", "title": "" }, { "docid": "5284f6362e8fb874784566a6890efab3", "score": "0.48715207", "text": "func (action *Alteration) serialize() ([]byte, error) {\n\tbuf := new(bytes.Buffer)\n\t// EntryTxID (TxId)\n\t{\n\t\tb, err := action.EntryTxID.Serialize()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif err := write(buf, b); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// Message (string)\n\t{\n\t\tif err := WriteVarChar(buf, action.Message, 32); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn buf.Bytes(), nil\n}", "title": "" }, { "docid": "07136229bf6ca5c920908c5667b35378", "score": "0.48678294", "text": "func (tx Transaction) String() string {\n\tvar lines []string\n\n\tlines = append(lines, fmt.Sprintf(\"--- Transaction %x:\", tx.ID))\n\t// lines = append(lines, fmt.Sprintf(\" Block %x:\", tx.BlockHash))\n\tlines = append(lines, fmt.Sprintf(\" Signature %x:\", tx.Signature))\n\n\t// for i, input := range tx.Inputs {\n\t// \tlines = append(lines, fmt.Sprintf(\" Input %d:\", i))\n\t// \tlines = append(lines, fmt.Sprintf(\" TXID: %x\", input.ID))\n\t// \tlines = append(lines, fmt.Sprintf(\" Out: %d\", input.Out))\n\t// \tlines = append(lines, fmt.Sprintf(\" Signature: %x\", input.Signature))\n\t// \tlines = append(lines, fmt.Sprintf(\" PubKey: %x\", input.PubKey))\n\t// }\n\n\t// for i, output := range tx.Outputs {\n\t// \tlines = append(lines, fmt.Sprintf(\" Output %d:\", i))\n\t// \tlines = append(lines, fmt.Sprintf(\" Value: %d\", output.Value))\n\t// \tlines = append(lines, fmt.Sprintf(\" Script: %x\", output.PubKeyHash))\n\t// }\n\n\treturn 
strings.Join(lines, \"\\n\")\n}", "title": "" }, { "docid": "cfcd130301f426787eccc76564ecd01c", "score": "0.48657364", "text": "func (obj *DatabaseStatisticsImpl) MarshalBinary() ([]byte, error) {\n\t// A simple encoding: plain text.\n\tvar b bytes.Buffer\n\t_, err := fmt.Fprintln(&b, obj.blockSize, obj.dataBlockSize, obj.dataFree, obj.dataSize, obj.dataUsed,\n\t\tobj.dbSize, obj.indexFree, obj.indexSize, obj.indexUsed, obj.numDataSegments, obj.numIndexSegments)\n\tif err != nil {\n\t\tlogger.Error(fmt.Sprintf(\"ERROR: Returning DatabaseStatisticsImpl:MarshalBinary w/ Error: '%+v'\", err.Error()))\n\t\treturn nil, err\n\t}\n\treturn b.Bytes(), nil\n}", "title": "" }, { "docid": "6573032b39607b9f859fa178a97ee7dc", "score": "0.4862928", "text": "func raftLogToBytes(l raft.Log) []byte {\n\tbytes, err := json.Marshal(l)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn bytes\n}", "title": "" }, { "docid": "228be125d5c05ddadf09ba6defd96331", "score": "0.48610324", "text": "func (tx *MassTransferV1) MarshalBinary() ([]byte, error) {\n\tbb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal MassTransferV1 transaction to bytes\")\n\t}\n\tbl := len(bb)\n\tif tx.Proofs == nil {\n\t\treturn nil, errors.New(\"failed to marshal MassTransferV1 transaction to bytes: no proofs\")\n\t}\n\tpb, err := tx.Proofs.MarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal MassTransferV1 transaction to bytes\")\n\t}\n\tpl := len(pb)\n\tbuf := make([]byte, bl+pl)\n\tcopy(buf[0:], bb)\n\tcopy(buf[bl:], pb)\n\treturn buf, nil\n}", "title": "" }, { "docid": "87124d27a046c9dd0ab0002bd18f3208", "score": "0.48566574", "text": "func (t *Item) Encode() (ItemBinary, error) {\n\tbuf := bytes.NewBuffer(nil)\n\terr := gob.NewEncoder(buf).Encode(t)\n\treturn buf.Bytes(), err\n}", "title": "" }, { "docid": "25faa8d3fa007fc7c180c90ab3dc7c6e", "score": "0.48516193", "text": "func (tx *LeaseV2) MarshalBinary() ([]byte, error) {\n\tbb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal LeaseV2 transaction to bytes\")\n\t}\n\tbl := len(bb)\n\tif tx.Proofs == nil {\n\t\treturn nil, errors.New(\"failed to marshal LeaseV2 transaction to bytes: no proofs\")\n\t}\n\tpb, err := tx.Proofs.MarshalBinary()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal LeaseV2 transaction to bytes\")\n\t}\n\tbuf := make([]byte, 1+bl+len(pb))\n\tbuf[0] = 0\n\tcopy(buf[1:], bb)\n\tcopy(buf[1+bl:], pb)\n\treturn buf, nil\n}", "title": "" }, { "docid": "e230fedd979c783bffb7dcb37778be9c", "score": "0.4847712", "text": "func (bt *Bytes) Encode() ([]byte, error) {\n\tencoded := make([]byte, 5+len(bt.v))\n\tencoded[0] = VTypeBytes\n\tbinary.BigEndian.PutUint32(encoded[1:5], uint32(len(bt.v)))\n\tcopy(encoded[5:], bt.v)\n\treturn encoded, nil\n}", "title": "" }, { "docid": "f15edbd71a82e6aaf6919f685dab184b", "score": "0.48461694", "text": "func (s *FraudSerializer) Serialize(p *Transaction, w io.Writer) (err error) {\n\tvar buf []byte\n\tbuf = append(buf, p.Id...)\n\tbuf = append(buf, p.TransactionValues...)\n\tbuf = append(buf, p.ReferenceValues...)\n\t_, err = w.Write(buf)\n\treturn err\n}", "title": "" }, { "docid": "1c1e93e6da233a931d35b2138377d5df", "score": "0.4840726", "text": "func (tx *Payment) MarshalBinary() ([]byte, error) {\n\tb, err := tx.bodyMarshalBinary()\n\tif err != nil {\n\n\t}\n\tbuf := make([]byte, paymentBodyLen+crypto.SignatureSize)\n\tcopy(buf, b)\n\tcopy(buf[paymentBodyLen:], 
tx.Signature[:])\n\treturn buf, nil\n}", "title": "" }, { "docid": "e58e35e2f5cc36bb05029ca4fb363106", "score": "0.48364022", "text": "func (rf *Raft) encodeRaftState() []byte {\n\tw := new(bytes.Buffer)\n\te := labgob.NewEncoder(w)\n\te.Encode(rf.currentTerm)\n\te.Encode(rf.voteFor)\n\te.Encode(rf.log)\n\n\te.Encode(rf.lastIncludedIndex)\n\te.Encode(rf.lastIncludedTerm)\n\tdata := w.Bytes()\n\treturn data\n}", "title": "" } ]
4d86b8e93aade4fd6cbf41a6c7d433d1
SetNewWhiteList replace the old whitelist with the new one
[ { "docid": "fc921421f2ff0a73238f7478ec712e84", "score": "0.73051476", "text": "func (br *BoostrappingRoutine) SetNewWhiteList(newPeers [][]byte) error {\n\n\tnewWhiteList := make(map[peer.ID][]ma.Multiaddr)\n\tfor _, marshalled := range newPeers {\n\n\t\tvar unmarshalled peer.AddrInfo\n\t\terr := unmarshalled.UnmarshalJSON(marshalled)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tnewWhiteList[unmarshalled.ID] = unmarshalled.Addrs\n\t}\n\n\tbxd := byXORDistance{\n\t\tslice: make([]peer.ID, len(newPeers)),\n\t\treference: br.Host.ID(),\n\t}\n\n\tindex := 0 // Population the slice with the keys (peer.IDs) of newWhiteList\n\tfor id := range newWhiteList {\n\t\tbxd.slice[index] = id\n\t\tindex++\n\t}\n\tbxd.Sort()\n\n\tbr.mut.Lock()\n\tbr.whitelist = newWhiteList\n\tbr.sortedPeers = bxd.slice\n\tbr.mut.Unlock()\n\n\treturn nil\n}", "title": "" } ]
[ { "docid": "a21fc84662da2419e2939120ec0c0a9b", "score": "0.654687", "text": "func (base *BaseHandler) SetCollectorWhiteList(whiteList []string) {\n\tbase.whiteListedCollectors = make(map[string]bool)\n\tfor _, collectorName := range whiteList {\n\t\tbase.whiteListedCollectors[collectorName] = true\n\t}\n}", "title": "" }, { "docid": "5974e6cd71259cad89c88f31e5996ff6", "score": "0.62148076", "text": "func loadWhiteList(wf *wafg.WafServer) {\n\tentries := viper.GetStringSlice(\"whitelist\")\n\tfor _, ip := range entries {\n\t\tlogger.WithField(\"ip\", ip).\n\t\t\tDebug(\"whitelisted ip\")\n\t\twf.IpBanManager.WhiteList(ip)\n\t}\n}", "title": "" }, { "docid": "b2ccf493faef05d0e7b4326598d7af8e", "score": "0.621057", "text": "func (_ERC20 *ERC20Transactor) SetWhitelist(opts *bind.TransactOpts, who common.Address, enable bool) (*types.Transaction, error) {\n\treturn _ERC20.contract.Transact(opts, \"setWhitelist\", who, enable)\n}", "title": "" }, { "docid": "c2937d862cd6a0136f3597690b461a81", "score": "0.59899276", "text": "func (client *Client) ModifyDomainBlackWhiteList(request *ModifyDomainBlackWhiteListRequest) (response *ModifyDomainBlackWhiteListResponse, err error) {\n\tresponse = CreateModifyDomainBlackWhiteListResponse()\n\terr = client.DoAction(request, response)\n\treturn\n}", "title": "" }, { "docid": "e1d8e3a069105809b7012d6ecbf4b234", "score": "0.5869673", "text": "func (_ERC20 *ERC20Session) SetWhitelist(who common.Address, enable bool) (*types.Transaction, error) {\n\treturn _ERC20.Contract.SetWhitelist(&_ERC20.TransactOpts, who, enable)\n}", "title": "" }, { "docid": "2f856541579609dd54b765b6e13ae454", "score": "0.5828546", "text": "func (o *VariableNameOptimizer) SetBlacklist(bl []string) {\n\to.blacklist = make(map[string]bool)\n\tfor _, el := range bl {\n\t\to.blacklist[el] = true\n\t}\n}", "title": "" }, { "docid": "7d8e6b103d0f9e29bad97835f8d8bb39", "score": "0.5790337", "text": "func (_ERC20 *ERC20TransactorSession) SetWhitelist(who common.Address, enable bool) (*types.Transaction, error) {\n\treturn _ERC20.Contract.SetWhitelist(&_ERC20.TransactOpts, who, enable)\n}", "title": "" }, { "docid": "083bd429799ff9c8aa52252889ac32e9", "score": "0.57411844", "text": "func (_ERC20 *ERC20Session) Whitelist(arg0 common.Address) (bool, error) {\n\treturn _ERC20.Contract.Whitelist(&_ERC20.CallOpts, arg0)\n}", "title": "" }, { "docid": "52c88c848487e191d13f5dad21489625", "score": "0.5721292", "text": "func updateEmbargoWhitelist(w http.ResponseWriter, r *http.Request) {\n\tlog.Printf(\"Update the site IPs used for embargo process.\\n\")\n\n\terr := embargo.UpdateWhitelist()\n\tif err != nil {\n\t\tlog.Print(err.Error())\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tw.WriteHeader(http.StatusOK)\n\tw.Write([]byte(\"OK\"))\n}", "title": "" }, { "docid": "062d5b75bd98ed96fa6e47f5df3c7973", "score": "0.56904215", "text": "func (_ERC20 *ERC20CallerSession) Whitelist(arg0 common.Address) (bool, error) {\n\treturn _ERC20.Contract.Whitelist(&_ERC20.CallOpts, arg0)\n}", "title": "" }, { "docid": "55b4a380266fc8a770dd8898fe49e793", "score": "0.56328386", "text": "func (base *BaseHandler) SetCollectorBlackList(blackList []string) {\n\tbase.blackListedCollectors = make(map[string]bool)\n\tfor _, collectorName := range blackList {\n\t\tbase.blackListedCollectors[collectorName] = true\n\t}\n}", "title": "" }, { "docid": "5ee0047f111709bfa13347d977080758", "score": "0.55984265", "text": "func SetupManagerWhitelist(server *Server) {\n\tif 
server.Config.TLS.ManagerWhiteList != \"\" {\n\t\tserver.lock.Lock()\n\t\tdefer server.lock.Unlock()\n\t\tarr := strings.Split(server.Config.TLS.ManagerWhiteList, \",\")\n\t\tserver.ManagerWhiteList = make(map[string]struct{}, len(arr))\n\t\tfor _, cn := range arr {\n\t\t\tserver.ManagerWhiteList[cn] = struct{}{}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "113de31440058f3e9873c75b2de8a438", "score": "0.5477099", "text": "func VerifyWhitelist(next http.Handler) http.Handler {\n\tfn := func(w http.ResponseWriter, r *http.Request) {\n\t\tif cidrRanger != nil {\n\t\t\t// check remote address\n\t\t\tremoteIP, _, _ := net.SplitHostPort(r.RemoteAddr)\n\t\t\tallowed, _ := cidrRanger.Contains(net.ParseIP(remoteIP))\n\t\t\tforwardedFor := r.Header.Get(\"X-Forwarded-For\")\n\t\t\tif !allowed && forwardedFor != \"\" {\n\t\t\t\t// chckk forwarded for\n\t\t\t\tfor _, ip := range strings.Split(forwardedFor, \",\") {\n\t\t\t\t\tallowed, _ = cidrRanger.Contains(net.ParseIP(strings.TrimSpace(ip)))\n\t\t\t\t\tif allowed {\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !allowed {\n\t\t\t\tlogger.Info(\"blocked by IP whitelist\",\n\t\t\t\t\tzap.String(\"remote_ip\", remoteIP), zap.String(\"forwarded_for\", forwardedFor))\n\t\t\t\thttp.Error(w, http.StatusText(http.StatusForbidden), http.StatusForbidden)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tnext.ServeHTTP(w, r)\n\t}\n\treturn http.HandlerFunc(fn)\n}", "title": "" }, { "docid": "782cab65fd82d7ed22213bb10c362ddd", "score": "0.53558785", "text": "func (_ERC20 *ERC20Caller) Whitelist(opts *bind.CallOpts, arg0 common.Address) (bool, error) {\n\tvar out []interface{}\n\terr := _ERC20.contract.Call(opts, &out, \"whitelist\", arg0)\n\n\tif err != nil {\n\t\treturn *new(bool), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(bool)).(*bool)\n\n\treturn out0, err\n\n}", "title": "" }, { "docid": "492dd5408f24a829c7034e836d12d631", "score": "0.53534424", "text": "func NewWhitelist(s string) *Whitelist {\n\tw := &Whitelist{\n\t\taccounts: make(map[string]struct{}),\n\t}\n\tfor _, acctId := range strings.Split(s, \",\") {\n\t\tw.accounts[acctId] = struct{}{}\n\t}\n\treturn w\n}", "title": "" }, { "docid": "91499a93dd5aa14b174716c7c7331123", "score": "0.5321395", "text": "func (o OriginEndpointOutput) Whitelist() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v *OriginEndpoint) pulumi.StringArrayOutput { return v.Whitelist }).(pulumi.StringArrayOutput)\n}", "title": "" }, { "docid": "8ce3498f615a20328d2e1e0f8ee87cee", "score": "0.531403", "text": "func WithWhitelist(pluginNames ...string) Option {\n\treturn func(mgr *PluginManagerSwapper) {\n\t\tmgr.opts.whitelist = pluginNames\n\t}\n}", "title": "" }, { "docid": "3fdea0b041607c91703a4f617e1c227e", "score": "0.5260335", "text": "func (b *InsertBuilder) Whitelist(columns ...string) *InsertBuilder {\n\tb.cols = columns\n\treturn b\n}", "title": "" }, { "docid": "2a43fbaa6ea6d4928e0d2572fd9f8785", "score": "0.52214944", "text": "func Whitelist(chats ...int64) tele.MiddlewareFunc {\n\treturn func(next tele.HandlerFunc) tele.HandlerFunc {\n\t\treturn Restrict(RestrictConfig{\n\t\t\tChats: chats,\n\t\t\tIn: next,\n\t\t\tOut: func(c tele.Context) error { return nil },\n\t\t})(next)\n\t}\n}", "title": "" }, { "docid": "56da257658535dd200d5ff05118ba994", "score": "0.51694155", "text": "func MockIPWhitelist(ip, whitelist string) MockedDatum {\n\tif net.ParseIP(ip) == nil {\n\t\tpanic(fmt.Sprintf(\"%q is not a valid IP address\", ip))\n\t}\n\treturn MockedDatum{\n\t\tapply: func(db *FakeDB) {\n\t\t\twl, ok := 
db.ips[ip]\n\t\t\tif !ok {\n\t\t\t\twl = stringset.New(1)\n\t\t\t\tif db.ips == nil {\n\t\t\t\t\tdb.ips = make(map[string]stringset.Set, 1)\n\t\t\t\t}\n\t\t\t\tdb.ips[ip] = wl\n\t\t\t}\n\t\t\twl.Add(whitelist)\n\t\t},\n\t}\n}", "title": "" }, { "docid": "2f2b8fc94a62ac25e9859dca97240e88", "score": "0.5128742", "text": "func (m *IosSingleSignOnSettings) SetAllowedAppsList(value []AppListItemable)() {\n err := m.GetBackingStore().Set(\"allowedAppsList\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "1719b57434fae4cd15e237fa1af34c5d", "score": "0.5083489", "text": "func (s *SIPWhiteListService) SetSipWhiteListItem(params SetSipWhiteListItemParams) (*SetSipWhiteListItemReturn, *structure.VError, error) {\n\treq, err := s.client.NewRequest(\"POST\", \"SetSipWhiteListItem\", params)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tresponse := &SetSipWhiteListItemReturn{}\n\tverr, err := s.client.MakeResponse(req, response)\n\tif verr != nil || err != nil {\n\t\treturn nil, verr, err\n\t}\n\treturn response, nil, nil\n}", "title": "" }, { "docid": "a8c3b36d9cd7068eec142b08e00ed7f0", "score": "0.508246", "text": "func (c *Connection) WhitelistIP(name string, value string) error {\n\tc.lock.Lock()\n\tdefer c.lock.Unlock()\n\n\tdebugLogger.Print(\"whitelisting ip {\", name, \",\", value, \"}\")\n\n\tnewNetwork := AuthorizedNetwork{\n\t\tValue: value,\n\t\tName: name,\n\t\tKind: \"sql#aclEntry\",\n\t}\n\n\tupdatedNetworks := c.Instance.Settings.IPConfiguration.AuthorizedNetworks\n\tupdatedNetworks = append(updatedNetworks, newNetwork)\n\n\treturn c.updateAuthorizedNetworks(updatedNetworks)\n}", "title": "" }, { "docid": "2deb8b48189a291375d64ee59d032580", "score": "0.5055965", "text": "func loadWhitelist(ctx context.Context, db *sql.DB) {\n\tif logger.GetLogger().V(logger.Verbose) {\n\t\tlogger.GetLogger().Log(logger.Verbose, \"Begin loading whitelist\")\n\t}\n\tif logger.GetLogger().V(logger.Verbose) {\n\t\tdefer func() {\n\t\t\tlogger.GetLogger().Log(logger.Verbose, \"Done loading whitelist\")\n\t\t}()\n\t}\n\n\tconn, err := db.Conn(ctx)\n\tif err != nil {\n\t\tlogger.GetLogger().Log(logger.Alert, \"Error (conn) loading whitelist:\", err)\n\t\treturn\n\t}\n\tdefer conn.Close()\n\tstmt, err := conn.PrepareContext(ctx, getWLSQL())\n\tif err != nil {\n\t\tlogger.GetLogger().Log(logger.Alert, \"Error (stmt) loading whitelist:\", err)\n\t\treturn\n\t}\n\trows, err := stmt.QueryContext(ctx)\n\tif err != nil {\n\t\tlogger.GetLogger().Log(logger.Alert, \"Error (query) loading whitelist:\", err)\n\t\treturn\n\t}\n\tdefer rows.Close()\n\n\tcfg := WLCfg{records: make(map[interface{}]*ShardMapRecord)}\n\tfor rows.Next() {\n\t\tvar shardKey uint64\n\t\tvar shardKeyStr string\n\t\tvar rstatus, wstatus sql.NullString\n\t\tvar rec ShardMapRecord\n\t\tif GetConfig().ShardKeyValueTypeIsString {\n\t\t\terr = rows.Scan(&shardKeyStr, &(rec.logical), &rstatus, &wstatus)\n\t\t} else {\n\t\t\terr = rows.Scan(&shardKey, &(rec.logical), &rstatus, &wstatus)\n\t\t}\n\t\tif err != nil {\n\t\t\tlogger.GetLogger().Log(logger.Alert, \"Error (rows) loading whitelist:\", err)\n\t\t\treturn\n\t\t}\n\t\tif rstatus.Valid && rstatus.String[0] == 'N' {\n\t\t\trec.flags |= 0x0008\n\t\t}\n\t\tif wstatus.Valid && wstatus.String[0] == 'N' {\n\t\t\trec.flags |= 0x0002\n\t\t}\n\t\tif GetConfig().ShardKeyValueTypeIsString {\n\t\t\tcfg.records[shardKeyStr] = &rec\n\t\t} else {\n\t\t\tcfg.records[shardKey] = &rec\n\t\t}\n\t}\n\tgWLCfg.Store(&cfg)\n}", "title": "" }, { "docid": "f6d8704c9653329b53a9feb19ddacad1", 
"score": "0.49888608", "text": "func NewIPWhiteLister(whiteList []string, useXForwardedFor bool) (*IPWhiteLister, error) {\n\tif len(whiteList) == 0 {\n\t\treturn nil, errors.New(\"no white list provided\")\n\t}\n\n\twhiteLister := IPWhiteLister{}\n\n\tip, err := whitelist.NewIP(whiteList, false, useXForwardedFor)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"parsing CIDR whitelist %s: %v\", whiteList, err)\n\t}\n\twhiteLister.whiteLister = ip\n\n\twhiteLister.handler = negroni.HandlerFunc(whiteLister.handle)\n\tlog.Debugf(\"configured IP white list: %s\", whiteList)\n\n\treturn &whiteLister, nil\n}", "title": "" }, { "docid": "d7451a4e0c5e4dc097b273634ce685a5", "score": "0.49795592", "text": "func (base *BaseHandler) CollectorWhiteList() map[string]bool {\n\treturn base.whiteListedCollectors\n}", "title": "" }, { "docid": "4540886b3b69265662eaa0003de844d4", "score": "0.4952258", "text": "func (m *SharepointSettings) SetSharingBlockedDomainList(value []string)() {\n err := m.GetBackingStore().Set(\"sharingBlockedDomainList\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "e5d23091bb5d91893ed492711714193c", "score": "0.49136752", "text": "func (c *Client) ModifyVpcEndPointServiceWhiteList(request *ModifyVpcEndPointServiceWhiteListRequest) (response *ModifyVpcEndPointServiceWhiteListResponse, err error) {\n return c.ModifyVpcEndPointServiceWhiteListWithContext(context.Background(), request)\n}", "title": "" }, { "docid": "ba826ed331dc5874da3c9b25233bd37e", "score": "0.48951694", "text": "func (f WhiteListFilter) Pre(c filter.Context) (statusCode int, err error) {\n\tif !c.InWhitelist(GetRealClientIP(c.GetOriginRequestCtx())) {\n\t\treturn fasthttp.StatusForbidden, ErrWhitelist\n\t}\n\n\treturn f.BaseFilter.Pre(c)\n}", "title": "" }, { "docid": "97d6186a7a298aeb5bb93387d005311f", "score": "0.48907733", "text": "func (tl *TokenList) Set(s []string) {\n\ttl.SetString(strings.Join(s, \" \"))\n}", "title": "" }, { "docid": "77cb41477f7332d5ed1b78c9c1406ce7", "score": "0.48777655", "text": "func (hook *Hook) Blacklist(b []string) {\n\thook.mu.Lock()\n\tdefer hook.mu.Unlock()\n\tfor _, elem := range b {\n\t\thook.blacklist[elem] = true\n\t}\n}", "title": "" }, { "docid": "a90652f357d953e247f2b3cd6489262a", "score": "0.48764065", "text": "func BlamelistRepoWhiteset(notifications notifypb.Notifications) stringset.Set {\n\twhiteset := stringset.New(0)\n\tfor _, notification := range notifications.GetNotifications() {\n\t\tblamelistInfo := notification.GetNotifyBlamelist()\n\t\tfor _, repo := range blamelistInfo.GetRepositoryWhitelist() {\n\t\t\twhiteset.Add(repo)\n\t\t}\n\t}\n\treturn whiteset\n}", "title": "" }, { "docid": "b7bf528ea120e9e7cdf190e6e9d77208", "score": "0.48744723", "text": "func (client *Client) ModifyDomainBlackWhiteListWithChan(request *ModifyDomainBlackWhiteListRequest) (<-chan *ModifyDomainBlackWhiteListResponse, <-chan error) {\n\tresponseChan := make(chan *ModifyDomainBlackWhiteListResponse, 1)\n\terrChan := make(chan error, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tdefer close(responseChan)\n\t\tdefer close(errChan)\n\t\tresponse, err := client.ModifyDomainBlackWhiteList(request)\n\t\tif err != nil {\n\t\t\terrChan <- err\n\t\t} else {\n\t\t\tresponseChan <- response\n\t\t}\n\t})\n\tif err != nil {\n\t\terrChan <- err\n\t\tclose(responseChan)\n\t\tclose(errChan)\n\t}\n\treturn responseChan, errChan\n}", "title": "" }, { "docid": "5bd0cbd5d168232afa72f12030d30aaa", "score": "0.48513162", "text": "func (m *IosSingleSignOnSettings) 
SetAllowedUrls(value []string)() {\n err := m.GetBackingStore().Set(\"allowedUrls\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "f39114dd20b25fd59aafc289363b8613", "score": "0.48282367", "text": "func (client *Client) ModifyDomainBlackWhiteListWithCallback(request *ModifyDomainBlackWhiteListRequest, callback func(response *ModifyDomainBlackWhiteListResponse, err error)) <-chan int {\n\tresult := make(chan int, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tvar response *ModifyDomainBlackWhiteListResponse\n\t\tvar err error\n\t\tdefer close(result)\n\t\tresponse, err = client.ModifyDomainBlackWhiteList(request)\n\t\tcallback(response, err)\n\t\tresult <- 1\n\t})\n\tif err != nil {\n\t\tdefer close(result)\n\t\tcallback(nil, err)\n\t\tresult <- 0\n\t}\n\treturn result\n}", "title": "" }, { "docid": "0ed70c63a0c5901338380eb04d7c7a76", "score": "0.4815818", "text": "func (f *Filesystem) InstallWhitelist(ctx context.Context, m *fs.MountNamespace) error {\n\treturn installWhitelist(ctx, m, f.paths)\n}", "title": "" }, { "docid": "2112f42c1b8daab44dc59c828e600707", "score": "0.4798186", "text": "func StartBlacklist() {\n\tTokenBlacklist = NewBlacklist(context.Background(), defaultCacheTimeout, cacheCleanupInterval)\n}", "title": "" }, { "docid": "5e2294a8b5b0302034f9d0e55fc57189", "score": "0.47886714", "text": "func (b *InsertBuilder) Blacklist(columns ...string) *InsertBuilder {\n\tb.isBlacklist = true\n\tb.cols = columns\n\treturn b\n}", "title": "" }, { "docid": "1e6d3737ad44036da310a511004a9615", "score": "0.4762603", "text": "func (wl HostStub) Add(ip net.IP) {\n\tlog.Printf(\"WARNING: IP %s added to whitelist but whitelisting is stubbed\", ip)\n}", "title": "" }, { "docid": "22cae66d843fb0403559dd19a5bc66f8", "score": "0.4716495", "text": "func (nodeCfg *NodeConfig) AddQuarantineWhitelist(address string) {\n\tnodeCfg.P2P.Policy.QuarantineWhitelist = append(nodeCfg.P2P.Policy.QuarantineWhitelist, address)\n}", "title": "" }, { "docid": "fd76bae43db70b39edc6a8e8a9719670", "score": "0.46993646", "text": "func newWhitelistService(sling *sling.Sling) *WhitelistService {\n\treturn &WhitelistService{\n\t\tsling: sling.Path(\"groups/\"),\n\t}\n}", "title": "" }, { "docid": "f049f507f1d1613111c35a09262535e7", "score": "0.4684475", "text": "func ListReplace(e build.Expr, old, value, pkg string) bool {\n\treplaced := false\n\tfor _, li := range allListsIncludingSelects(e) {\n\t\tfor k, elem := range li.List {\n\t\t\tstr, ok := elem.(*build.StringExpr)\n\t\t\tif !ok || !labels.Equal(str.Value, old, pkg) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tli.List[k] = &build.StringExpr{Value: ShortenLabel(value, pkg), Comments: *elem.Comment()}\n\t\t\treplaced = true\n\t\t}\n\t}\n\treturn replaced\n}", "title": "" }, { "docid": "b6f07698be547262dfa7ae6d14d00e8d", "score": "0.46830478", "text": "func (wl *Basic) Remove(ip net.IP) {\n\tif !validIP(ip) {\n\t\treturn\n\t}\n\n\twl.lock.Lock()\n\tdefer wl.lock.Unlock()\n\tdelete(wl.whitelist, ip.String())\n}", "title": "" }, { "docid": "7fb0b7ff206d0b4480c768d34a768fcd", "score": "0.46573162", "text": "func (wl *Basic) Add(ip net.IP) {\n\tif !validIP(ip) {\n\t\treturn\n\t}\n\n\twl.lock.Lock()\n\tdefer wl.lock.Unlock()\n\twl.whitelist[ip.String()] = true\n}", "title": "" }, { "docid": "7502b139589047a66d43e8183369b138", "score": "0.4657068", "text": "func NewIPWhitelister(whitelistStrings []string) (*IPWhitelister, error) {\n\n\tif len(whitelistStrings) == 0 {\n\t\treturn nil, errors.New(\"no whitelists provided\")\n\t}\n\n\twhitelister := 
IPWhitelister{}\n\n\tfor _, whitelistString := range whitelistStrings {\n\t\t_, whitelist, err := net.ParseCIDR(whitelistString)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"parsing CIDR whitelist %s: %v\", whitelist, err)\n\t\t}\n\t\twhitelister.whitelists = append(whitelister.whitelists, whitelist)\n\t}\n\n\twhitelister.handler = negroni.HandlerFunc(whitelister.handle)\n\tlog.Debugf(\"configured %u IP whitelists: %s\", len(whitelister.whitelists), whitelister.whitelists)\n\n\treturn &whitelister, nil\n}", "title": "" }, { "docid": "401966585ee5006d53d27722c3b33455", "score": "0.46471798", "text": "func (o *Book) Whitelist() (wl []string) {\n\tif len(o.whitelist) > 0 {\n\t\treturn o.whitelist\n\t}\n\n\t// Calculates changed columns as whitelist\n\tv := reflect.Indirect(reflect.ValueOf(o.readonly))\n\tvnew := reflect.Indirect(reflect.ValueOf(o))\n\n\tfor _, c := range bookColumns {\n\t\tif f, ok := BookFieldMapping[c]; ok {\n\t\t\tvar before, after interface{}\n\t\t\tif v.IsValid() {\n\t\t\t\tbefore = v.FieldByName(f).Interface()\n\t\t\t}\n\n\t\t\tif vnew.IsValid() {\n\t\t\t\tafter = vnew.FieldByName(f).Interface()\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(before, after) || o.operation == \"DELETE\" {\n\t\t\t\twl = append(wl, c)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "983cb0d9c1e980cb908dd4571077412c", "score": "0.46241513", "text": "func (m *HardwareInformation) SetWiredIPv4Addresses(value []string)() {\n err := m.GetBackingStore().Set(\"wiredIPv4Addresses\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "46337c40b91429791e34b14fc2ef2202", "score": "0.4597128", "text": "func (cnaoRepoOps *gitCnaoRepo) resetInAllowedList(allowList []string) error {\n\tif len(allowList) == 0 {\n\t\treturn nil\n\t}\n\tlogger.Printf(\"Cleaning untracked files on bumping repo\")\n\t// TODO replace this when go-git adds advanced clean abilities so we could clean specific paths\n\tcleanArgs := append([]string{\"-C\", cnaoRepoOps.gitRepo.localDir, \"clean\", \"-fd\", \"--\"}, allowList...)\n\terr := runExternalGitCommand(cleanArgs)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"Failed to clean bumping repo\")\n\t}\n\n\tlogger.Printf(\"Resetting modified files in allowed list on bumping repo\")\n\t// TODO replace this when go-git adds advanced checkout/restore abilities so we could checkout specific paths\n\tcheckoutArgs := append([]string{\"-C\", cnaoRepoOps.gitRepo.localDir, \"checkout\", \"HEAD\", \"--\"}, allowList...)\n\terr = runExternalGitCommand(checkoutArgs)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"Failed to checkout bumping repo\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "5f257feba0e50a7ec7a354b9073e7de2", "score": "0.45920172", "text": "func CreateModifyDomainBlackWhiteListRequest() (request *ModifyDomainBlackWhiteListRequest) {\n\trequest = &ModifyDomainBlackWhiteListRequest{\n\t\tRpcRequest: &requests.RpcRequest{},\n\t}\n\trequest.InitWithApiInfo(\"DDoSPro\", \"2017-07-25\", \"ModifyDomainBlackWhiteList\", \"\", \"\")\n\treturn\n}", "title": "" }, { "docid": "fdaa9dbc7da53598fde16fc0ef3b0879", "score": "0.4589558", "text": "func (myapi Api) IsWhiteList(response *JsonRecord) (status bool) {\n\n\t// Is the record a whitelist?\n\tif response.Results[0].Wl == true {\n\t\treturn true\n\t}\n\n\treturn false\n\n}", "title": "" }, { "docid": "1651206756f4c851279dee1a5ce3abf7", "score": "0.45840728", "text": "func (m *SharepointSettings) SetSharingAllowedDomainList(value []string)() {\n err := 
m.GetBackingStore().Set(\"sharingAllowedDomainList\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "be568725eec292b9e3d97248310880c4", "score": "0.45790938", "text": "func safeListAfterDeniedRegex(namespacesDeniedRegex string, safeList []string) []string {\n\tvar updatedSafeList []string\n\n\t// for every namespace on SafeList\n\t// check that against deniedRegex\n\tfor _, namespace := range safeList {\n\t\trp := regexp.MustCompile(namespacesDeniedRegex)\n\n\t\tfound := rp.MatchString(namespace)\n\t\t// if it does not match then append\n\t\tif !found {\n\t\t\tupdatedSafeList = append(updatedSafeList, namespace)\n\t\t}\n\t}\n\n\treturn updatedSafeList\n\n}", "title": "" }, { "docid": "ce27a16d331a99fa1e7f22b7df25d483", "score": "0.45664474", "text": "func sanitizeBackendMetricList(list *threescaleapi.MetricJSONList) {\n\tfor i := range list.Metrics {\n\t\tlist.Metrics[i].Element.SystemName = SanitizeBackendSystemName(list.Metrics[i].Element.SystemName)\n\t}\n}", "title": "" }, { "docid": "ff8b90e40742369eb9b7554efc997c29", "score": "0.455636", "text": "func ForbidList(v string) predicate.LoadBalance {\n\treturn predicate.LoadBalance(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldForbidList), v))\n\t})\n}", "title": "" }, { "docid": "d3fca69a29b462a73c334d1037d6a9cd", "score": "0.4547166", "text": "func whitelistKey(whiteKeys []string, key string) bool {\n\tfor _, whiteKey := range whiteKeys {\n\t\tif key == whiteKey {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "decda4219320a51069b215b2192bde1d", "score": "0.45344132", "text": "func ForbidListEqualFold(v string) predicate.LoadBalance {\n\treturn predicate.LoadBalance(func(s *sql.Selector) {\n\t\ts.Where(sql.EqualFold(s.C(FieldForbidList), v))\n\t})\n}", "title": "" }, { "docid": "d247e018ac2ed2fa13e133353d4df8a3", "score": "0.45315725", "text": "func (db *dbw) SaveWhitelistRecord(w WhitelistRecord) error {\n\t// NOTE: Not using INSERT IGNORE because it ignores all errors\n\t// Thanks: http://stackoverflow.com/questions/2366813/on-duplicate-key-ignore\n\tquery := \"INSERT INTO whitelist \" +\n\t\t\"(`client`, `approved`) \" +\n\t\t\"VALUES (?, ?) 
\" +\n\t\t\"ON DUPLICATE KEY UPDATE `client`=`client`;\"\n\n\ttx := db.MustBegin()\n\ttx.Exec(query, w.Client, w.Approved)\n\n\treturn tx.Commit()\n}", "title": "" }, { "docid": "743c55c90968838bcd7dcc4974898de1", "score": "0.45113525", "text": "func (wl HostStub) Remove(ip net.IP) {\n\tlog.Printf(\"WARNING: IP %s removed from whitelist but whitelisting is stubbed\", ip)\n}", "title": "" }, { "docid": "4bedc92d228dacbc35d2e39c9628dee9", "score": "0.44989514", "text": "func (m *AppRole) SetAllowedMemberTypes(value []string)() {\n err := m.GetBackingStore().Set(\"allowedMemberTypes\", value)\n if err != nil {\n panic(err)\n }\n}", "title": "" }, { "docid": "2b47a764920862202ce78ee1a720fb98", "score": "0.4487887", "text": "func (f *CIDRFilter) Allow(vals ...string) *CIDRFilter {\r\n\tf.allowVals = append(f.allowVals, vals...)\r\n\treturn f\r\n}", "title": "" }, { "docid": "d47128e605e2c224f77245efa63405c0", "score": "0.4486907", "text": "func (manager *Manager) whitelistSize() int {\n\tmanager.mu.RLock()\n\tdefer manager.mu.RUnlock()\n\tmanager.logger.Debugf(\"Current count of online miner is %d\", len(manager.whiteList))\n\treturn len(manager.whiteList)\n}", "title": "" }, { "docid": "d521dbe44ced17fb17f0b5611548c4b5", "score": "0.4484107", "text": "func CheckWhiteList(w http.ResponseWriter, r *http.Request) {\n\n\t// parse the posted JSON which is of type WhiteListRequest\n\tdata := &WhiteListRequest{}\n\tif err := render.Bind(r, data); err != nil {\n\t\trender.Render(w, r, ErrInvalidRequest(err))\n\t\treturn\n\t}\n\n\tip := data.IPAddress\n\tcountry := db.CountryProvider.GetCountry(ip)\n\n\twhiteListed := contains(data.CountryCodes, country)\n\n\trender.JSON(w, r, WhitelistResponse{\n\t\tWhiteListed: whiteListed,\n\t\tIP: ip,\n\t\tCountryName: country,\n\t})\n}", "title": "" }, { "docid": "25d204cd3c57adcf9f8f68ee5cb11fa7", "score": "0.44717097", "text": "func GenerateSafeList(allowedRegex string, deniedRegex string, nsList *corev1.NamespaceList) []string {\n\n\tsafeList := allowedNamespacesList(allowedRegex, nsList)\n\tupdatedSafeList := safeListAfterDeniedRegex(deniedRegex, safeList)\n\n\treturn updatedSafeList\n\n}", "title": "" }, { "docid": "759107043f3a35a0a71ee07c7e50f0c4", "score": "0.44703507", "text": "func (m *AndroidGeneralDeviceConfiguration) SetWiFiBlocked(value *bool)() {\n m.wiFiBlocked = value\n}", "title": "" }, { "docid": "613f9e71a05e7bb1bbb3f8973bf9e53d", "score": "0.44605398", "text": "func (ft *firewallTweaker) set(cidrs []string, routes, localRoutes []netip.Prefix) {\n\tft.mu.Lock()\n\tdefer ft.mu.Unlock()\n\n\tif len(cidrs) == 0 {\n\t\tft.logf(\"marking for removal\")\n\t} else {\n\t\tft.logf(\"marking allowed %v\", cidrs)\n\t}\n\tft.wantLocal = cidrs\n\tft.localRoutes = localRoutes\n\tft.wantKillswitch = hasDefaultRoute(routes)\n\tif ft.running {\n\t\t// The doAsyncSet goroutine will check ft.wantLocal/wantKillswitch\n\t\t// before returning.\n\t\treturn\n\t}\n\tft.logf(\"starting netsh goroutine\")\n\tft.running = true\n\tgo ft.doAsyncSet()\n}", "title": "" }, { "docid": "9373d438fc5885d2fc0e26bf842794fe", "score": "0.44541982", "text": "func sanitizeBackendMethodList(list *threescaleapi.MethodList) {\n\tfor i := range list.Methods {\n\t\tlist.Methods[i].Element.SystemName = SanitizeBackendSystemName(list.Methods[i].Element.SystemName)\n\t}\n}", "title": "" }, { "docid": "4ba49861c9dfd4800586b8c0e088e1b9", "score": "0.44522738", "text": "func InWhiteList(d string) bool {\n\td = dns.Fqdn(d)\n\tfor _, x := range RC.Domains {\n\t\tif d == dns.Fqdn(x) {\n\t\t\treturn 
true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "b978c9aa6dd5e141d8d963f5b9d70d0a", "score": "0.4438857", "text": "func ForbidListLT(v string) predicate.LoadBalance {\n\treturn predicate.LoadBalance(func(s *sql.Selector) {\n\t\ts.Where(sql.LT(s.C(FieldForbidList), v))\n\t})\n}", "title": "" }, { "docid": "1fa9e6d7004c871376b2b08df18b75b7", "score": "0.44374755", "text": "func isWhitelisted(addr net.Addr) bool {\n\tif len(cfg.whitelists) == 0 {\n\t\treturn false\n\t}\n\n\thost, _, err := net.SplitHostPort(addr.String())\n\tif err != nil {\n\t\tlog.Printf(\"Unable to SplitHostPort on '%s': %v\", addr, err)\n\t\treturn false\n\t}\n\tip := net.ParseIP(host)\n\tif ip == nil {\n\t\tlog.Printf(\"Unable to parse IP '%s'\", addr)\n\t\treturn false\n\t}\n\n\tfor _, ipnet := range cfg.whitelists {\n\t\tif ipnet.Contains(ip) {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "title": "" }, { "docid": "4295f2b32744ea9f71c4d6db0c02460d", "score": "0.44340008", "text": "func setRenterAllowances(renters map[*TestNode]struct{}) error {\n\tfor renter := range renters {\n\t\t// Set allowance\n\t\tif renter.params.SkipSetAllowance {\n\t\t\tcontinue\n\t\t}\n\t\tallowance := DefaultAllowance\n\t\tif !reflect.DeepEqual(renter.params.Allowance, modules.Allowance{}) {\n\t\t\tallowance = renter.params.Allowance\n\t\t}\n\t\tif err := renter.RenterPostAllowance(allowance); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "1ee3886799ee45e7ac32d3ee5045bc31", "score": "0.4429731", "text": "func WithHeaderWhitelist(whitelist ...string) Option {\n\theaderTransformer, err := keepHeaders(whitelist...)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn option{\n\t\tcache: func(c *Cache) error {\n\t\t\tc.matcher.policy.HeaderTransformers = append(c.matcher.policy.HeaderTransformers, headerTransformer)\n\t\t\treturn nil\n\t\t},\n\t\tmatcher: func(m *SimpleMatcher) {\n\t\t\tm.policy.HeaderTransformers = append(m.policy.HeaderTransformers, headerTransformer)\n\t\t},\n\t}\n}", "title": "" }, { "docid": "5cad2bb48383a9cfe4e1adf0ad261246", "score": "0.44091365", "text": "func (m *MockIPKeeper) GetWhitelist() []*net.IPNet {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetWhitelist\")\n\tret0, _ := ret[0].([]*net.IPNet)\n\treturn ret0\n}", "title": "" }, { "docid": "2ea756f6796ba26eb3c3a6f6e0d0432d", "score": "0.44042206", "text": "func SetListPollsFilter(options ...func(*ListPollsFilter) (err error)) (*ListPollsFilter, error) {\n\n\t//var defaultActive int64 = 1\n\t//args := ListPollsFilter{MaxResult: 20, Page: 1, Sort: \"-created_at\", Active: &defaultActive}\n\tvar args ListPollsFilter\n\n\tfor _, option := range options {\n\t\tif err := option(&args); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn &args, nil\n}", "title": "" }, { "docid": "89ba5fb5d74e1faa1527f5c2b76cb21c", "score": "0.44000104", "text": "func (c *Connection) BlacklistIP(value string) error {\n\tc.lock.Lock()\n\tdefer c.lock.Unlock()\n\n\tdebugLogger.Print(\"blacklisting ip {\", value, \"}\")\n\n\tcurrentNetworks := c.Instance.Settings.IPConfiguration.AuthorizedNetworks\n\n\tvar updatedNetworks []AuthorizedNetwork\n\tfor _, network := range currentNetworks {\n\t\tif network.Value != value {\n\t\t\tupdatedNetworks = append(updatedNetworks, network)\n\t\t}\n\t}\n\n\treturn c.updateAuthorizedNetworks(updatedNetworks)\n}", "title": "" }, { "docid": "9a85e4d8c32c4b74f551e3c713bfb9f3", "score": "0.43841588", "text": "func (e *BaseExchange) SetWatchlist(pairs ...Pair) *Watchlist {\n\tif len(pairs) == 
0 {\n\t\tpairs = append(pairs, e.GetDefaultPairs()...)\n\t}\n\n\tw := NewWatchlist(pairs...)\n\n\te.watchlist = w\n\n\treturn w\n}", "title": "" }, { "docid": "d3d73b5ed5f87bbe9db239e0325595a3", "score": "0.43781933", "text": "func WhiteListHostFilter(hosts ...string) HostFilter {\n\thostInfos, err := addrsToHosts(hosts, 9042, nopLogger{})\n\tif err != nil {\n\t\t// dont want to panic here, but rather not break the API\n\t\tpanic(fmt.Errorf(\"unable to lookup host info from address: %v\", err))\n\t}\n\n\tm := make(map[string]bool, len(hostInfos))\n\tfor _, host := range hostInfos {\n\t\tm[host.ConnectAddress().String()] = true\n\t}\n\n\treturn HostFilterFunc(func(host *HostInfo) bool {\n\t\treturn m[host.ConnectAddress().String()]\n\t})\n}", "title": "" }, { "docid": "43aeb639ef14994ccd927af88d050aeb", "score": "0.4374646", "text": "func (wl *Basic) UnmarshalJSON(in []byte) error {\n\tif in[0] != '\"' || in[len(in)-1] != '\"' {\n\t\treturn errors.New(\"whitelist: invalid whitelist\")\n\t}\n\n\tif wl.lock == nil {\n\t\twl.lock = new(sync.Mutex)\n\t}\n\n\twl.lock.Lock()\n\tdefer wl.lock.Unlock()\n\n\tnetString := strings.TrimSpace(string(in[1 : len(in)-1]))\n\tnets := strings.Split(netString, \",\")\n\n\twl.whitelist = map[string]bool{}\n\tfor i := range nets {\n\t\taddr := strings.TrimSpace(nets[i])\n\t\tif addr == \"\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tip := net.ParseIP(addr)\n\t\tif ip == nil {\n\t\t\twl.whitelist = nil\n\t\t\treturn errors.New(\"whitelist: invalid IP address \" + addr)\n\t\t}\n\t\twl.whitelist[addr] = true\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "b46ba51869fcfd9d2d841dc44641c20f", "score": "0.43700144", "text": "func (s *MetricsStore) Replace(list []interface{}, name string) error {\n\ts.mutex.Lock()\n\ts.metrics = map[types.UID][]*metrics.Metric{}\n\ts.mutex.Unlock()\n\n\tfor _, o := range list {\n\t\terr := s.Add(o)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "581e31446c8e814024e5a994ee2b3452", "score": "0.43677688", "text": "func restoreLists() {\n articleList = tmpArticleList\n}", "title": "" }, { "docid": "78528ef98f6d81e8013e11ab972b2582", "score": "0.4354237", "text": "func WithBlacklist(blacklist []*regexp.Regexp) Option {\n\treturn optionFunc(func(c *config) {\n\t\tc.blacklist = blacklist\n\t})\n}", "title": "" }, { "docid": "b13d89f4fafcec9c28b9fc386ff85c97", "score": "0.4353488", "text": "func (t *NetworkInstance_Mpls_TeGlobalAttributes_Srlg) RenameMembersList(oldK, newK string) error {\n\te, ok := t.MembersList[oldK]\n\tif !ok {\n\t\treturn fmt.Errorf(\"key %v not found in MembersList\", oldK)\n\t}\n\te.FromAddress = &newK\n\n\tt.MembersList[newK] = e\n\tdelete(t.MembersList, oldK)\n\treturn nil\n}", "title": "" }, { "docid": "77d9df6ec84dfbb2ed20c97ebfdc1123", "score": "0.43506372", "text": "func setattrlist(path *byte, list unsafe.Pointer, buf unsafe.Pointer, size uintptr, options int) (err error) {\n\t_, _, e1 := syscall.Syscall6(syscall.SYS_SETATTRLIST, uintptr(unsafe.Pointer(path)), uintptr(list), uintptr(buf), uintptr(size), uintptr(options), 0)\n\tif e1 != 0 {\n\t\terr = e1\n\t}\n\treturn\n}", "title": "" }, { "docid": "c5f9b23baa4bc4c9bec7b530d2485560", "score": "0.4344675", "text": "func replaceChain(newBlocks []Block) {\n\tif len(newBlocks) > len(Blockchain) {\n\t\tBlockchain = newBlocks\n\t}\n}", "title": "" }, { "docid": "c5f9b23baa4bc4c9bec7b530d2485560", "score": "0.4344675", "text": "func replaceChain(newBlocks []Block) {\n\tif len(newBlocks) > len(Blockchain) {\n\t\tBlockchain = 
newBlocks\n\t}\n}", "title": "" }, { "docid": "cfdfe4239774460a75174262c2fd15e9", "score": "0.43406236", "text": "func (t *Mpls_TeGlobalAttributes_Srlg) RenameMembersList(oldK, newK string) error {\n\te, ok := t.MembersList[oldK]\n\tif !ok {\n\t\treturn fmt.Errorf(\"key %v not found in MembersList\", oldK)\n\t}\n\te.FromAddress = &newK\n\n\tt.MembersList[newK] = e\n\tdelete(t.MembersList, oldK)\n\treturn nil\n}", "title": "" }, { "docid": "526248725c53d1a15cac98cc1203426a", "score": "0.4336582", "text": "func setAllowHeader(headers http.Header, isItem bool, conf resource.Conf) {\n\tmethods := []string{}\n\tif isItem {\n\t\t// Methods are sorted\n\t\tif conf.IsModeAllowed(resource.Update) {\n\t\t\tmethods = append(methods, \"DELETE\")\n\t\t}\n\t\tif conf.IsModeAllowed(resource.Read) {\n\t\t\tmethods = append(methods, \"GET, HEAD\")\n\t\t}\n\t\tif conf.IsModeAllowed(resource.Update) {\n\t\t\tmethods = append(methods, \"PATCH\")\n\t\t\t// See http://tools.ietf.org/html/rfc5789#section-3\n\t\t\theaders.Set(\"Allow-Patch\", \"application/json\")\n\t\t}\n\t\tif conf.IsModeAllowed(resource.Create) || conf.IsModeAllowed(resource.Replace) {\n\t\t\tmethods = append(methods, \"PUT\")\n\t\t}\n\t} else {\n\t\t// Methods are sorted\n\t\tif conf.IsModeAllowed(resource.Clear) {\n\t\t\tmethods = append(methods, \"DELETE\")\n\t\t}\n\t\tif conf.IsModeAllowed(resource.List) {\n\t\t\tmethods = append(methods, \"GET, HEAD\")\n\t\t}\n\t\tif conf.IsModeAllowed(resource.Create) {\n\t\t\tmethods = append(methods, \"POST\")\n\t\t}\n\t}\n\tif len(methods) > 0 {\n\t\theaders.Set(\"Allow\", strings.Join(methods, \", \"))\n\t}\n}", "title": "" }, { "docid": "416526490ee7e8ba8c5294c79a8e006c", "score": "0.43329337", "text": "func NewIdentityWhitelist(policies []config.Policy, admins []string) IdentityValidator {\n\tmetrics.AddPolicyCountCallback(\"authorize\", func() int64 {\n\t\treturn int64(len(policies))\n\t})\n\treturn newIdentityWhitelistMap(policies, admins)\n}", "title": "" }, { "docid": "102da00d524b77ad756e111925801827", "score": "0.4320796", "text": "func (k *MemIPKeeper) GetWhitelist() []*net.IPNet {\n\twhitelist := make([]*net.IPNet, len(k.whitelist))\n\tcopy(whitelist, k.whitelist)\n\n\treturn whitelist\n}", "title": "" }, { "docid": "d34b4d0d21ac37906b77477b7fdd46e8", "score": "0.43205172", "text": "func VerifyAgainstWhitelist(tolerations, whitelist []api.Toleration) bool {\n\tif len(whitelist) == 0 || len(tolerations) == 0 {\n\t\treturn true\n\t}\n\nnext:\n\tfor _, t := range tolerations {\n\t\tfor _, w := range whitelist {\n\t\t\tif isSuperset(w, t) {\n\t\t\t\tcontinue next\n\t\t\t}\n\t\t}\n\t\treturn false\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "6495e89294f1bc684883367623fe55a0", "score": "0.4315374", "text": "func (a *UpdateSender) ReplaceFilterChain(c filter.Chain) {\n\n}", "title": "" }, { "docid": "70a031a9ef50a9ef776369ba6b8acac2", "score": "0.43091744", "text": "func SetClientAccessListType(s string) func(*SAMClientForwarder) error {\n\treturn func(c *SAMClientForwarder) error {\n\t\tif s == \"whitelist\" {\n\t\t\tc.accessListType = \"whitelist\"\n\t\t\treturn nil\n\t\t} else if s == \"blacklist\" {\n\t\t\tc.accessListType = \"blacklist\"\n\t\t\treturn nil\n\t\t} else if s == \"none\" {\n\t\t\tc.accessListType = \"\"\n\t\t\treturn nil\n\t\t} else if s == \"\" {\n\t\t\tc.accessListType = \"\"\n\t\t\treturn nil\n\t\t}\n\t\treturn fmt.Errorf(\"Invalid Access list type(whitelist, blacklist, none)\")\n\t}\n}", "title": "" }, { "docid": "a2c693655e2f71eb86a8fe4542ebed52", "score": 
"0.43081322", "text": "func Updateaddresswhitelistmailsettings() {\n\tapiKey := os.Getenv(\"SENDGRID_API_KEY\")\n\thost := \"https://api.sendgrid.com\"\n\trequest := sendgrid.GetRequest(apiKey, \"/v3/mail_settings/address_whitelist\", host)\n\trequest.Method = \"PATCH\"\n\trequest.Body = []byte(` {\n \"enabled\": true, \n \"list\": [\n \"email1@example.com\", \n \"example.com\"\n ]\n}`)\n\tresponse, err := sendgrid.API(request)\n\tif err != nil {\n\t\tlog.Println(err)\n\t} else {\n\t\tfmt.Println(response.StatusCode)\n\t\tfmt.Println(response.Body)\n\t\tfmt.Println(response.Headers)\n\t}\n}", "title": "" }, { "docid": "b2ffbeb1991ac6405198e7c6834cdaae", "score": "0.43073502", "text": "func (l *Logger) SetFilter(filters string) {\n\tif l == nil {\n\t\treturn\n\t}\n\n\tl.mu.Lock()\n\n\tl.filter = newFilter(filters)\n\n\tl.mu.Unlock()\n}", "title": "" }, { "docid": "5c233daa8b800e7967d4562950d9656b", "score": "0.42999023", "text": "func (p *PDUStreamProvider) addIgnoredUsersToFilter(ctx context.Context, req *types.SyncRequest, eventFilter *gomatrixserverlib.RoomEventFilter) error {\n\tignores, err := p.DB.IgnoresForUser(ctx, req.Device.UserID)\n\tif err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t}\n\treq.IgnoredUsers = *ignores\n\tuserList := make([]string, 0, len(ignores.List))\n\tfor userID := range ignores.List {\n\t\tuserList = append(userList, userID)\n\t}\n\tif len(userList) > 0 {\n\t\teventFilter.NotSenders = &userList\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "a750da6403b8be5c3fea5c37e5207a34", "score": "0.42985722", "text": "func (e *Event) ReplaceFacilitators(facilitators []User) {\n\te.Facilitators = nil\n\tfor _, facilitator := range facilitators {\n\t\te.AddFacilitator(&facilitator)\n\t}\n}", "title": "" }, { "docid": "f0720cb53cbde5b4c730cde3569d9067", "score": "0.4293829", "text": "func (c *SimpleCompleter) SetList(list []string) {\n\tsort.Strings(list)\n\tc.list = list\n}", "title": "" }, { "docid": "1ecd178fc438f4026fd007dde452a661", "score": "0.4290697", "text": "func updateAWSLBList(kclient k8s.Client, oldLBList []machineapi.LoadBalancerReference, newLBList []machineapi.LoadBalancerReference, machineToPatch machineapi.Machine, providerSpecDecoded *machineapi.AWSMachineProviderConfig) error {\n\tbaseToPatch := k8s.MergeFrom(machineToPatch.DeepCopy())\n\tif !reflect.DeepEqual(oldLBList, newLBList) {\n\t\tproviderSpecDecoded.LoadBalancers = newLBList\n\n\t\trawEncodedSpec, err := encodeAWSMachineProviderSpec(providerSpecDecoded, kclient.Scheme())\n\t\tif err != nil {\n\t\t\tlog.Error(err, \"Error encoding provider spec for machine\", \"machine\", machineToPatch.Name)\n\t\t\treturn err\n\t\t}\n\t\tmachineToPatch.Spec.ProviderSpec.Value = rawEncodedSpec\n\t\tmachineObj := machineToPatch.DeepCopy()\n\t\tif err := kclient.Patch(context.Background(), machineObj, baseToPatch); err != nil {\n\t\t\tlog.Error(err, \"Failed to update LBs in machine's providerSpec\", \"machine\", machineToPatch.Name)\n\t\t\treturn err\n\t\t}\n\t\tlog.Info(\"Updated master machine's LBs in providerSpec\", \"masterMachine\", machineToPatch.Name)\n\t\treturn nil\n\t}\n\tlog.Info(\"No need to update LBs for master machine\", \"masterMachine\", machineToPatch.Name)\n\treturn nil\n}", "title": "" }, { "docid": "f00e6b9d3da69868d7b46e2ddf9cdda2", "score": "0.4288091", "text": "func SetAllowedWords(allowed map[string]struct{}) {\n\tallowedWords = allowed\n}", "title": "" }, { "docid": "f2c7b0f2971f769d82727104611c0b65", "score": "0.42832437", "text": "func 
CreateModifyDomainBlackWhiteListResponse() (response *ModifyDomainBlackWhiteListResponse) {\n\tresponse = &ModifyDomainBlackWhiteListResponse{\n\t\tBaseResponse: &responses.BaseResponse{},\n\t}\n\treturn\n}", "title": "" }, { "docid": "15d3ca9f0c35877caa63130feb81cbc3", "score": "0.42778355", "text": "func (pm *PatternManager) SetCurrentPatterns(servedPatterns map[string]exchange.ServedPattern, policyPath string) error {\n\tpm.patMapLock.Lock()\n\tdefer pm.patMapLock.Unlock()\n\n\t// Exit early if nothing to do\n\tif len(pm.OrgPatterns) == 0 && len(pm.ServedPatterns) == 0 && len(servedPatterns) == 0 {\n\t\treturn nil\n\t}\n\n\t// save the served patterns in the pm\n\tpm.setServedPatterns(servedPatterns)\n\n\t// Create a new map of maps\n\tif len(pm.OrgPatterns) == 0 {\n\t\tpm.OrgPatterns = make(map[string]map[string]*PatternEntry)\n\t}\n\n\t// For each org that this agbot is supposed to be serving, check if it is already in the pm.\n\t// If not add to it. The patterns will be added later in the UpdatePatternPolicies function.\n\tfor _, served := range servedPatterns {\n\t\t// If we have encountered a new org in the served pattern list, create a map of patterns for it.\n\t\tif !pm.hasOrg(served.PatternOrg) {\n\t\t\tpm.OrgPatterns[served.PatternOrg] = make(map[string]*PatternEntry)\n\t\t}\n\t}\n\n\t// For each org in the existing PatternManager, check to see if its in the new map. If not, then\n\t// this agbot is no longer serving any patterns in that org, we can get rid of everything in that org.\n\tfor org, _ := range pm.OrgPatterns {\n\t\tif !pm.serveOrg(org) {\n\t\t\t// delete org and all policy files in it.\n\t\t\tglog.V(5).Infof(\"Deleting the org %v from the pattern manager and all its policy files because it is no longer hosted by the agbot.\", org)\n\t\t\tif err := pm.deleteOrg(policyPath, org); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" } ]
389a6758c6226373a8c49ca3552bba46
WithContext adds the context to the upload template params
[ { "docid": "fcb036b388d01f1930e5f38db02b3709", "score": "0.65983874", "text": "func (o *UploadTemplateParams) WithContext(ctx context.Context) *UploadTemplateParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" } ]
[ { "docid": "5bcdd9ab2a254d71d35ac5335cb59dab", "score": "0.6335294", "text": "func (o *UploadParams) WithContext(ctx context.Context) *UploadParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "9745828bf4861b64d27963eb9d6aea07", "score": "0.6112385", "text": "func (_obj *Apiaccount) Account_uploadThemeWithContext(tarsCtx context.Context, params *TLaccount_uploadTheme, _opt ...map[string]string) (ret Document, err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = params.WriteBlock(_os, 1)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tvar _status map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 0, \"account_uploadTheme\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\t_is := codec.NewReader(tools.Int8ToByte(_resp.SBuffer))\n\terr = ret.ReadBlock(_is, 0, true)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn ret, nil\n}", "title": "" }, { "docid": "4c75ef8bf7c1f1e8d3232746f25a5b7b", "score": "0.6065761", "text": "func (o *GeneratedUploadURLParams) WithContext(ctx context.Context) *GeneratedUploadURLParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "f11547d5e5ee09e65204e3c68102ff43", "score": "0.5879445", "text": "func (_obj *Apiaccount) Account_uploadWallPaperWithContext(tarsCtx context.Context, params *TLaccount_uploadWallPaper, _opt ...map[string]string) (ret WallPaper, err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = params.WriteBlock(_os, 1)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tvar _status map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 0, \"account_uploadWallPaper\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\t_is := codec.NewReader(tools.Int8ToByte(_resp.SBuffer))\n\terr = ret.ReadBlock(_is, 0, true)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn ret, nil\n}", "title": "" }, { "docid": "8b109bbb7f7c03fa77a559c24b1aca5b", "score": "0.57188237", "text": "func (o *UploadTemplateParams) SetContext(ctx context.Context) {\n\to.Context 
= ctx\n}", "title": "" }, { "docid": "8f99f392a9b326f23e63d7b0020086c1", "score": "0.5582335", "text": "func (o *UploadReportLibraryParams) WithContext(ctx context.Context) *UploadReportLibraryParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "234d7421005e15c0329f5a379d8104f0", "score": "0.5490509", "text": "func (o *UploadParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "1bc3e766b345104cbdf2657531c7c280", "score": "0.5476211", "text": "func (o *GeneratedUploadURLParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "f92254600fb6adf66261aebf8d7299a0", "score": "0.54716295", "text": "func (a *AzureBlobStorage) WriteWithContext(wctx context.Context, msg types.Message) error {\n\tctx, cancel := context.WithTimeout(wctx, a.timeout)\n\tdefer cancel()\n\n\treturn IterateBatchedSend(msg, func(i int, p types.Part) error {\n\t\tc, err := a.getContainer(a.container.String(i, msg))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := a.uploadToBlob(ctx, p.Get(), a.path.String(i, msg), a.blobType.String(i, msg), c); err != nil {\n\t\t\tif containerNotFound(err) {\n\t\t\t\tif _, cerr := c.Create(ctx, azblob.Metadata{}, azblob.PublicAccessNone); cerr != nil {\n\t\t\t\t\ta.log.Errorf(\"error creating container: %v.\", cerr)\n\t\t\t\t} else {\n\t\t\t\t\ta.log.Infof(\"created container: %s.\", c.String())\n\t\t\t\t\t// Retry upload to blob\n\t\t\t\t\terr = a.uploadToBlob(ctx, p.Get(), a.path.String(i, msg), a.blobType.String(i, msg), c)\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t})\n}", "title": "" }, { "docid": "5bd2dc3b1a1a73fddf748dda8be77574", "score": "0.54557043", "text": "func (_obj *Apiaccount) Account_uploadThemeOneWayWithContext(tarsCtx context.Context, params *TLaccount_uploadTheme, _opt ...map[string]string) (ret Document, err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = params.WriteBlock(_os, 1)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tvar _status map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 1, \"account_uploadTheme\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn ret, nil\n}", "title": "" }, { "docid": "a9fb660e603a3d3d301b073298d1b263", "score": "0.5428048", "text": "func NewUploadOpmlContext(ctx context.Context, r *http.Request, service *goa.Service) (*UploadOpmlContext, error) {\n\tvar err error\n\tresp := goa.ContextResponse(ctx)\n\tresp.Service = service\n\treq := goa.ContextRequest(ctx)\n\treq.Request = r\n\trctx := UploadOpmlContext{Context: ctx, ResponseData: resp, RequestData: req}\n\treturn &rctx, err\n}", "title": "" }, { "docid": "9b85875580d1598d8ed5c3feec5c61db", "score": "0.53459597", "text": "func (c *MediaUploadCall) Context(ctx 
context.Context) *MediaUploadCall {\n\tc.ctx_ = ctx\n\treturn c\n}", "title": "" }, { "docid": "9b85875580d1598d8ed5c3feec5c61db", "score": "0.53459597", "text": "func (c *MediaUploadCall) Context(ctx context.Context) *MediaUploadCall {\n\tc.ctx_ = ctx\n\treturn c\n}", "title": "" }, { "docid": "42eb27d89a795d8e1fbc3a5e21c76a5d", "score": "0.53381747", "text": "func (c *IndexingDatasourcesItemsUploadCall) Context(ctx context.Context) *IndexingDatasourcesItemsUploadCall {\n\tc.ctx_ = ctx\n\treturn c\n}", "title": "" }, { "docid": "9db66d9b070f449c72cdb7f9ce4d1e29", "score": "0.5319483", "text": "func (_obj *UserInfoService) SignUpWithContext(tarsCtx context.Context, wxId string, userInfo *UserInfo, errCode *ErrorCode, _opt ...map[string]string) (ret int32, err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = _os.Write_string(wxId, 1)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\terr = userInfo.WriteBlock(_os, 2)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\terr = _os.Write_int32(int32((*errCode)), 3)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tvar _status map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 0, \"SignUp\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\t_is := codec.NewReader(tools.Int8ToByte(_resp.SBuffer))\n\terr = _is.Read_int32(&ret, 0, true)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\terr = _is.Read_int32((*int32)(&(*errCode)), 3, true)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn ret, nil\n}", "title": "" }, { "docid": "13c928907f801c00588927cd71d2d9cb", "score": "0.5317977", "text": "func (o *RequestBackupUploadParams) WithContext(ctx context.Context) *RequestBackupUploadParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "93178aba58a775415e80837aaf048bd4", "score": "0.5315949", "text": "func withContext(ctx context.Context, req *http.Request, apiGwRequest events.APIGatewayProxyRequest) *http.Request {\n\treturn req.WithContext(WithContext(ctx, apiGwRequest))\n}", "title": "" }, { "docid": "5145ba2810ca141ca1e9ca18e4c99dec", "score": "0.5289755", "text": "func (req StorageInfoRequest) Context(ctx context.Context) StorageInfoRequest {\n\treq.impl = req.impl.Context(ctx)\n\n\treturn req\n}", "title": "" }, { "docid": "505754c5960a7743b73a652d52462d60", "score": "0.525795", "text": "func (_obj *Apiaccount) Account_uploadWallPaperOneWayWithContext(tarsCtx context.Context, params *TLaccount_uploadWallPaper, _opt ...map[string]string) (ret WallPaper, err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = params.WriteBlock(_os, 1)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tvar _status map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} 
else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 1, \"account_uploadWallPaper\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn ret, nil\n}", "title": "" }, { "docid": "4abf0fba0f7e2df68a03f83debd7d61a", "score": "0.51706463", "text": "func (api *Client) UploadFileContext(ctx context.Context, params FileUploadParameters) (file *File, err error) {\n\t// Test if user token is valid. This helps because client.Do doesn't like this for some reason. XXX: More\n\t// investigation needed, but for now this will do.\n\t_, err = api.AuthTestContext(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tresponse := &fileResponseFull{}\n\tvalues := url.Values{}\n\tif params.Filetype != \"\" {\n\t\tvalues.Add(\"filetype\", params.Filetype)\n\t}\n\tif params.Filename != \"\" {\n\t\tvalues.Add(\"filename\", params.Filename)\n\t}\n\tif params.Title != \"\" {\n\t\tvalues.Add(\"title\", params.Title)\n\t}\n\tif params.InitialComment != \"\" {\n\t\tvalues.Add(\"initial_comment\", params.InitialComment)\n\t}\n\tif params.ThreadTimestamp != \"\" {\n\t\tvalues.Add(\"thread_ts\", params.ThreadTimestamp)\n\t}\n\tif len(params.Channels) != 0 {\n\t\tvalues.Add(\"channels\", strings.Join(params.Channels, \",\"))\n\t}\n\tif params.Content != \"\" {\n\t\tvalues.Add(\"content\", params.Content)\n\t\tvalues.Add(\"token\", api.token)\n\t\terr = api.postMethod(ctx, \"files.upload\", values, response)\n\t} else if params.File != \"\" {\n\t\terr = postLocalWithMultipartResponse(ctx, api.httpclient, api.endpoint+\"files.upload\", params.File, \"file\", api.token, values, response, api)\n\t} else if params.Reader != nil {\n\t\tif params.Filename == \"\" {\n\t\t\treturn nil, fmt.Errorf(\"files.upload: FileUploadParameters.Filename is mandatory when using FileUploadParameters.Reader\")\n\t\t}\n\t\terr = postWithMultipartResponse(ctx, api.httpclient, api.endpoint+\"files.upload\", params.Filename, \"file\", api.token, values, params.Reader, response, api)\n\t}\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &response.File, response.Err()\n}", "title": "" }, { "docid": "8e8e2a824a3b09e20105dee5ad8d56b2", "score": "0.51660514", "text": "func (o *FilesPutParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "db77b40584f0366c92b587434271ff39", "score": "0.51523525", "text": "func (ml MockLog) AddWithContext(_ error, _ map[string]any) {}", "title": "" }, { "docid": "c0548e3256d0414ad3c7252df1d62299", "score": "0.5149524", "text": "func (_obj *Log) LoggerWithContext(ctx context.Context, App string, Server string, File string, Format string, Buffer []string, _opt ...map[string]string) (err error) {\r\n\r\n\tvar length int32\r\n\tvar have bool\r\n\tvar ty byte\r\n\t_os := codec.NewBuffer()\r\n\terr = _os.Write_string(App, 1)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\r\n\terr = _os.Write_string(Server, 2)\r\n\tif err != nil {\r\n\t\treturn 
err\r\n\t}\r\n\r\n\terr = _os.Write_string(File, 3)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\r\n\terr = _os.Write_string(Format, 4)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\r\n\terr = _os.WriteHead(codec.LIST, 5)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\terr = _os.Write_int32(int32(len(Buffer)), 0)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\tfor _, v := range Buffer {\r\n\r\n\t\terr = _os.Write_string(v, 0)\r\n\t\tif err != nil {\r\n\t\t\treturn err\r\n\t\t}\r\n\t}\r\n\r\n\tvar _status map[string]string\r\n\tvar _context map[string]string\r\n\tif len(_opt) == 1 {\r\n\t\t_context = _opt[0]\r\n\t} else if len(_opt) == 2 {\r\n\t\t_context = _opt[0]\r\n\t\t_status = _opt[1]\r\n\t}\r\n\t_resp := new(requestf.ResponsePacket)\r\n\terr = _obj.s.Tars_invoke(ctx, 0, \"logger\", _os.ToBytes(), _status, _context, _resp)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\r\n\tif len(_opt) == 1 {\r\n\t\tfor k := range _context {\r\n\t\t\tdelete(_context, k)\r\n\t\t}\r\n\t\tfor k, v := range _resp.Context {\r\n\t\t\t_context[k] = v\r\n\t\t}\r\n\t} else if len(_opt) == 2 {\r\n\t\tfor k := range _context {\r\n\t\t\tdelete(_context, k)\r\n\t\t}\r\n\t\tfor k, v := range _resp.Context {\r\n\t\t\t_context[k] = v\r\n\t\t}\r\n\t\tfor k := range _status {\r\n\t\t\tdelete(_status, k)\r\n\t\t}\r\n\t\tfor k, v := range _resp.Status {\r\n\t\t\t_status[k] = v\r\n\t\t}\r\n\r\n\t}\r\n\t_ = length\r\n\t_ = have\r\n\t_ = ty\r\n\treturn nil\r\n}", "title": "" }, { "docid": "3f2a27aad596be444d2ebf7956254b2a", "score": "0.51251763", "text": "func (o *RequestBackupUploadParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "d4abe916548a01e912a062b799ede390", "score": "0.50956285", "text": "func RFCWithContext(ctx context.Context) ROption {\n\treturn func(w *rfc5424) {\n\t\tw.ctx = ctx\n\t}\n}", "title": "" }, { "docid": "845499a385208e19087f7ea92ee1bfde", "score": "0.5072165", "text": "func SignOptionWithContext(c context.Context) SignOption {\n\treturn func(o *signOptions) error {\n\t\tif o == nil {\n\t\t\treturn errors.New(errors.KsiInvalidArgumentError).AppendMessage(\"Missing sign options object.\")\n\t\t}\n\t\tif c == nil {\n\t\t\treturn errors.New(errors.KsiInvalidArgumentError).AppendMessage(\"Missing context.\")\n\t\t}\n\t\to.context = c\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "681e2d884c11092006964b8ae0a366fa", "score": "0.5071075", "text": "func NewUploadImageContext(ctx context.Context, r *http.Request, service *goa.Service) (*UploadImageContext, error) {\n\tvar err error\n\tresp := goa.ContextResponse(ctx)\n\tresp.Service = service\n\treq := goa.ContextRequest(ctx)\n\treq.Request = r\n\trctx := UploadImageContext{Context: ctx, ResponseData: resp, RequestData: req}\n\tparamEntity := req.Params[\"entity\"]\n\tif len(paramEntity) > 0 {\n\t\trawEntity := paramEntity[0]\n\t\trctx.Entity = rawEntity\n\t}\n\tparamID := req.Params[\"id\"]\n\tif len(paramID) > 0 {\n\t\trawID := paramID[0]\n\t\trctx.ID = rawID\n\t}\n\treturn &rctx, err\n}", "title": "" }, { "docid": "c17884feea1ca683bc818ca81db7daba", "score": "0.50699276", "text": "func (mock *S3SDKUploaderMock) UploadWithContextCalls() []struct {\n\tCtx context.Context\n\tIn *s3manager.UploadInput\n\tOptions []func(*s3manager.Uploader)\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tIn *s3manager.UploadInput\n\t\tOptions []func(*s3manager.Uploader)\n\t}\n\tmock.lockUploadWithContext.RLock()\n\tcalls = mock.calls.UploadWithContext\n\tmock.lockUploadWithContext.RUnlock()\n\treturn 
calls\n}", "title": "" }, { "docid": "01350ca7d64742010bd4c67cc342a203", "score": "0.5022364", "text": "func (c *Client) StoreContext(ctx context.Context, req Request, dest string) error {\n\tif hasWebhook(req) {\n\t\treturn errors.New(\"cannot use Store method with a webhook\")\n\t}\n\tresp, err := c.PostContext(ctx, req)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn errors.New(\"failed to generate the result PDF\")\n\t}\n\treturn writeNewFile(dest, resp.Body)\n}", "title": "" }, { "docid": "22af7693fc5b6011efb8bb4f63a107fc", "score": "0.5007457", "text": "func (r *ExtenderReq) WithContext(ctx context.Context) *ExtenderReq {\n\tif r == nil {\n\t\treturn nil\n\t}\n\n\tswitch {\n\tcase ctx == nil:\n\t\tr.ctx = context.Background()\n\tdefault:\n\t\tr.ctx = ctx\n\t}\n\treturn r\n}", "title": "" }, { "docid": "a6ea7d946fcbcbd15832918c7a3925bf", "score": "0.49913698", "text": "func (o *FilesPutParams) WithContext(ctx context.Context) *FilesPutParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "894058eec0963f84ef06e9f41dbb20eb", "score": "0.4970093", "text": "func (c *ProjectsLocationsFunctionsGenerateUploadUrlCall) Context(ctx context.Context) *ProjectsLocationsFunctionsGenerateUploadUrlCall {\n\tc.ctx_ = ctx\n\treturn c\n}", "title": "" }, { "docid": "435f492d26ff1974e008c17b2c2ee108", "score": "0.496718", "text": "func PublishWithContext(ctx context.Context, event AuditEvent) error {\n\tc := GetContextFromContext(ctx)\n\tfor key, value := range c {\n\t\tif _, ok := event[key]; ok {\n\t\t\tcontinue\n\t\t}\n\n\t\tevent[key] = value\n\t}\n\n\treturn Publish(event)\n}", "title": "" }, { "docid": "dfeedc30a1e164c8d3cd2843aeb24579", "score": "0.4914935", "text": "func (mr *MockEC2MockRecorder) ModifyTransitGatewayVpcAttachmentWithContext(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{arg0, arg1}, arg2...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"ModifyTransitGatewayVpcAttachmentWithContext\", reflect.TypeOf((*MockEC2)(nil).ModifyTransitGatewayVpcAttachmentWithContext), varargs...)\n}", "title": "" }, { "docid": "71d698f4ed1a7668e7ee2ec92330870a", "score": "0.49138007", "text": "func (mr *MockEC2MockRecorder) ModifyLaunchTemplateWithContext(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{arg0, arg1}, arg2...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"ModifyLaunchTemplateWithContext\", reflect.TypeOf((*MockEC2)(nil).ModifyLaunchTemplateWithContext), varargs...)\n}", "title": "" }, { "docid": "6bb9d66118b1e4ec23bfe4b331cf44b8", "score": "0.49131328", "text": "func NewUploadParamsWithContext(ctx context.Context) *UploadParams {\n\treturn &UploadParams{\n\t\tContext: ctx,\n\t}\n}", "title": "" }, { "docid": "1d727a2dc74b2c675f08c3a074868bdd", "score": "0.49046218", "text": "func (req ReplaceObjectManyRequest) Context(ctx context.Context) ReplaceObjectManyRequest {\n\treq.impl = req.impl.Context(ctx)\n\n\treturn req\n}", "title": "" }, { "docid": "8e722027252d1c583d255a920b5fd72f", "score": "0.48675895", "text": "func (wtw *writeTxWrap) RunWithContext(ctx aws.Context) error {\n\treturn wtw.writeTx.RunWithContext(ctx)\n}", "title": "" }, { "docid": "b3d2cc130c094687b2906d336689d875", "score": "0.48675498", "text": "func (o *UploadHardwareProfileJposPathsPropertiesParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", 
"title": "" }, { "docid": "6395c74af590fc9c04950ac7e7d14e7a", "score": "0.48666382", "text": "func NewUploadImageContext(ctx context.Context, r *http.Request, service *goa.Service) (*UploadImageContext, error) {\n\tvar err error\n\tresp := goa.ContextResponse(ctx)\n\tresp.Service = service\n\treq := goa.ContextRequest(ctx)\n\treq.Request = r\n\trctx := UploadImageContext{Context: ctx, ResponseData: resp, RequestData: req}\n\treturn &rctx, err\n}", "title": "" }, { "docid": "e9737b7e5f252068746ac2258e73678b", "score": "0.4864284", "text": "func (c Client) UpdateWithContext(context context.Context, input *UpdatePublicKeyInput) (*UpdatePublicKeyResponse, error) {\n\top := client.Operation{\n\t\tMethod: http.MethodPost,\n\t\tURI: \"/Credentials/PublicKeys/{sid}\",\n\t\tContentType: client.URLEncoded,\n\t\tPathParams: map[string]string{\n\t\t\t\"sid\": c.sid,\n\t\t},\n\t}\n\n\tif input == nil {\n\t\tinput = &UpdatePublicKeyInput{}\n\t}\n\n\tresponse := &UpdatePublicKeyResponse{}\n\tif err := c.client.Send(context, op, input, response); err != nil {\n\t\treturn nil, err\n\t}\n\treturn response, nil\n}", "title": "" }, { "docid": "4fa19df9e0b64beed8577123b87c89ad", "score": "0.48632893", "text": "func (c Client) ArchiveWithContext(context context.Context) (*ArchivePluginResponse, error) {\n\top := client.Operation{\n\t\tMethod: http.MethodPost,\n\t\tURI: \"/PluginService/Plugins/{sid}/Archive\",\n\t\tPathParams: map[string]string{\n\t\t\t\"sid\": c.sid,\n\t\t},\n\t}\n\n\tresponse := &ArchivePluginResponse{}\n\tif err := c.client.Send(context, op, nil, response); err != nil {\n\t\treturn nil, err\n\t}\n\treturn response, nil\n}", "title": "" }, { "docid": "13a2887012d0077d4b4ff53f5a7f7a64", "score": "0.48523793", "text": "func (c *Creator) NewWithContext(ctx context.Context) (filestorage.FileStorage, error) {\n\tfs := NewWithClient(ctx, c.client)\n\terr := fs.SetBucket(c.defaultBucket)\n\treturn fs, err\n}", "title": "" }, { "docid": "21dcfda49675d4b030ed04f0a8dc1929", "score": "0.4851558", "text": "func (tmpl *ContainerTemplate) ValidateContext(context *TemplateBuildContext) *axerror.AXError {\n\treturn nil\n}", "title": "" }, { "docid": "5890f0c34a67eaaa588d875d051b4f3d", "score": "0.4851326", "text": "func WithContext(ctx context.Context, apiGwRequest events.APIGatewayProxyRequest) context.Context {\n\tlc, _ := lambdacontext.FromContext(ctx)\n\treturn context.WithValue(ctx, contextKey{}, requestContext{\n\t\tlambdaContext: lc,\n\t\tgatewayProxyContext: apiGwRequest.RequestContext,\n\t\tstageVars: apiGwRequest.StageVariables,\n\t})\n}", "title": "" }, { "docid": "c64c95f5bf7e0b08c6f091f2d552f342", "score": "0.48429835", "text": "func WithContext(ctx context.Context) Opt {\n\treturn func(t *Tortoise) {\n\t\tt.ctx = ctx\n\t}\n}", "title": "" }, { "docid": "cc9382f5f0238ae3be29212dcc026313", "score": "0.483936", "text": "func (p *stream) sendWithContext(ctx context.Context, data []byte) error {\n\tresult := make(chan error)\n\n\tgo func() {\n\t\terr := p.send(data)\n\t\tselect {\n\t\tcase result <- errors.Wrap(err, \"send failed\"):\n\t\tcase <-ctx.Done():\n\t\t}\n\t}()\n\n\tselect {\n\tcase err := <-result:\n\t\treturn err\n\tcase <-ctx.Done():\n\t\treturn ctx.Err()\n\t}\n}", "title": "" }, { "docid": "67361e5c2f2553b506d894977926b355", "score": "0.4810568", "text": "func (o *UploadReportLibraryParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "b8a5284d7fb538e71153ea191ba1f573", "score": "0.48065358", "text": "func (req ReplaceManyRequest) Context(ctx 
context.Context) ReplaceManyRequest {\n\treq.impl = req.impl.Context(ctx)\n\n\treturn req\n}", "title": "" }, { "docid": "90bf358970b58f74f59d03924b58996f", "score": "0.48039556", "text": "func (o *ListTransactionFilesRequest) WithContext(ctx context.Context) *ListTransactionFilesRequest {\n\to.Context = ctx\n\treturn o\n}", "title": "" }, { "docid": "583f0470ddbd05c11f3aafe9c015d044", "score": "0.48004773", "text": "func (o *UploadTemplateParams) WithHTTPClient(client *http.Client) *UploadTemplateParams {\n\to.SetHTTPClient(client)\n\treturn o\n}", "title": "" }, { "docid": "ca21cd95c053d8eba710007700789044", "score": "0.47953966", "text": "func (o *CreatePackageRepositoryUploadParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "2b8d15c1bfac828ed41d20107257d91d", "score": "0.47880244", "text": "func WithContext(ctx context.Context) RequestOption {\n\treturn func(cfg *RequestConfig) {\n\t\tcfg.Request = cfg.Request.WithContext(ctx)\n\t}\n}", "title": "" }, { "docid": "dc619b0b0be197d077a5e3e70984fc95", "score": "0.47773963", "text": "func (mr *MockEC2MockRecorder) ImportImageWithContext(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{arg0, arg1}, arg2...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"ImportImageWithContext\", reflect.TypeOf((*MockEC2)(nil).ImportImageWithContext), varargs...)\n}", "title": "" }, { "docid": "d4cd0a351b0134281a0c194fcaa3f8ba", "score": "0.47751337", "text": "func (_obj *UserInfoService) SignInWithContext(tarsCtx context.Context, wxId string, userInfo *UserInfo, errCode *ErrorCode, _opt ...map[string]string) (ret int32, err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = _os.Write_string(wxId, 1)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\terr = (*userInfo).WriteBlock(_os, 2)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\terr = _os.Write_int32(int32((*errCode)), 3)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tvar _status map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 0, \"SignIn\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\t_is := codec.NewReader(tools.Int8ToByte(_resp.SBuffer))\n\terr = _is.Read_int32(&ret, 0, true)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\terr = (*userInfo).ReadBlock(_is, 2, true)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\terr = _is.Read_int32((*int32)(&(*errCode)), 3, true)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn ret, nil\n}", "title": "" }, { "docid": "aedb288fbba696489340fcc698391bbb", "score": "0.4772596", "text": "func (o *PostMeInstallationTemplateParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": 
"77ae85de4629b902fe62dd2d7998327e", "score": "0.4771604", "text": "func (o *EmployeeEntitlementGrantClientEntitlementsByTemplateGrantClientEntitlementsByTemplateParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "b0487f3a99e2836a399f70b0773f9d46", "score": "0.47473505", "text": "func (o *UsageParams) WithContext(ctx context.Context) *UsageParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "2442233c2efa5e3bfed7d689293fc8ae", "score": "0.47358793", "text": "func (obj *Auth) AuthProcessWithContext(tarsCtx context.Context, request *AuthRequest, opts ...map[string]string) (ret int32, err error) {\n\tvar (\n\t\tlength int32\n\t\thave bool\n\t\tty byte\n\t)\n\tbuf := codec.NewBuffer()\n\terr = request.WriteBlock(buf, 1)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tvar statusMap map[string]string\n\tvar contextMap map[string]string\n\tif len(opts) == 1 {\n\t\tcontextMap = opts[0]\n\t} else if len(opts) == 2 {\n\t\tcontextMap = opts[0]\n\t\tstatusMap = opts[1]\n\t}\n\n\ttarsResp := new(requestf.ResponsePacket)\n\terr = obj.servant.TarsInvoke(tarsCtx, 0, \"authProcess\", buf.ToBytes(), statusMap, contextMap, tarsResp)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\treadBuf := codec.NewReader(tools.Int8ToByte(tarsResp.SBuffer))\n\terr = readBuf.ReadInt32(&ret, 0, true)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tif len(opts) == 1 {\n\t\tfor k := range contextMap {\n\t\t\tdelete(contextMap, k)\n\t\t}\n\t\tfor k, v := range tarsResp.Context {\n\t\t\tcontextMap[k] = v\n\t\t}\n\t} else if len(opts) == 2 {\n\t\tfor k := range contextMap {\n\t\t\tdelete(contextMap, k)\n\t\t}\n\t\tfor k, v := range tarsResp.Context {\n\t\t\tcontextMap[k] = v\n\t\t}\n\t\tfor k := range statusMap {\n\t\t\tdelete(statusMap, k)\n\t\t}\n\t\tfor k, v := range tarsResp.Status {\n\t\t\tstatusMap[k] = v\n\t\t}\n\t}\n\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn ret, nil\n}", "title": "" }, { "docid": "eb153e298214ba622f60807c38badcbd", "score": "0.4728365", "text": "func OCSFormatCtx(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tswitch r.URL.Query().Get(\"format\") {\n\t\tcase \"\", \"xml\":\n\t\t\tr.Header.Set(\"Accept\", \"application/xml\")\n\t\t\tr = r.WithContext(context.WithValue(r.Context(), render.ContentTypeCtxKey, render.ContentTypeXML))\n\t\tcase \"json\":\n\t\t\tr.Header.Set(\"Accept\", \"application/json\")\n\t\t\tr = r.WithContext(context.WithValue(r.Context(), render.ContentTypeCtxKey, render.ContentTypeJSON))\n\t\t}\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "title": "" }, { "docid": "b5c6d6110656699e88eaa52839cfd82f", "score": "0.47205082", "text": "func Context(ctx context.Context) IStep {\n\treturn &hitStep{\n\t\tTrace: ett.Prepare(),\n\t\tWhen: requestCreateStep,\n\t\tCallPath: nil,\n\t\tExec: func(hit *hitImpl) error {\n\t\t\thit.request.Request = hit.request.Request.WithContext(ctx)\n\t\t\treturn nil\n\t\t},\n\t}\n}", "title": "" }, { "docid": "e40608b6263dba28b874a49513213a87", "score": "0.47192007", "text": "func (mr *MockEC2MockRecorder) CopyFpgaImageWithContext(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{arg0, arg1}, arg2...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"CopyFpgaImageWithContext\", reflect.TypeOf((*MockEC2)(nil).CopyFpgaImageWithContext), varargs...)\n}", "title": "" }, { "docid": "80b8dd492437a0717584a9f449eac225", "score": "0.47146055", "text": "func WithContext(ctx 
context.Context) func(opts *Options) {\n\treturn func(opts *Options) {\n\t\topts.ctx = ctx\n\t}\n}", "title": "" }, { "docid": "af0e00786df2ba3308f7ba94c9629a46", "score": "0.47101477", "text": "func (a *Application) PutContext(r *http.Request) *http.Request {\n\tctx := r.Context()\n\tctx = PutContext(ctx)\n\tr = r.WithContext(ctx)\n\t// be sure to call the superclass version so the goradd framework can operate\n\treturn a.Application.PutContext(r)\n}", "title": "" }, { "docid": "a6b03d51b3bf0ee038783feb170e17c6", "score": "0.47091186", "text": "func NewUploadTemplateParamsWithHTTPClient(client *http.Client) *UploadTemplateParams {\n\treturn &UploadTemplateParams{\n\t\tHTTPClient: client,\n\t}\n}", "title": "" }, { "docid": "05f61818f82c57be92170f37211fa2c0", "score": "0.47045374", "text": "func (o *BulkImportDataParams) WithContext(ctx context.Context) *BulkImportDataParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "5f5d3b3d4537e9e9088f7fd105e9352f", "score": "0.47032622", "text": "func (_m *MockDynamoDBAPI) UntagResourceWithContext(_param0 aws.Context, _param1 *dynamodb.UntagResourceInput, _param2 ...request.Option) (*dynamodb.UntagResourceOutput, error) {\n\t_s := []interface{}{_param0, _param1}\n\tfor _, _x := range _param2 {\n\t\t_s = append(_s, _x)\n\t}\n\tret := _m.ctrl.Call(_m, \"UntagResourceWithContext\", _s...)\n\tret0, _ := ret[0].(*dynamodb.UntagResourceOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "72d83c0a3018ff83a9d103e1f8b7c157", "score": "0.46982718", "text": "func (c *Client) PostWithContext(\n\tctx context.Context,\n\turl string,\n\tqueryParams interface{},\n\treqBody interface{},\n\trespBody interface{},\n) (*http.Response, error) {\n\treq, err := c.NewRequest(http.MethodPost, url, queryParams, reqBody, respBody)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq.WithContext(ctx)\n\n\treturn c.Do(req)\n}", "title": "" }, { "docid": "57677dc053cda45dc17a8e3d7bb27631", "score": "0.46862644", "text": "func (_m *MockDynamoDBAPI) TagResourceWithContext(_param0 aws.Context, _param1 *dynamodb.TagResourceInput, _param2 ...request.Option) (*dynamodb.TagResourceOutput, error) {\n\t_s := []interface{}{_param0, _param1}\n\tfor _, _x := range _param2 {\n\t\t_s = append(_s, _x)\n\t}\n\tret := _m.ctrl.Call(_m, \"TagResourceWithContext\", _s...)\n\tret0, _ := ret[0].(*dynamodb.TagResourceOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "abe24d260771dcdbe9367ea54e71119c", "score": "0.46854535", "text": "func (c Client) UpdateWithContext(context context.Context, input *UpdateServiceInput) (*UpdateServiceResponse, error) {\n\top := client.Operation{\n\t\tMethod: http.MethodPost,\n\t\tURI: \"/Services/{sid}\",\n\t\tContentType: client.URLEncoded,\n\t\tPathParams: map[string]string{\n\t\t\t\"sid\": c.sid,\n\t\t},\n\t}\n\n\tif input == nil {\n\t\tinput = &UpdateServiceInput{}\n\t}\n\n\tresponse := &UpdateServiceResponse{}\n\tif err := c.client.Send(context, op, input, response); err != nil {\n\t\treturn nil, err\n\t}\n\treturn response, nil\n}", "title": "" }, { "docid": "abe24d260771dcdbe9367ea54e71119c", "score": "0.46854535", "text": "func (c Client) UpdateWithContext(context context.Context, input *UpdateServiceInput) (*UpdateServiceResponse, error) {\n\top := client.Operation{\n\t\tMethod: http.MethodPost,\n\t\tURI: \"/Services/{sid}\",\n\t\tContentType: client.URLEncoded,\n\t\tPathParams: map[string]string{\n\t\t\t\"sid\": c.sid,\n\t\t},\n\t}\n\n\tif input == nil {\n\t\tinput 
= &UpdateServiceInput{}\n\t}\n\n\tresponse := &UpdateServiceResponse{}\n\tif err := c.client.Send(context, op, input, response); err != nil {\n\t\treturn nil, err\n\t}\n\treturn response, nil\n}", "title": "" }, { "docid": "abe24d260771dcdbe9367ea54e71119c", "score": "0.46854535", "text": "func (c Client) UpdateWithContext(context context.Context, input *UpdateServiceInput) (*UpdateServiceResponse, error) {\n\top := client.Operation{\n\t\tMethod: http.MethodPost,\n\t\tURI: \"/Services/{sid}\",\n\t\tContentType: client.URLEncoded,\n\t\tPathParams: map[string]string{\n\t\t\t\"sid\": c.sid,\n\t\t},\n\t}\n\n\tif input == nil {\n\t\tinput = &UpdateServiceInput{}\n\t}\n\n\tresponse := &UpdateServiceResponse{}\n\tif err := c.client.Send(context, op, input, response); err != nil {\n\t\treturn nil, err\n\t}\n\treturn response, nil\n}", "title": "" }, { "docid": "6fc0ff1162059150b6a50d2b9b80b6fe", "score": "0.4685186", "text": "func (o *SendTestTemplateParams) WithContext(ctx context.Context) *SendTestTemplateParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "713d228d14ab3a61d87b5c97de259470", "score": "0.46842086", "text": "func (m *Upload) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateBytes(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateContentType(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateCreatedAt(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateFilename(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateID(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateStatus(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateUpdatedAt(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateURL(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "f3ac9105ca5bfde3499b0149b2bc6593", "score": "0.46812704", "text": "func wrapContext(ctx context.Context, f *FS) context.Context {\n\tctx = context.WithValue(ctx, libfs.CtxAppIDKey, f)\n\tlogTags := make(logger.CtxLogTags)\n\tlogTags[CtxIDKey] = CtxOpID\n\tctx = logger.NewContextWithLogTags(ctx, logTags)\n\treturn ctx\n}", "title": "" }, { "docid": "1017eb318ff114de7a679f03e1a2e45e", "score": "0.46799773", "text": "func (mr *MockEC2MockRecorder) ModifyTransitGatewayWithContext(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{arg0, arg1}, arg2...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"ModifyTransitGatewayWithContext\", reflect.TypeOf((*MockEC2)(nil).ModifyTransitGatewayWithContext), varargs...)\n}", "title": "" }, { "docid": "d3e8b3792e645dff9947c62b44a38de8", "score": "0.46759793", "text": "func templateContext(realpath string) *hb.DataFrame {\n\tframe := hb.NewDataFrame()\n\tframe.Set(FIELD_REALPATH, realpath)\n\n\t_, filename := filepath.Split(realpath)\n\tframe.Set(FIELD_TEMPLATE_NAME, filename)\n\n\treturn frame\n}", "title": "" }, { "docid": "a081e1c04cc26418cfd277db5c91c20a", "score": "0.4661032", "text": "func WithContext(ctx context.Context) interface {\n\tGetOption\n\tPutOption\n\tDelOption\n\tWatchOption\n} {\n\treturn 
&contextOption{Context: ctx}\n}", "title": "" }, { "docid": "c45ca1d728d789294dabbd2622b7604b", "score": "0.46581927", "text": "func (o *PostAPIV3MachinesRfqdnAttachmentsParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "cf52a46c065a05a482bb3293e4c1c498", "score": "0.46568882", "text": "func (mr *MockLoggerMockRecorder) WithContext(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WithContext\", reflect.TypeOf((*MockLogger)(nil).WithContext), arg0)\n}", "title": "" }, { "docid": "5e92e7ff1ab2d64ea5b2cfa45e665b73", "score": "0.46551424", "text": "func (o *PostMeInstallationTemplateParams) WithContext(ctx context.Context) *PostMeInstallationTemplateParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "f7070707962964134d045bc2a18796b8", "score": "0.46547934", "text": "func (mr *MockEC2MockRecorder) ImportKeyPairWithContext(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{arg0, arg1}, arg2...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"ImportKeyPairWithContext\", reflect.TypeOf((*MockEC2)(nil).ImportKeyPairWithContext), varargs...)\n}", "title": "" }, { "docid": "162c3c51a463cd666fe923b28c60e95d", "score": "0.46536607", "text": "func WithContext(ctx context.Context, store *Store) context.Context {\n\treturn context.WithValue(ctx, storeKey{}, store)\n}", "title": "" }, { "docid": "bb2a9d39bf164353385f735975e691ff", "score": "0.46512946", "text": "func TagContext(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) {\n\t\ttag := chi.URLParam(request, \"tag\")\n\t\tif tag == \"\" {\n\t\t\trender.Render(writer, request, api.ErrorInvalidRequest(fmt.Errorf(\"tag must be set\"))) //nolint\n\t\t\treturn\n\t\t}\n\t\tctx := context.WithValue(request.Context(), tagKey, tag)\n\t\tnext.ServeHTTP(writer, request.WithContext(ctx))\n\t})\n}", "title": "" }, { "docid": "1d912d0122cf1b9a19558f816d9a6084", "score": "0.4649974", "text": "func (o *CreateContentLibraryImageParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "3aeed031dde733ce0f84ba79dde7b03c", "score": "0.4647255", "text": "func (m *MockEC2) ImportImageWithContext(arg0 context.Context, arg1 *ec2.ImportImageInput, arg2 ...request.Option) (*ec2.ImportImageOutput, error) {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{arg0, arg1}\n\tfor _, a := range arg2 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"ImportImageWithContext\", varargs...)\n\tret0, _ := ret[0].(*ec2.ImportImageOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "de0aeb3c517360e63d94f68f8ab12f6f", "score": "0.46462125", "text": "func (o *SendTestTemplateParams) SetContext(ctx context.Context) {\n\to.Context = ctx\n}", "title": "" }, { "docid": "a653fa41978c5a1c7d0852316491210b", "score": "0.46414343", "text": "func (mr *MockEC2MockRecorder) ModifyIdFormatWithContext(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{arg0, arg1}, arg2...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"ModifyIdFormatWithContext\", reflect.TypeOf((*MockEC2)(nil).ModifyIdFormatWithContext), varargs...)\n}", "title": "" }, { "docid": "d91b135b93830ab6f6f6e0bead4a0130", "score": "0.4629328", "text": "func withContext(logger seelog.LoggerInterface, 
context ...string) (contextLogger T) {\n\tLoggerInstance.BaseLoggerInstance = logger\n\tformatFilter := &ContextFormatFilter{Context: context}\n\tcontextLogger = &Wrapper{Format: formatFilter, M: PkgMutex, Delegate: LoggerInstance}\n\n\tlogger.SetAdditionalStackDepth(1)\n\treturn contextLogger\n}", "title": "" }, { "docid": "7455e896f1f28d238949bb51e51f55d4", "score": "0.46266973", "text": "func (m *MockEC2) CopyFpgaImageWithContext(arg0 context.Context, arg1 *ec2.CopyFpgaImageInput, arg2 ...request.Option) (*ec2.CopyFpgaImageOutput, error) {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{arg0, arg1}\n\tfor _, a := range arg2 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"CopyFpgaImageWithContext\", varargs...)\n\tret0, _ := ret[0].(*ec2.CopyFpgaImageOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "ebc0f4f319135a33e05554fed5703552", "score": "0.46236965", "text": "func (o *ImportNodeUsingPOSTParams) WithContext(ctx context.Context) *ImportNodeUsingPOSTParams {\n\to.SetContext(ctx)\n\treturn o\n}", "title": "" }, { "docid": "2962c12a9561579039137e4170f7365d", "score": "0.46234804", "text": "func FormatWithContextTags(ctx context.Context, format string, args ...interface{}) string {\n\tvar buf strings.Builder\n\tformatTags(ctx, true /* brackets */, &buf)\n\tformatArgs(&buf, format, args...)\n\treturn buf.String()\n}", "title": "" }, { "docid": "96a1b707f1d38b3da314164d531d56fc", "score": "0.46231616", "text": "func (f *FS) WithContext(ctx context.Context) (context.Context, context.CancelFunc) {\n\tid, err := libkbfs.MakeRandomRequestID()\n\tif err != nil {\n\t\tf.log.CErrorf(ctx, \"Couldn't make request ID: %v\", err)\n\t\treturn ctx, func() {}\n\t}\n\n\tctx, cancel := context.WithCancel(ctx)\n\n\t// context.WithDeadline uses clock from `time` package, so we are not using\n\t// f.config.Clock() here\n\tstart := time.Now()\n\tctx, err = libcontext.NewContextWithCancellationDelayer(\n\t\tlibcontext.NewContextReplayable(ctx, func(ctx context.Context) context.Context {\n\t\t\tctx = wrapContext(context.WithValue(ctx, CtxIDKey, id), f)\n\t\t\tctx, _ = context.WithDeadline(ctx, start.Add(29*time.Second))\n\t\t\treturn ctx\n\t\t}))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn ctx, cancel\n}", "title": "" }, { "docid": "2a7aa8b005a83444f5cb1412d6e7912b", "score": "0.46229574", "text": "func WithContext(f func(*service.Context, <-chan *log15.Record)) func() {\n\treturn func() {\n\t\tlogChan := make(chan *log15.Record, logChanBufferSize)\n\t\tlog := log15.New()\n\t\tlog.SetHandler(log15.ChannelHandler(logChan))\n\n\t\tvar cancel context.CancelFunc\n\t\tbaseCtx := context.Background()\n\t\tbaseCtx, cancel = context.WithCancel(baseCtx)\n\n\t\tctx, err := service.NewContext(context.Background(), config.DefaultConfig(), log)\n\t\tSo(err, ShouldBeNil)\n\n\t\tctx.APIKeychain().AddBinKey([]byte(\"test\"))\n\t\tSo(ctx.APIKeychain().KeyCount(), ShouldEqual, 1)\n\n\t\tf(ctx, logChan)\n\n\t\tReset(func() {\n\t\t\tcancel()\n\t\t})\n\t}\n}", "title": "" }, { "docid": "73c038d2eb39d7603459843893535a07", "score": "0.4621086", "text": "func (req *Req) AddContext(key, val interface{}) {\n\treq.context = context.WithValue(req.context, key, val)\n}", "title": "" }, { "docid": "052164f23e6377278e4a471190be6db9", "score": "0.46185645", "text": "func (r *Request) WithContext(ctx context.Context) {\n\tr.request.WithContext(ctx)\n}", "title": "" }, { "docid": "d1a0b8149d3e1fe3205b179b5b8568a7", "score": "0.46129817", "text": "func (c *FilesCreateCall) 
Context(ctx context.Context) *FilesCreateCall {\n\tc.ctx_ = ctx\n\treturn c\n}", "title": "" }, { "docid": "e3a551d3470ad8a02fb13a0f0b5b2161", "score": "0.46109307", "text": "func (c *Client) PutWithContext(\n\tctx context.Context,\n\turl string,\n\tqueryParams interface{},\n\treqBody interface{},\n\trespBody interface{},\n) (*http.Response, error) {\n\treq, err := c.NewRequest(http.MethodPut, url, queryParams, reqBody, respBody)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq.WithContext(ctx)\n\n\treturn c.Do(req)\n}", "title": "" }, { "docid": "34cef2e4bda6bbe7031eb04753bfb819", "score": "0.46045962", "text": "func buildUploadRequest(action, url string, templateJSON *bytes.Buffer) (rest.Request, error) {\n\tuploadBody := &bytes.Buffer{}\n\twriter := multipart.NewWriter(uploadBody)\n\n\tpart, err := writer.CreateFormFile(\"tpl\", \"unusedFileNameValue\")\n\tif err != nil {\n\t\treturn rest.Request{}, fmt.Errorf(\"CreateFormFile failed: %s\", err)\n\t}\n\n\tn, err := io.Copy(part, templateJSON)\n\tif err != nil {\n\t\treturn rest.Request{}, fmt.Errorf(\"Json copy failed: %s\", err)\n\t} else if n == 0 {\n\t\treturn rest.Request{}, fmt.Errorf(\"Json copy failed: 0 bytes copied\")\n\t}\n\n\twriter.Close()\n\thttpRequest, err := http.NewRequest(action, url, uploadBody)\n\tif err != nil {\n\t\treturn rest.Request{}, fmt.Errorf(\"Could not build new request: %s\", err)\n\t}\n\n\thttpRequest.Header.Add(\"Content-Type\", writer.FormDataContentType())\n\treturn rest.Request{\n\t\thttpRequest,\n\t\tmap[string]string{},\n\t}, nil\n}", "title": "" }, { "docid": "b41f9295eff402793e9f0a9534d2e3d1", "score": "0.4600635", "text": "func (s *Cloudinary) WriteCtx(ctx context.Context, src io.Reader, destPath string) error {\n\tendpointURL := fmt.Sprintf(\"%s/upload\", s.apiBaseURL(\"auto\"))\n\n\t// REQUEST\n\tbody := bytes.NewBufferString(\"\")\n\t// attach the file\n\twriter := multipart.NewWriter(body)\n\tdefer writer.Close()\n\tpart, err := writer.CreateFormFile(\"file\", path.Base(destPath))\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = io.Copy(part, src)\n\t// Attach the fields\n\tsignature, timestamp := s.signature(destPath, false)\n\tfields := map[string]string{\n\t\t\"api_key\": s.apiKey,\n\t\t\"public_id\": destPath,\n\t\t\"timestamp\": timestamp,\n\t\t\"signature\": signature,\n\t}\n\tfor k, v := range fields {\n\t\tif err := writer.WriteField(k, v); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\t// Close the writer\n\tif err := writer.Close(); err != nil {\n\t\treturn err\n\t}\n\t// Make the request\n\treq, err := http.NewRequest(\"POST\", endpointURL, body)\n\tif err != nil {\n\t\treturn err\n\t}\n\treq.Header.Set(\"Content-Type\", writer.FormDataContentType())\n\n\t// Parse the response\n\tclient := &http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer resp.Body.Close()\n\tif resp.StatusCode != 200 {\n\t\t// Parse the error\n\t\tvar pld *cdnrErrorResponse\n\t\tif err := json.NewDecoder(resp.Body).Decode(&pld); err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn errors.New(pld.Error.Message)\n\t}\n\t// If the upload succeed we parse the response to cache the URL\n\tvar pld *cdnrUploadSuccessResponse\n\tif err := json.NewDecoder(resp.Body).Decode(&pld); err != nil {\n\t\treturn err\n\t}\n\ts.cache[destPath] = pld.SecureURL\n\treturn nil\n}", "title": "" } ]
f7d37c2cd39a0bd18eafcc2fb2c8631b
/ Undocumented Args: [], Returns: void
[ { "docid": "aa2f1f569fcaf740edef3e371dc9f8dc", "score": "0.0", "text": "func (o *FileDialog) X_MakeDir() {\n\t//log.Println(\"Calling FileDialog.X_MakeDir()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"FileDialog\", \"_make_dir\")\n\n\t// Call the parent method.\n\t// void\n\tretPtr := gdnative.NewEmptyVoid()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n}", "title": "" } ]
[ { "docid": "09736ca4aeb063fc475ec63557215d27", "score": "0.6141894", "text": "func Noop() {}", "title": "" }, { "docid": "9a01b23cc156f963ea392589c9f28d36", "score": "0.57752395", "text": "func Example() {}", "title": "" }, { "docid": "9a01b23cc156f963ea392589c9f28d36", "score": "0.57752395", "text": "func Example() {}", "title": "" }, { "docid": "4152c4014fa94e5461cb51ae56e3f48d", "score": "0.5718045", "text": "func main() {\n\t// TODO:\n}", "title": "" }, { "docid": "74b9190c3ca01a8a4c099dff3a094096", "score": "0.5634835", "text": "func Run() {\n\t// TODO: restructure\n}", "title": "" }, { "docid": "35aeced929b52ed664511d2ad70ceb92", "score": "0.5573457", "text": "func Dummy() {\n\tfmt.Println(\"dummy call, finished!\")\n}", "title": "" }, { "docid": "aa8517ec1c6473a3dc59a9a98d9701f7", "score": "0.55508447", "text": "func main() {\n\t//todo:\n}", "title": "" }, { "docid": "2ebbbe2c925c08ef2dfa111e4c083768", "score": "0.55291873", "text": "func setup() {\n\n}", "title": "" }, { "docid": "491be82390fc675181785326dd7550a5", "score": "0.552051", "text": "func DoSomething() {\n\tfmt.Println(\"Ok, ok, I'm doing it!\")\n}", "title": "" }, { "docid": "3a069eb6b9a7a518dcb65a93ab784418", "score": "0.550969", "text": "func NotUsed() {\n\tdisplay.ConsoleLogString(\"I am of no use\")\n}", "title": "" }, { "docid": "891394de93324e0f403a2aba77dd8335", "score": "0.5506299", "text": "func doSomething(){\n fmt.Println(\"Doing Something!\")\n}", "title": "" }, { "docid": "41b3a23f9da9b932738bca69c7d5fbf5", "score": "0.55054766", "text": "func ClearUnused() {}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.5463183", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.5463183", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.5463183", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.5463183", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.5463183", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.5463183", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.5463183", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "7ffde0564ffdbbd6410d7be59ca2b64a", "score": "0.53943366", "text": "func Reset() { ClearLoaded() }", "title": "" }, { "docid": "d977ffb97560ca81dc2cef5f0824e6ad", "score": "0.53851795", "text": "func xx() {\n\tpanic(\"Operation not supported\")\n}", "title": "" }, { "docid": "128aca90003524ad07b3c6c57bd50572", "score": "0.5366467", "text": "func initialize() {\n\n}", "title": "" }, { "docid": "0d6d009c2190ab71e1170b0e1be6485d", "score": "0.53427655", "text": "func doNothing(details *gophercloud.Server) error {\n\treturn nil\n}", "title": "" }, { "docid": "ae40f5cf6ad2eee6d7f2c21d1f8ab24a", "score": "0.5297625", "text": "func ShowUsage() {\n}", "title": "" }, { "docid": "54474bb6cd8451a5e7abd2f022df0c1d", "score": "0.5279271", "text": "func Fraud() {\n\n}", "title": "" }, { "docid": "36b4904c4d33bbd563546a781845f2ef", 
"score": "0.52585787", "text": "func setUpGraphics() {\n\n}", "title": "" }, { "docid": "1dca339e1b353d8bda352f43ac4813df", "score": "0.52252674", "text": "func init() {}", "title": "" }, { "docid": "1dca339e1b353d8bda352f43ac4813df", "score": "0.52252674", "text": "func init() {}", "title": "" }, { "docid": "1dca339e1b353d8bda352f43ac4813df", "score": "0.52252674", "text": "func init() {}", "title": "" }, { "docid": "ff2ef683a80f0a68df11c814c67e18bf", "score": "0.52184564", "text": "func main_main()", "title": "" }, { "docid": "f4c96c93a8b06c12b3b9a3c057291ea3", "score": "0.52151823", "text": "func ExampleWrite() {}", "title": "" }, { "docid": "eed2a3ad5c8dd95e5dad71694760de75", "score": "0.52136266", "text": "func CreateDocTest() {\n\n}", "title": "" }, { "docid": "d171fc3efd1993cddce24268b2889888", "score": "0.51994574", "text": "func SetUsage(f func()) { std.SetUsage(f) }", "title": "" }, { "docid": "17d7b5b54a5d03ef7c9f627883a732a9", "score": "0.5197363", "text": "func init() {\r\n}", "title": "" }, { "docid": "b5aa236d7d65e51a374daad5e0fd036c", "score": "0.5196158", "text": "func DoOtherStuff() {\n\n}", "title": "" }, { "docid": "6735847df02f27e29ca4faeade741027", "score": "0.5187718", "text": "func init() {\r\n\r\n}", "title": "" }, { "docid": "a4124c7348ef655c5fc3a3e14f1afe05", "score": "0.5166663", "text": "func main() { /* do nothing */ }", "title": "" }, { "docid": "70117fe78046fdb2d621bc5f3e2a5adf", "score": "0.5160826", "text": "func PrintHi() {\n fmt.Println(\"Hi\")\n}", "title": "" }, { "docid": "2da659646b6edc0593666ba1de7d5832", "score": "0.51505125", "text": "func main() {\n\t\n}", "title": "" }, { "docid": "5ed8f0b013db388a8ab90411f7ac647f", "score": "0.5149406", "text": "func Foo() {\n\n}", "title": "" }, { "docid": "926949b5db468fcc2395f6201b2346a4", "score": "0.51468974", "text": "func Load() {\n\t//do nothing...\n}", "title": "" }, { "docid": "1631a67a71f9d5fc35ea6502fe7c4f04", "score": "0.5141839", "text": "func Noop(...interface{}) {}", "title": "" }, { "docid": "a34322c4f12957e458b39e33c58cd55a", "score": "0.51300853", "text": "func Hello(){\n fmt.Println(\"Hello World\")\n}", "title": "" }, { "docid": "ce4bc4b4ea33fc9adf01cf482456fb41", "score": "0.51296103", "text": "func init() {\n\twidestNameSeen = 0\n\twidestTagsSeen = 0\n}", "title": "" }, { "docid": "22a70155a204dad6eae8529477c3dcaa", "score": "0.5128259", "text": "func ExampleAbsorb() {}", "title": "" }, { "docid": "43f26576928567e70ca57fbd75ea7706", "score": "0.5109628", "text": "func Whatever() {\n\n}", "title": "" }, { "docid": "39534d4845f3952814cea100fdc1f188", "score": "0.510472", "text": "func start() {\n\t// TODO: restructure\n}", "title": "" }, { "docid": "989ca11f9aa825e611b65ccc9c8e9878", "score": "0.50989926", "text": "func main(){\n\t// Ini komentar single line\n}", "title": "" }, { "docid": "9795f832e21e38d49868e8253fe21435", "score": "0.50793797", "text": "func init() {\n\n}", "title": "" }, { "docid": "9795f832e21e38d49868e8253fe21435", "score": "0.50793797", "text": "func init() {\n\n}", "title": "" }, { "docid": "9795f832e21e38d49868e8253fe21435", "score": "0.50793797", "text": "func init() {\n\n}", "title": "" }, { "docid": "9795f832e21e38d49868e8253fe21435", "score": "0.50793797", "text": "func init() {\n\n}", "title": "" }, { "docid": "9795f832e21e38d49868e8253fe21435", "score": "0.50793797", "text": "func init() {\n\n}", "title": "" }, { "docid": "9795f832e21e38d49868e8253fe21435", "score": "0.50793797", "text": "func init() {\n\n}", "title": "" }, { "docid": 
"9795f832e21e38d49868e8253fe21435", "score": "0.50793797", "text": "func init() {\n\n}", "title": "" }, { "docid": "9795f832e21e38d49868e8253fe21435", "score": "0.50793797", "text": "func init() {\n\n}", "title": "" }, { "docid": "9b801427a41611b6eda09b64d681c566", "score": "0.5077579", "text": "func Please(i interface{}) {\n\t// Nothing to do, just a marker\n}", "title": "" }, { "docid": "22262302ea296ebbdd002525a773db59", "score": "0.50581396", "text": "func main() {\n\t\n}", "title": "" }, { "docid": "22262302ea296ebbdd002525a773db59", "score": "0.50581396", "text": "func main() {\n\t\n}", "title": "" }, { "docid": "8f5de0e287745ab2d521e7f984518bac", "score": "0.5053033", "text": "func noopTeardown() {}", "title": "" }, { "docid": "d28035a7c55eab7d883dc661ba75a9d1", "score": "0.50494635", "text": "func main() {\n\t// stackIt()\n\t// _ = stackIt2()\n\tslogTest()\n}", "title": "" }, { "docid": "7372c3930b6edf689166ffdbaad9e3db", "score": "0.50485814", "text": "func init() {\n\tSetOutput(DefaultOutput)\n}", "title": "" }, { "docid": "c97bc8c495770ef69756b0254748cd73", "score": "0.5042331", "text": "func (p Point) NoArgs() {\n\t// Exercise no-argument/no-result paths.\n}", "title": "" }, { "docid": "a48b47bf03909333672750882ed31513", "score": "0.50344676", "text": "func FlushCoverage() {\n\n}", "title": "" }, { "docid": "e8f778369af46de9304d15b183996169", "score": "0.5033806", "text": "func Foo() {}", "title": "" }, { "docid": "a43945e2b05ee44d5b8748958a793bee", "score": "0.50323933", "text": "func main() {\n \n}", "title": "" }, { "docid": "bd8e0ef455ac2b555f0421ff20d0d64a", "score": "0.50252676", "text": "func Usage() {\n\tfmt.Println(usage())\n}", "title": "" }, { "docid": "82085285e51d0af2fc68ab33793d875b", "score": "0.50235724", "text": "func doSomething1() {\r\n\tfmt.Println(\"Hello Gopher1!!\")\r\n}", "title": "" }, { "docid": "2c59f7e7cebb43e6f65899dee0c325ee", "score": "0.50199485", "text": "func init() {\n Init()\n}", "title": "" }, { "docid": "e28512a64506b94e21ffb03d77f66a5d", "score": "0.5000791", "text": "func Main() {\n\tmain()\n}", "title": "" }, { "docid": "53a59fdbc7eb7fd0c26c3a482bc52680", "score": "0.49962845", "text": "func main() {\n\tfmt.Println(doStuff()) // TODO: add more things\n}", "title": "" }, { "docid": "f98cf68ee01cc5d8596a24c84595f129", "score": "0.4994509", "text": "func main() {\n\twriterInterfaceExample()\n\tincrementorExample()\n\tinterfaceConversion()\n\temptyInterface()\n\ttypeSwitch()\n}", "title": "" }, { "docid": "6279250bf4115a05a9be0b46c9615cbb", "score": "0.4991075", "text": "func usage() {\n\tlog.Printf(\"%s\\n\", usageStr)\n\tos.Exit(0)\n}", "title": "" }, { "docid": "28a4ebb00c379594fcdced3dcfcd2a34", "score": "0.49847955", "text": "func main() {\n\texample1()\n\tprintln(\"====================\")\n\t//example2()\n}", "title": "" }, { "docid": "5278494995aaa3de117bc01b87500a37", "score": "0.49832186", "text": "func GolintRepetitive() {}", "title": "" }, { "docid": "119f32076f140fcd154e3dde986b307a", "score": "0.49741888", "text": "func tapped() {\n\t// Nothing to do\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { 
"docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "fe37a56468760f9b7e2d909fa708e300", "score": "0.49678192", "text": "func main() {\n\n}", "title": "" }, { "docid": "72d0b9724112fda2445326da1a666b2f", "score": "0.4967183", "text": "func init() {\n\tResetHistory()\n}", "title": "" }, { "docid": "2c0cf8a87451702db328d18448743fed", "score": "0.4963828", "text": "func main() {\r\n}", "title": "" }, { "docid": "7b33c47d75b3d97c163ba46ec829c77c", "score": "0.49585012", "text": "func Baz() {}", "title": "" }, { "docid": "347b0018518d08b36fd156c83336dbf9", "score": "0.49523374", "text": "func (provider *Ristretto) Reset() {}", "title": "" }, { "docid": "14eef4f871d6e0f666c6e2fc1bef98a6", "score": "0.49517018", "text": "func Hello(name string) {\n\tfmt.Println(\"Hello \" + name)\n}", "title": "" }, { "docid": "cd295f99fdcc3a4b44ada1f12e83cc8f", "score": "0.4944399", "text": "func help() {\n}", "title": "" }, { "docid": "7bbb7b306307ec052fab61492dfd973a", "score": "0.49422768", "text": "func InitPlatform() {\n\n}", "title": "" }, { "docid": "9f27a12d27bf3d90445a16aee62298c9", "score": "0.49362755", "text": "func Hello() {\n\tfmt.Println(\"hello\")\n}", "title": "" }, { "docid": "dd78c9cdc2f6a5c6c57f64a5fd1c7a9d", "score": "0.49353242", "text": "func godocUnexported() {\r\n\tlog.Fatalln(\"This is a godoc UNEXPORTED experiment\")\r\n}", "title": "" }, { "docid": "67e3a79b4e7127338ecbfbe7ecbe5e55", "score": "0.49351326", "text": "func main () {\n}", "title": "" }, { "docid": "b3d93e6d7deb8791e194d9dccd038fad", "score": "0.49334842", "text": "func (self *UtilsDebug) Reset() {\n self.Object.Call(\"reset\")\n}", "title": "" } ]
4a2f7936dad0824fad683ea13138e609
HasCreatedDate returns a boolean if a field has been set.
[ { "docid": "d6c9b6e9f433fc1827b0cf1b56855c07", "score": "0.74844885", "text": "func (o *InlineResponse20036Notebook) HasCreatedDate() bool {\n\tif o != nil && o.CreatedDate != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" } ]
[ { "docid": "52cb3e02d117a96bab4529789ffd46ca", "score": "0.8093728", "text": "func (o *ViewTimelog) HasDateCreated() bool {\n\tif o != nil && o.DateCreated != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "0c74bff47abc3764560f860fc2d0ce22", "score": "0.7871765", "text": "func (o *GeoipConfig) HasDateCreated() bool {\n\tif o != nil && o.DateCreated != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "d776d065070105830d85e0d6d0b7c378", "score": "0.7840262", "text": "func (o *KubernetesNodeMetadata) HasCreatedDate() bool {\n\tif o != nil && o.CreatedDate != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "015ec3eabc0ca3460623746f18f904c2", "score": "0.76707834", "text": "func (o *InlineResponse20030Timers) HasDateCreated() bool {\n\tif o != nil && o.DateCreated != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "fb08eaa2c3fe44efcdc30d99a538f6e0", "score": "0.7448807", "text": "func (o *Drive) HasCreatedDateTime() bool {\n\tif o != nil && o.CreatedDateTime != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "497684afa128a1a87b1cb80f43bd2934", "score": "0.74352366", "text": "func (o *PatchedInvestorFee) HasCreated() bool {\n\tif o != nil && o.Created != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "9e07021100a83b75fbe29313e0b0d9a7", "score": "0.73973715", "text": "func (o *DriveItem) HasCreatedDateTime() bool {\n\tif o != nil && o.CreatedDateTime != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "8f59abc7428c8592927d3394100f2ff7", "score": "0.7375586", "text": "func (o *MicrosoftGraphSignIn) HasCreatedDateTime() bool {\n\tif o != nil && o.CreatedDateTime != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "ddeb39654db0d443759056cb4e027a84", "score": "0.7303654", "text": "func (o *PlannerTask) HasCreatedDateTime() bool {\n\tif o != nil && o.CreatedDateTime != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "405f9a1ea8f210538059580d6bada8a3", "score": "0.7295896", "text": "func (o *AuthToken) HasCreatedOn() bool {\n\tif o != nil && o.CreatedOn != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "001543bf29901b2855d90bdc5ab8a936", "score": "0.7264346", "text": "func (o *PatchedAssessment) HasCreated() bool {\n\tif o != nil && o.Created != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "8b1851fdaaeda5b6e67797a48b989f0c", "score": "0.7263977", "text": "func (o *MicrosoftGraphRemoteItem) HasCreatedDateTime() bool {\n\tif o != nil && o.CreatedDateTime != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "0ec628338adf4e8d19a2a88dfaf5c070", "score": "0.7250818", "text": "func (o *DeviceEnrollmentConfiguration) HasCreatedDateTime() bool {\n\tif o != nil && o.CreatedDateTime != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "73ba17c3eb3dec1778609b50d289c3c8", "score": "0.72433376", "text": "func (o *MicrosoftGraphIosCompliancePolicy) HasCreatedDateTime() bool {\n\tif o != nil && o.CreatedDateTime != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "db3263b51160fcca351c76ff9e9e6094", "score": "0.72016746", "text": "func (o *TokenMetadata) HasCreated() bool {\n\tif o != nil && o.Created != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "d6f03d4f95369765661db8c03c1c07c8", 
"score": "0.7179416", "text": "func (o *UMGroupPreferenceAllOf) HasCreated() bool {\n\tif o != nil && o.Created != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "44cca6f2189415f908766076bd9f1310", "score": "0.7082096", "text": "func (o *User) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "25b5d670cbbcb795a84aaf5eca1e800c", "score": "0.7081422", "text": "func (o *DeploymentRelease) HasCreatedOn() bool {\n\tif o != nil && o.CreatedOn != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "9f6623a83f7507933834d71c5975f05f", "score": "0.70611036", "text": "func (o *View) HasCreatedAt() bool {\n\tif o != nil && !IsNil(o.CreatedAt) {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "6ca110fdb5c4ed5ea9738ad2a1916ec1", "score": "0.7040972", "text": "func (o *PatchedApplicationClientUpdate) HasCreated() bool {\n\tif o != nil && o.Created != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "1d9b8e0b03e8331068c8030c08d177e8", "score": "0.70253646", "text": "func (o *MicrosoftGraphWindows81CompliancePolicy) HasCreatedDateTime() bool {\n\tif o != nil && o.CreatedDateTime != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "d091ac307dec84101fb4e4706fe4e5c1", "score": "0.69988656", "text": "func (o *Event) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "e88e5259a71d6fee65d43b87fcc1ac64", "score": "0.69801056", "text": "func (o *ViewTimelog) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "d733f2bc620a8b867ff1ce331699e8db", "score": "0.6945165", "text": "func (o *Integration) HasCreatedAt() bool {\n\tif o != nil && !IsNil(o.CreatedAt) {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "5f449559fb9ee923196792b0ba85774a", "score": "0.6920752", "text": "func (o *ModelsVersion) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "70ad53bf9ce9cedcb5b7e20792455e5b", "score": "0.6909693", "text": "func (o *Workspace) HasCreatedOn() bool {\n\tif o != nil && o.CreatedOn != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "3f5e8a9fce16d6d00f98e75d8352b211", "score": "0.6902275", "text": "func (o *TrialPlThreeYearsResponseTrialPlThreeYears) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "14e7c7f813f5a60f0b323566b3948862", "score": "0.6892981", "text": "func (o *Command) HasCreated() bool {\n\tif o != nil && o.Created != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "d0b260d5b61f6eab03e59e3e2ebc1d73", "score": "0.68745005", "text": "func (o *ApplianceMetaManifest) HasCreationDate() bool {\n\tif o != nil && o.CreationDate != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "6fee89b954c01e9d1f110f087ba39a09", "score": "0.6864816", "text": "func (o *Volume) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "27ebac0a96fc63094fd653bc51434e37", "score": "0.6854992", "text": "func (o *KeyGet) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn 
false\n}", "title": "" }, { "docid": "7087bceba954d231890ebe67e4b188d7", "score": "0.68534565", "text": "func (o *Identity) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "98479bf5748c4d7404d6d3b08fb1fcb4", "score": "0.6852768", "text": "func (o *MicrosoftGraphDeviceEnrollmentWindowsHelloForBusinessConfiguration) HasCreatedDateTime() bool {\n\tif o != nil && o.CreatedDateTime != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "66823bf3a800ed0c486775e77dc06ecd", "score": "0.6842054", "text": "func (o *Organization) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "8fe96e58b7d33cb3316f25180a6c9547", "score": "0.68040186", "text": "func (o *AdminMessagesListDataInner) HasCreated() bool {\n\tif o != nil && o.Created != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "6b9d1c55ff773febb776d51afc363342", "score": "0.6803328", "text": "func (o *NewUserAccount) GetCreated() bool {\n\tif o == nil {\n\t\tvar ret bool\n\t\treturn ret\n\t}\n\n\treturn o.Created\n}", "title": "" }, { "docid": "2879d083875d7c170597890ca10624f6", "score": "0.67966896", "text": "func (o *Ga4ghSequencing) HasCreated() bool {\n\tif o != nil && o.Created != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "85830bd98901963a1b780e9bb88cc995", "score": "0.67947906", "text": "func (o *WebhookMessage) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "b93be1e5671efb8f5ee557c674c29da4", "score": "0.6794736", "text": "func (o *UserResponse) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "f3ff0a67e7102d26aeae02e57a2340e2", "score": "0.67928654", "text": "func (o *StackStack) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "08b00fe0a5c17f0ddb3bc815738fe9d4", "score": "0.6782867", "text": "func (o *ImageAnalysisRequest) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "bef59afdc3b5a2cc26128f09c69313b6", "score": "0.6778629", "text": "func (o *FeedMetadata) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "ba231092e06af4764c5d9c3c445026df", "score": "0.67295986", "text": "func (o *CdnScopeRule) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "5ab83679b0adf3b1dcc70c83596f4d55", "score": "0.6705338", "text": "func (o *ResourceQuotaAllOf) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "c26a590460f20f6b39e33e7679e5c134", "score": "0.66842234", "text": "func (o *Comment) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "813ebe5d4f8e1e687a50d4687bacbc5a", "score": "0.66796553", "text": "func (o *LoginFlow) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "b9b91d3d64fb4716d56a9854adc172dc", "score": "0.6670916", "text": "func (o *MQTTPOSTResponse) 
HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "00d78e51925ab61bbad92e517f1c7bee", "score": "0.665575", "text": "func (o *ViewTimelog) GetDateCreatedOk() (*string, bool) {\n\tif o == nil || o.DateCreated == nil {\n\t\treturn nil, false\n\t}\n\treturn o.DateCreated, true\n}", "title": "" }, { "docid": "007774c50bfa7f116bf797875308f223", "score": "0.66471237", "text": "func (o *NetworkMetadata) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt.IsSet() {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "236998fe6542e13ff2e0e3d6f7294445", "score": "0.6606815", "text": "func (o *ImageImportContentResponse) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "4aac240ed314d25091f978b4bcae1219", "score": "0.6588921", "text": "func (o *RuleAction) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "393519b5cf647a9accaa4cb2eb8810be", "score": "0.6550059", "text": "func (o *StorageDiskResponse) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "21577c716691de0731e94aed759800ec", "score": "0.64795315", "text": "func (o *TransactionsCategoryRule) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "bedd8d3c3b26e899120bcece447f7a02", "score": "0.6429976", "text": "func (o *BsuCreated) HasLinkDate() bool {\n\tif o != nil && o.LinkDate != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "591a501fb2ef3ef05d17a21f057eab06", "score": "0.6409755", "text": "func (o *KafkaRequest) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "e90905ccf19d78f2d199485edb7e20eb", "score": "0.6365174", "text": "func (o *GeoipConfig) GetDateCreatedOk() (*time.Time, bool) {\n\tif o == nil || o.DateCreated == nil {\n\t\treturn nil, false\n\t}\n\treturn o.DateCreated, true\n}", "title": "" }, { "docid": "482452e91d35bcdaab686ad6c12106a5", "score": "0.62968796", "text": "func (o *KubernetesNodeMetadata) GetCreatedDateOk() (*time.Time, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\n\tif o.CreatedDate == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.CreatedDate.Time, true\n\n}", "title": "" }, { "docid": "2b31b87890688ac6cd406aa04d3fb8eb", "score": "0.62841415", "text": "func (o *NewUserAccount) SetCreated(v bool) {\n\to.Created = v\n}", "title": "" }, { "docid": "e53db7aa6787d029bdb8642d27c731a6", "score": "0.627043", "text": "func (o *InlineResponse20030Timers) GetDateCreatedOk() (*string, bool) {\n\tif o == nil || o.DateCreated == nil {\n\t\treturn nil, false\n\t}\n\treturn o.DateCreated, true\n}", "title": "" }, { "docid": "d5e17b341da409e742a252b057521a89", "score": "0.62465096", "text": "func (o *AppInfo) HasCreatedAt() bool {\n\tif o != nil && o.CreatedAt != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "175438367f25fb9c8d14d454119dcad4", "score": "0.62122566", "text": "func (s *AlertChannel) IsCreated() bool {\n\treturn s.Status.IsCreated()\n}", "title": "" }, { "docid": "46179671f43efd35632fa59b69153d03", "score": "0.6211007", "text": "func (r *Role) IsCreated() bool {\n\tif r.Status == nil {\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" }, { 
"docid": "2dc165606f2312d6a0d922a3c7b85224", "score": "0.61280805", "text": "func (o *Filing) HasFiledDate() bool {\n\tif o != nil && o.FiledDate != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "e215b52dbf198e14c227220157a3b1dd", "score": "0.6074841", "text": "func (o LookupDomainResultOutput) Created() pulumi.BoolOutput {\n\treturn o.ApplyT(func(v LookupDomainResult) bool { return v.Created }).(pulumi.BoolOutput)\n}", "title": "" }, { "docid": "062c4b82899a18a7c728f9af2c535038", "score": "0.6069236", "text": "func (o *InlineResponse200124Deliveries) HasDateCalled() bool {\n\tif o != nil && o.DateCalled != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "75372c494dd8833a198a3f6dfacfb757", "score": "0.60201705", "text": "func (o *MicrosoftGraphWorkbookFilterDatetime) HasDate() bool {\n\tif o != nil && o.Date != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "3abff79c6af9281272510c892e9a001e", "score": "0.6011019", "text": "func (o *InvestorFee) GetCreatedOk() (*time.Time, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Created, true\n}", "title": "" }, { "docid": "63f8a8b09736a98a90ee82f4551910f9", "score": "0.5993024", "text": "func (o *PortfolioDetail) GetCreatedOk() (*time.Time, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Created, true\n}", "title": "" }, { "docid": "ba8e6ec96a457c49b2697a6800df3be4", "score": "0.59769267", "text": "func (o *DAGDetail) HasStartDate() bool {\n\tif o != nil && o.StartDate.IsSet() {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "ac6eed9293b8b5fe14bd46a80b41c8a0", "score": "0.59703267", "text": "func (o *PlannerTask) HasCreatedBy() bool {\n\tif o != nil && o.CreatedBy != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "866417eb138613679ba3be74e59868ae", "score": "0.59588206", "text": "func (o *NewUserAccount) GetCreatedOk() (*bool, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Created, true\n}", "title": "" }, { "docid": "12c60dce65d5abfd2dba7df94dd71e37", "score": "0.5909283", "text": "func (o *InvestorFee) HasDateFrom() bool {\n\tif o != nil && o.DateFrom.IsSet() {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "92cd87fdd79b58ad4f285a118833cb86", "score": "0.5905938", "text": "func (o *PatchedInvestorFee) GetCreatedOk() (*time.Time, bool) {\n\tif o == nil || o.Created == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Created, true\n}", "title": "" }, { "docid": "5130587ab742fef9240a2e5853d4bd14", "score": "0.5904903", "text": "func (o *AccessLog) GetCreatedOk() (*time.Time, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Created, true\n}", "title": "" }, { "docid": "3bfa969fe9bb001b28a1819a3cfd807b", "score": "0.5901854", "text": "func (o *LastModifiedFieldFieldSerializerWithRelatedFields) HasDateFormat() bool {\n\tif o != nil && !IsNil(o.DateFormat) {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "fd984dbccf59ac34ce9c90f539da09f5", "score": "0.5901334", "text": "func (o *InlineResponse20036Notebook) GetCreatedDateOk() (*string, bool) {\n\tif o == nil || o.CreatedDate == nil {\n\t\treturn nil, false\n\t}\n\treturn o.CreatedDate, true\n}", "title": "" }, { "docid": "41c80f01936eb78baa207e57842281b1", "score": "0.5888791", "text": "func (o *Timestamped) GetCreatedOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Created, true\n}", "title": "" }, { "docid": 
"cbc6da619bb4434ddaa58405621b351c", "score": "0.58873093", "text": "func (o *Drive) HasCreatedBy() bool {\n\tif o != nil && o.CreatedBy != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "baf768a36bcc7796e6e992baf84ab2f2", "score": "0.58852804", "text": "func (o *MicrosoftGraphSignIn) GetCreatedDateTimeOk() (time.Time, bool) {\n\tif o == nil || o.CreatedDateTime == nil {\n\t\tvar ret time.Time\n\t\treturn ret, false\n\t}\n\treturn *o.CreatedDateTime, true\n}", "title": "" }, { "docid": "ff84f3e2bb52c96ef86973a432334cd7", "score": "0.58623445", "text": "func (o *Tenant) HasCreationTs() bool {\n\tif o != nil && o.CreationTs != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "ee23faeab77d89c30e4f5050fd8e6d91", "score": "0.58481807", "text": "func (o *ApiKey) HasCreationTs() bool {\n\tif o != nil && o.CreationTs != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "9697a4b83e0cd90cdd21a0bf9975ef34", "score": "0.5834188", "text": "func (o *PatchedInvestorFee) HasDateFrom() bool {\n\tif o != nil && o.DateFrom.IsSet() {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "fd636393d4002abf7370c6692c364e49", "score": "0.5830029", "text": "func (host *Host) IsCreated() (created bool, err error) {\n\tcreated, err = host.ResourceBase.IsCreated()\n\tif err != nil {\n\t\treturn false, errors.Trace(err)\n\t}\n\tif created {\n\t\ttime.Sleep(5 * time.Second)\n\t}\n\treturn created, nil\n}", "title": "" }, { "docid": "e631976f90e62397711f6faf99ecedf9", "score": "0.58248496", "text": "func (o *AuthToken) HasCreatedBy() bool {\n\tif o != nil && o.CreatedBy != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "9f5683381d4a6e585f71cd8be897b5f5", "score": "0.5821753", "text": "func (o *DeviceEnrollmentConfiguration) GetCreatedDateTimeOk() (time.Time, bool) {\n\tif o == nil || o.CreatedDateTime == nil {\n\t\tvar ret time.Time\n\t\treturn ret, false\n\t}\n\treturn *o.CreatedDateTime, true\n}", "title": "" }, { "docid": "46b5c6b4fa119dac04484c34319b9e3c", "score": "0.57962924", "text": "func (o *GeoipConfig) SetDateCreated(v time.Time) {\n\to.DateCreated = &v\n}", "title": "" }, { "docid": "ee64bd3b606ddae45ebcd60635a50b82", "score": "0.57960415", "text": "func (o *VirtualizationVmwareVirtualMachineSnapshotAllOf) HasCreationTime() bool {\n\tif o != nil && o.CreationTime != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "bdd619ba38b1effadb5b7107c4f350bb", "score": "0.57837707", "text": "func (o *PlannerTask) GetCreatedDateTimeOk() (time.Time, bool) {\n\tif o == nil || o.CreatedDateTime == nil {\n\t\tvar ret time.Time\n\t\treturn ret, false\n\t}\n\treturn *o.CreatedDateTime, true\n}", "title": "" }, { "docid": "541a8bd5bdc21c1962d119fa4b1a0576", "score": "0.57836103", "text": "func (o *MicrosoftGraphIosCompliancePolicy) GetCreatedDateTimeOk() (time.Time, bool) {\n\tif o == nil || o.CreatedDateTime == nil {\n\t\tvar ret time.Time\n\t\treturn ret, false\n\t}\n\treturn *o.CreatedDateTime, true\n}", "title": "" }, { "docid": "57f34879a6ec7359c5e52d7941635343", "score": "0.5779369", "text": "func (o *RecurringTransfer) GetCreatedOk() (*time.Time, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Created, true\n}", "title": "" }, { "docid": "6f0c3994351849495e4cb7241925d3b6", "score": "0.5771826", "text": "func (o *DowntimeRelationships) HasCreatedBy() bool {\n\treturn o != nil && o.CreatedBy != nil\n}", "title": "" }, { "docid": 
"cb2fdc62ec6c975a976d9e5de4d1a33c", "score": "0.5752723", "text": "func (o *PatchedAssessment) GetCreatedOk() (*time.Time, bool) {\n\tif o == nil || o.Created == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Created, true\n}", "title": "" }, { "docid": "ebdb2c3198bdb9475462188cd994755e", "score": "0.5735889", "text": "func (o *MicrosoftGraphDeviceEnrollmentWindowsHelloForBusinessConfiguration) GetCreatedDateTimeOk() (time.Time, bool) {\n\tif o == nil || o.CreatedDateTime == nil {\n\t\tvar ret time.Time\n\t\treturn ret, false\n\t}\n\treturn *o.CreatedDateTime, true\n}", "title": "" }, { "docid": "0718287b1230081cceef699d96a501c5", "score": "0.5732321", "text": "func (o *ViewTimelog) HasDateDeleted() bool {\n\tif o != nil && o.DateDeleted != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "1b21668a29a0d10ce184be4862243276", "score": "0.5731881", "text": "func (o *KubernetesNodeMetadata) GetCreatedDate() *time.Time {\n\tif o == nil {\n\t\treturn nil\n\t}\n\n\tif o.CreatedDate == nil {\n\t\treturn nil\n\t}\n\treturn &o.CreatedDate.Time\n\n}", "title": "" }, { "docid": "087622a687ee65d7e923ffc63663535e", "score": "0.57292056", "text": "func (o *GeoipConfig) GetDateCreated() time.Time {\n\tif o == nil || o.DateCreated == nil {\n\t\tvar ret time.Time\n\t\treturn ret\n\t}\n\treturn *o.DateCreated\n}", "title": "" }, { "docid": "421683c47299f5ff0896ceb7fa513329", "score": "0.5700715", "text": "func (o *DeploymentRelease) GetCreatedOnOk() (*time.Time, bool) {\n\tif o == nil || o.CreatedOn == nil {\n\t\treturn nil, false\n\t}\n\treturn o.CreatedOn, true\n}", "title": "" }, { "docid": "59482e877fb041f47bf66031c4f08ed7", "score": "0.56976175", "text": "func (o *ViewStatus) HasDateTime() bool {\n\tif o != nil && o.DateTime != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "936ed2601f1d28b4ae437c2dbb52f980", "score": "0.5695992", "text": "func (o *MicrosoftGraphRemoteItem) GetCreatedDateTimeOk() (time.Time, bool) {\n\tif o == nil || o.CreatedDateTime == nil {\n\t\tvar ret time.Time\n\t\treturn ret, false\n\t}\n\treturn *o.CreatedDateTime, true\n}", "title": "" }, { "docid": "9b67937ad8f539fc843a72f5aff93c49", "score": "0.56948376", "text": "func (o *InlineResponse20036Notebook) SetCreatedDate(v string) {\n\to.CreatedDate = &v\n}", "title": "" } ]
9c678e18f6160cea00afc256f3bc0262
Build builds a template by: asserting that function.yaml exists; checking for a Makefile and executing it if found; checking for a Dockerfile and building it with the image name from function.yaml
[ { "docid": "31359683b3ea7e27cdb0174352b541b8", "score": "0.7756209", "text": "func Build(directory string) error {\n\tinfos, err := ioutil.ReadDir(directory)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif !checkForFunctionYaml(infos) {\n\t\treturn errors.New(\"no function.yaml in template\")\n\t}\n\n\tif checkForMakefile(infos) {\n\t\tscript := \"cd %v; make\"\n\t\tscript = fmt.Sprintf(script, directory)\n\t\tcmd := exec.Command(\"bash\", \"-c\", script)\n\t\tcmd.Stdout = os.Stdout\n\t\tcmd.Stderr = os.Stderr\n\t\tif err = cmd.Run(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif checkForDockerfile(infos) {\n\t\timageName, err := getImageNameFromFunctionYaml(directory)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tscript := \"cd %v; docker build -t %v .\"\n\t\tscript = fmt.Sprintf(script, directory, imageName)\n\t\tcmd := exec.Command(\"sh\", \"-c\", script)\n\t\tcmd.Stdout = os.Stdout\n\t\tcmd.Stderr = os.Stderr\n\t\tif err = cmd.Run(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" } ]
[ { "docid": "c9b44f687b922aa4f9a2978005977619", "score": "0.6427295", "text": "func PublishImage(image string, handler string, functionName string, language string, nocache bool, squash bool, shrinkwrap bool, buildArgMap map[string]string,\n\tbuildOptions []string, tagMode schema.BuildFormat, buildLabelMap map[string]string, quietBuild bool, copyExtraPaths []string, platforms string, extraTags []string) error {\n\n\tif stack.IsValidTemplate(language) {\n\t\tpathToTemplateYAML := fmt.Sprintf(\"./template/%s/template.yml\", language)\n\t\tif _, err := os.Stat(pathToTemplateYAML); os.IsNotExist(err) {\n\t\t\treturn err\n\t\t}\n\n\t\tlangTemplate, err := stack.ParseYAMLForLanguageTemplate(pathToTemplateYAML)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error reading language template: %s\", err.Error())\n\t\t}\n\n\t\tbranch, version, err := GetImageTagValues(tagMode)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\timageName := schema.BuildImageName(tagMode, image, version, branch)\n\n\t\tif err := ensureHandlerPath(handler); err != nil {\n\t\t\treturn fmt.Errorf(\"building %s, %s is an invalid path\", imageName, handler)\n\t\t}\n\n\t\ttempPath, buildErr := createBuildContext(functionName, handler, language, isLanguageTemplate(language), langTemplate.HandlerFolder, copyExtraPaths)\n\t\tfmt.Printf(\"Building: %s with %s template. Please wait..\\n\", imageName, language)\n\t\tif buildErr != nil {\n\t\t\treturn buildErr\n\t\t}\n\n\t\tif shrinkwrap {\n\t\t\tfmt.Printf(\"%s shrink-wrapped to %s\\n\", functionName, tempPath)\n\t\t\treturn nil\n\t\t}\n\n\t\tbuildOptPackages, buildPackageErr := getBuildOptionPackages(buildOptions, language, langTemplate.BuildOptions)\n\n\t\tif buildPackageErr != nil {\n\t\t\treturn buildPackageErr\n\n\t\t}\n\n\t\tdockerBuildVal := dockerBuild{\n\t\t\tImage: imageName,\n\t\t\tNoCache: nocache,\n\t\t\tSquash: squash,\n\t\t\tHTTPProxy: os.Getenv(\"http_proxy\"),\n\t\t\tHTTPSProxy: os.Getenv(\"https_proxy\"),\n\t\t\tBuildArgMap: buildArgMap,\n\t\t\tBuildOptPackages: buildOptPackages,\n\t\t\tBuildLabelMap: buildLabelMap,\n\t\t\tPlatforms: platforms,\n\t\t\tExtraTags: extraTags,\n\t\t}\n\n\t\tcommand, args := getDockerBuildxCommand(dockerBuildVal)\n\t\tfmt.Printf(\"Publishing with command: %v %v\\n\", command, args)\n\n\t\ttask := v1execute.ExecTask{\n\t\t\tCwd: tempPath,\n\t\t\tCommand: command,\n\t\t\tArgs: args,\n\t\t\tStreamStdio: !quietBuild,\n\t\t}\n\n\t\tres, err := task.Execute()\n\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif res.ExitCode != 0 {\n\t\t\treturn fmt.Errorf(\"[%s] received non-zero exit code from build, error: %s\", functionName, res.Stderr)\n\t\t}\n\n\t\tfmt.Printf(\"Image: %s built.\\n\", imageName)\n\n\t} else {\n\t\treturn fmt.Errorf(\"language template: %s not supported, build a custom Dockerfile\", language)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "f3579c24e96f03313996a56196ceade0", "score": "0.64114416", "text": "func (c *Controller) BuildTemplate(template *Template, fetcher Fetcher) error {\n var reader DefinitionReader\n if err := reader.Init(fetcher, template); err != nil {\n return err\n }\n\n var tarBuffer bytes.Buffer\n if err := reader.WriteImageTar(&tarBuffer); err != nil {\n return err\n }\n\n var outputBuffer bytes.Buffer\n imageName := c.TemplateImageName(template)\n err := c.docker.BuildImage(dockerclient.BuildImageOptions{\n Name: imageName, InputStream: &tarBuffer, OutputStream: &outputBuffer,\n })\n if err != nil {\n return err\n }\n if err := template.SetBuildOutput(outputBuffer.Bytes()); err != nil 
{\n return err\n }\n\n image, err := c.docker.InspectImage(imageName)\n if err != nil {\n return err\n }\n\n if err := template.SetBaseImage(image.ID); err != nil {\n return err\n }\n\n return nil\n}", "title": "" }, { "docid": "3adda4301f652396bde7ad0f7deab368", "score": "0.62429523", "text": "func Run(ctx context.Context, namespace, buildKitHost string, isOktetoCluster bool, path, dockerFile, tag, target string, noCache bool, cacheFrom, buildArgs, secrets []string, progress string) error {\n\tlog.Infof(\"building your image on %s\", buildKitHost)\n\tbuildkitClient, err := getBuildkitClient(ctx, isOktetoCluster, buildKitHost)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif buildKitHost == okteto.CloudBuildKitURL && dockerFile != \"\" {\n\t\tdockerFile, err = registry.GetDockerfile(path, dockerFile)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer os.Remove(dockerFile)\n\t}\n\n\tif tag != \"\" {\n\t\terr = validateImage(tag)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif registry.IsDevRegistry(tag) {\n\t\ttag, err = registry.ExpandOktetoDevRegistry(ctx, namespace, tag)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor i := range cacheFrom {\n\t\t\tcacheFrom[i], err = registry.ExpandOktetoDevRegistry(ctx, namespace, cacheFrom[i])\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\tif registry.IsGlobalRegistry(tag) {\n\t\ttag, err = registry.ExpandOktetoGlobalRegistry(tag)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor i := range cacheFrom {\n\t\t\tcacheFrom[i], err = registry.ExpandOktetoGlobalRegistry(cacheFrom[i])\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\topt, err := getSolveOpt(path, dockerFile, tag, target, noCache, cacheFrom, buildArgs, secrets)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to create build solver\")\n\t}\n\n\terr = solveBuild(ctx, buildkitClient, opt, progress)\n\tif err != nil {\n\t\tlog.Infof(\"Failed to build image: %s\", err.Error())\n\t}\n\tif registry.IsTransientError(err) {\n\t\tlog.Yellow(`Failed to push '%s' to the registry:\n %s,\n Retrying ...`, tag, err.Error())\n\t\tsuccess := true\n\t\terr := solveBuild(ctx, buildkitClient, opt, progress)\n\t\tif err != nil {\n\t\t\tsuccess = false\n\t\t\tlog.Infof(\"Failed to build image: %s\", err.Error())\n\t\t}\n\t\terr = registry.GetErrorMessage(err, tag)\n\t\tanalytics.TrackBuildTransientError(buildKitHost, success)\n\t\treturn err\n\t}\n\n\terr = registry.GetErrorMessage(err, tag)\n\n\treturn err\n}", "title": "" }, { "docid": "b943b2282f2619702491395fb391c574", "score": "0.61561155", "text": "func BuildFunc(directory, outputDir, imageTag, imageBuilder, packageName, channels, channelDefault string,\n\toverwrite bool) error {\n\t_, err := os.Stat(directory)\n\tif os.IsNotExist(err) {\n\t\treturn err\n\t}\n\n\t// Generate annotations.yaml and Dockerfile\n\terr = GenerateFunc(directory, outputDir, packageName, channels, channelDefault, overwrite)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Build bundle image\n\tlog.Info(\"Building bundle image\")\n\tbuildCmd, err := BuildBundleImage(imageTag, imageBuilder)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := ExecuteCommand(buildCmd); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "86fc3e58bfa5e57e985dfe829dae9359", "score": "0.6096805", "text": "func (d *Docker) buildImage(ctx context.Context, dockerfile string, image string) error {\n\tif dockerfile == \"\" || image == \"\" {\n\t\treturn fmt.Errorf(\"failed to build image %s: missing Dockerfile in 
Tests/scripts/ or image name in run.sh\", image)\n\t}\n\tdockerFileContents := []byte(dockerfile)\n\theader := &tar.Header{\n\t\tName: \"Dockerfile\",\n\t\tMode: 0o777,\n\t\tSize: int64(len(dockerFileContents)),\n\t\tTypeflag: tar.TypeReg,\n\t}\n\tvar buf bytes.Buffer\n\ttarWriter := tar.NewWriter(&buf)\n\tif err := tarWriter.WriteHeader(header); err != nil {\n\t\treturn err\n\t}\n\tif _, err := tarWriter.Write(dockerFileContents); err != nil {\n\t\treturn err\n\t}\n\tif err := tarWriter.Close(); err != nil {\n\t\treturn err\n\t}\n\n\treader := bytes.NewReader(buf.Bytes())\n\topts := types.ImageBuildOptions{\n\t\tContext: reader,\n\t\tDockerfile: \"Dockerfile\",\n\t\tTags: []string{image},\n\t}\n\tres, err := d.client.ImageBuild(ctx, reader, opts)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer res.Body.Close()\n\n\terr = print(d.logger, res.Body)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "50e4ccf7ab8a7abf207551a2aac6faf8", "score": "0.6030819", "text": "func (b *buildcmd) build(c *cli.Context) error {\n\tdir := common.GetDir(c)\n\n\tpath := c.Args().First()\n\tif path != \"\" {\n\t\tfmt.Printf(\"Building function at: ./%s\\n\", path)\n\t\tdir = filepath.Join(dir, path)\n\t}\n\n\terr := os.Chdir(dir)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer os.Chdir(dir)\n\n\tffV, err := common.ReadInFuncFile()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tswitch common.GetFuncYamlVersion(ffV) {\n\tcase common.LatestYamlVersion:\n\t\tfpath, ff, err := common.FindAndParseFuncFileV20180708(dir)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tbuildArgs := c.StringSlice(\"build-arg\")\n\n\t\t// Passing empty shape for build command\n\t\tff, err = common.BuildFuncV20180708(common.IsVerbose(), fpath, ff, buildArgs, b.noCache, \"\")\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tfmt.Printf(\"Function %v built successfully.\\n\", ff.ImageNameV20180708())\n\t\treturn nil\n\n\tdefault:\n\t\tfpath, ff, err := common.FindAndParseFuncfile(dir)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tbuildArgs := c.StringSlice(\"build-arg\")\n\t\tff, err = common.BuildFunc(common.IsVerbose(), fpath, ff, buildArgs, b.noCache)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tfmt.Printf(\"Function %v built successfully.\\n\", ff.ImageName())\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "543b1db36b6f45c1b1ab27208e64bcac", "score": "0.599819", "text": "func BuildDockerImages(config Config, project string, definedTag string) {\n\n\tvar build Build\n\n\tfilename := fmt.Sprintf(\"%s/%s/%s\", config.ConfigFolder, project, \"build.yaml\")\n\tfile, err := ioutil.ReadFile(filename)\n\n\tif err != nil {\n\t\tpanic(\"Cannot read build.yaml|yml file\")\n\t}\n\n\terr = yaml.Unmarshal(file, &build)\n\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"Cannot parse file %s\", filename))\n\t}\n\n\tfor _, buildData := range build.Dockerfile {\n\t\t// check if the required arguments are set\n\t\tif buildData.Path == \"\" || buildData.Tag == \"\" {\n\t\t\tAlert(\"ERR\", \"Tag and path required!\", false)\n\t\t}\n\n\t\tvar context string\n\t\tvar dockerfile string\n\n\t\t// setting up the name of the dockerfile\n\t\tif buildData.Dockerfile == \"\" {\n\t\t\tdockerfile = \"Dockerfile\"\n\t\t} else {\n\t\t\tdockerfile = buildData.Dockerfile\n\t\t}\n\n\t\t// checking if string has prefix \"/\"\n\t\tif strings.HasPrefix(buildData.Path, \"/\") {\n\t\t\tcontext = buildData.Path\n\t\t} else {\n\t\t\tcontext = fmt.Sprintf(\"%s/%s\", config.ProjectFolder, buildData.Path)\n\t\t}\n\n\t\t// 
checking if tag is set and definition from yaml meets the criteria\n\t\tvar useTag string\n\t\tif definedTag != \"\" {\n\t\t\tif strings.Contains(buildData.Tag, \":\") {\n\t\t\t\tAlert(\"ERR\", \"Already defined an tag in the `tag` definition from yaml!\", false)\n\t\t\t}\n\t\t\tuseTag = fmt.Sprintf(\"%s:%s\", buildData.Tag, definedTag)\n\t\t} else {\n\t\t\tuseTag = buildData.Tag\n\t\t}\n\n\t\t// create actual build command and execute\n\t\tcmd := fmt.Sprintf(\"docker build -t %s -f %s/%s %s\", useTag, context, dockerfile, context)\n\t\tpush := fmt.Sprintf(\"docker push %s\", useTag)\n\t\tAlert(\"NOTICE\", \"Executing: \" + cmd, true)\n\n\t\tif appConfig.pushBuild {\n\t\t\tAlert(\"NOTICE\", \"Executing: \" + push, true)\n\t\t}\n\n\t\tif !appConfig.dryRun {\n\t\t\tExecCommand(strings.Split(cmd, \" \"))\n\t\t\tif appConfig.pushBuild {\n\t\t\t\tExecCommand(strings.Split(push, \" \"))\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "20d212c57592bd78d8ed9bef76750749", "score": "0.59923106", "text": "func Build(yamlAppConfigFileName string) {\n\n\tappInfo := appinfo.NewAppInfo(yamlAppConfigFileName)\n\tdockerClient := NewDockerClient(appInfo)\n\n\tAppFileDir := appInfo.GetAppFileDir()\n\n\tcontainerLabel := dockerClient.getDockerContainerLabel()\n\n\thashStr := dockerClient.getDockerContainerHash()\n\n\tdockerClient.build(AppFileDir, containerLabel, hashStr)\n}", "title": "" }, { "docid": "dc7f5811e38e76a5ba4041ac25c5140d", "score": "0.5953742", "text": "func build(args []string) {\n\tvar buildOut outputList\n\n\toutputTypes := []string{}\n\tfor k := range outFuns {\n\t\toutputTypes = append(outputTypes, k)\n\t}\n\tsort.Strings(outputTypes)\n\n\tbuildCmd := flag.NewFlagSet(\"build\", flag.ExitOnError)\n\tbuildCmd.Usage = func() {\n\t\tfmt.Printf(\"USAGE: %s build [options] <file>[.yml] | -\\n\\n\", os.Args[0])\n\t\tfmt.Printf(\"Options:\\n\")\n\t\tbuildCmd.PrintDefaults()\n\t}\n\tbuildName := buildCmd.String(\"name\", \"\", \"Name to use for output files\")\n\tbuildDir := buildCmd.String(\"dir\", \"\", \"Directory for output files, default current directory\")\n\tbuildSize := buildCmd.String(\"size\", \"1024M\", \"Size for output image, if supported and fixed size\")\n\tbuildPull := buildCmd.Bool(\"pull\", false, \"Always pull images\")\n\tbuildDisableTrust := buildCmd.Bool(\"disable-content-trust\", false, \"Skip image trust verification specified in trust section of config (default false)\")\n\tbuildHyperkit := buildCmd.Bool(\"hyperkit\", false, \"Use hyperkit for LinuxKit based builds where possible\")\n\tbuildCmd.Var(&buildOut, \"output\", \"Output types to create [ \"+strings.Join(outputTypes, \" \")+\" ]\")\n\n\tif err := buildCmd.Parse(args); err != nil {\n\t\tlog.Fatal(\"Unable to parse args\")\n\t}\n\tremArgs := buildCmd.Args()\n\n\tif len(remArgs) == 0 {\n\t\tfmt.Println(\"Please specify a configuration file\")\n\t\tbuildCmd.Usage()\n\t\tos.Exit(1)\n\t}\n\n\tif len(buildOut) == 0 {\n\t\tbuildOut = outputList{\"kernel+initrd\"}\n\t}\n\n\tlog.Debugf(\"Outputs selected: %s\", buildOut.String())\n\n\terr := validateOutputs(buildOut)\n\tif err != nil {\n\t\tlog.Errorf(\"Error parsing outputs: %v\", err)\n\t\tbuildCmd.Usage()\n\t\tos.Exit(1)\n\t}\n\n\tsize, err := getDiskSizeMB(*buildSize)\n\tif err != nil {\n\t\tlog.Fatalf(\"Unable to parse disk size: %v\", err)\n\t}\n\n\tname := *buildName\n\tvar config []byte\n\tif conf := remArgs[0]; conf == \"-\" {\n\t\tvar err error\n\t\tconfig, err = ioutil.ReadAll(os.Stdin)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Cannot read stdin: %v\", 
err)\n\t\t}\n\t\tif name == \"\" {\n\t\t\tname = defaultNameForStdin\n\t\t}\n\t} else {\n\t\tif !(filepath.Ext(conf) == \".yml\" || filepath.Ext(conf) == \".yaml\") {\n\t\t\tconf = conf + \".yml\"\n\t\t}\n\t\tvar err error\n\t\tconfig, err = ioutil.ReadFile(conf)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Cannot open config file: %v\", err)\n\t\t}\n\t\tif name == \"\" {\n\t\t\tname = strings.TrimSuffix(filepath.Base(conf), filepath.Ext(conf))\n\t\t}\n\t}\n\n\tm, err := NewConfig(config)\n\tif err != nil {\n\t\tlog.Fatalf(\"Invalid config: %v\", err)\n\t}\n\n\tif *buildDisableTrust {\n\t\tlog.Debugf(\"Disabling content trust checks for this build\")\n\t\tm.Trust = TrustConfig{}\n\t}\n\n\timage := buildInternal(m, *buildPull)\n\n\tlog.Infof(\"Create outputs:\")\n\terr = outputs(filepath.Join(*buildDir, name), image, buildOut, size, *buildHyperkit)\n\tif err != nil {\n\t\tlog.Fatalf(\"Error writing outputs: %v\", err)\n\t}\n}", "title": "" }, { "docid": "1f476b5f0304189a6ae825f1dc0ebcd8", "score": "0.5928955", "text": "func buildImage(ctx context.Context, apiClient *dockerapi.Client, basePath, imageName string, config *types.BuildConfig) error {\n\tlog.Debugf(\"buildImage(%s,%s)\", basePath, imageName)\n\n\tvar output bytes.Buffer\n\tbuildOptions := dockerapi.BuildImageOptions{\n\t\tContext: ctx,\n\t\tName: imageName,\n\t\tDockerfile: config.Dockerfile,\n\t\tOutputStream: &output,\n\t\tRmTmpContainer: true,\n\t\tForceRmTmpContainer: true,\n\t\tTarget: config.Target,\n\t\tRawJSONStream: true,\n\t}\n\t/*\n\t {\"stream\":\"\"}\n\t*/\n\n\t//buildOptions.NetworkMode = config.Network\n\n\tfor key, val := range config.Args {\n\t\tif val == nil {\n\t\t\targ := dockerapi.BuildArg{\n\t\t\t\tName: key,\n\t\t\t}\n\n\t\t\tbuildOptions.BuildArgs = append(buildOptions.BuildArgs, arg)\n\t\t\tcontinue\n\t\t}\n\n\t\targ := dockerapi.BuildArg{\n\t\t\tName: key,\n\t\t\tValue: *val,\n\t\t}\n\t\tbuildOptions.BuildArgs = append(buildOptions.BuildArgs, arg)\n\t}\n\n\tfor key, val := range config.Labels {\n\t\tbuildOptions.Labels[key] = val\n\t}\n\n\tfor _, val := range config.CacheFrom {\n\t\tbuildOptions.CacheFrom = append(buildOptions.CacheFrom, val)\n\t}\n\n\t//TODO: investigate []string to string\n\tif len(config.ExtraHosts) > 0 {\n\t\tbuildOptions.ExtraHosts = config.ExtraHosts[0]\n\t}\n\n\tif strings.HasPrefix(config.Context, \"http://\") || strings.HasPrefix(config.Context, \"https://\") {\n\t\tbuildOptions.Remote = config.Context\n\t} else {\n\t\tcontextDir := config.Context\n\t\tif !strings.HasPrefix(contextDir, \"/\") {\n\t\t\tcontextDir = filepath.Join(basePath, contextDir)\n\t\t}\n\n\t\tif info, err := os.Stat(contextDir); err == nil && info.IsDir() {\n\t\t\tbuildOptions.ContextDir = contextDir\n\t\t} else {\n\t\t\treturn fmt.Errorf(\"invalid context directory - %s\", contextDir)\n\t\t}\n\t}\n\n\tif err := apiClient.BuildImage(buildOptions); err != nil {\n\t\tlog.Debugf(\"buildImage: dockerapi.BuildImage() error = %v\", err)\n\t\treturn err\n\t}\n\n\tfmt.Println(\"build output:\")\n\tfmt.Println(output.String())\n\tfmt.Println(\"build output [DONE]\")\n\n\treturn nil\n}", "title": "" }, { "docid": "decccff043cdb3f0928f0ea2be1b9d82", "score": "0.59218866", "text": "func (b *Builder) BuildImage(ctx devspacecontext.Context, contextPath, dockerfilePath string, entrypoint []string, cmd []string) error {\n\tvar (\n\t\tdisplayRegistryURL = \"hub.docker.com\"\n\t)\n\n\t// Display nice registry name\n\tregistryURL, err := pullsecrets.GetRegistryFromImageName(b.helper.ImageName)\n\tif err != nil {\n\t\treturn 
err\n\t}\n\tif registryURL != \"\" {\n\t\tdisplayRegistryURL = registryURL\n\t}\n\n\t// We skip pushing when it is the minikube client\n\tif b.skipPushOnLocalKubernetes && ctx.KubeClient() != nil && kubectl.IsLocalKubernetes(ctx.KubeClient()) {\n\t\tb.skipPush = true\n\t}\n\n\t// Authenticate\n\tif !b.skipPush && !b.helper.ImageConf.SkipPush {\n\t\tif pullsecrets.IsAzureContainerRegistry(registryURL) {\n\t\t\tctx.Log().Warn(\"Using an Azure Container Registry(ACR), skipping authentication. You may need to refresh your credentials by running 'az acr login'\")\n\t\t\tb.authConfig, err = b.client.GetAuthConfig(ctx.Context(), registryURL, true)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\tctx.Log().Info(\"Authenticating (\" + displayRegistryURL + \")...\")\n\t\t\t_, err = b.Authenticate(ctx.Context())\n\t\t\tif err != nil {\n\t\t\t\treturn errors.Errorf(\"Error during image registry authentication: %v\", err)\n\t\t\t}\n\n\t\t\tctx.Log().Done(\"Authentication successful (\" + displayRegistryURL + \")\")\n\t\t}\n\t}\n\n\t// create context stream\n\tbody, writer, outStream, buildOptions, err := b.helper.CreateContextStream(contextPath, dockerfilePath, entrypoint, cmd, ctx.Log())\n\tdefer writer.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Should we build with cli?\n\tuseBuildKit := false\n\tuseDockerCli := b.helper.ImageConf.Docker != nil && b.helper.ImageConf.Docker.UseCLI\n\tcliArgs := []string{}\n\tif b.helper.ImageConf.Docker != nil {\n\t\tcliArgs = b.helper.ImageConf.Docker.Args\n\t\tif b.helper.ImageConf.Docker.UseBuildKit {\n\t\t\tuseBuildKit = true\n\t\t}\n\t}\n\tif useDockerCli || useBuildKit || len(cliArgs) > 0 {\n\t\terr = b.client.ImageBuildCLI(ctx.Context(), ctx.WorkingDir(), ctx.Environ(), useBuildKit, body, writer, cliArgs, *buildOptions, ctx.Log())\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\t// make sure to use the correct proxy configuration\n\t\tbuildOptions.BuildArgs = b.client.ParseProxyConfig(buildOptions.BuildArgs)\n\n\t\tresponse, err := b.client.ImageBuild(ctx.Context(), body, *buildOptions)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer response.Body.Close()\n\n\t\terr = jsonmessage.DisplayJSONMessagesStream(response.Body, outStream, outStream.FD(), outStream.IsTerminal(), nil)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Check if we skip push\n\tif !b.skipPush && !b.helper.ImageConf.SkipPush {\n\t\tfor _, tag := range buildOptions.Tags {\n\t\t\terr = b.pushImage(ctx.Context(), writer, tag)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.Errorf(\"error during image push: %v\", err)\n\t\t\t}\n\n\t\t\tctx.Log().Info(\"Image pushed to registry (\" + displayRegistryURL + \")\")\n\t\t}\n\t} else if ctx.KubeClient() != nil && kubectl.GetKindContext(ctx.KubeClient().CurrentContext()) != \"\" {\n\t\t// Load image if it is a kind-context\n\t\tfor _, tag := range buildOptions.Tags {\n\t\t\tcommand := []string{\"kind\", \"load\", \"docker-image\", \"--name\", kubectl.GetKindContext(ctx.KubeClient().CurrentContext()), tag}\n\t\t\tcompleteArgs := []string{}\n\t\t\tcompleteArgs = append(completeArgs, command[1:]...)\n\t\t\terr = command2.Command(ctx.Context(), ctx.WorkingDir(), ctx.Environ(), writer, writer, nil, command[0], completeArgs...)\n\t\t\tif err != nil {\n\t\t\t\tctx.Log().Info(errors.Errorf(\"error during image load to kind cluster: %v\", err))\n\t\t\t}\n\t\t\tctx.Log().Info(\"Image loaded to kind cluster\")\n\t\t}\n\t} else {\n\t\tctx.Log().Infof(\"Skip image push for %s\", 
b.helper.ImageName)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "323c2320f66c78d4c2c32da263832dbc", "score": "0.5917816", "text": "func DockerfileBuild(sourceImage, dockerfile string) BuildSpec {\n\t// The params set here should match the params in\n\t// pkg/reconciler/build/resources/builtin_tasks.go\n\treturn BuildSpec{\n\t\tBuildTaskRef: dockerfileBuildTaskRef(),\n\t\tParams: []BuildParam{\n\t\t\tStringParam(SourceImageParamName, sourceImage),\n\t\t\tStringParam(\"DOCKERFILE\", dockerfile),\n\t\t},\n\t}\n}", "title": "" }, { "docid": "84b4c9ca91f75515a4f62c499088d815", "score": "0.58900917", "text": "func createImage(imageCreator ImageCreator, repoName string, appName string, revision string, baseImage string) (*SourceInfo, *DockerInfo) {\n\n\tconst validTestZip = \"../../testresources/echo-test.zip\"\n\n\tworkspace, dockerInfo := doSetup(validTestZip, repoName, appName, revision, baseImage)\n\n\t//clean up the workspace after the test. Comment this out for debugging\n\t//defer workspace.Clean()\n\n\tdockerImage := &DockerBuild{\n\t\tTarFile: workspace.TargetTarName,\n\t\tDockerInfo: dockerInfo,\n\t}\n\n\t//copy over our docker file. These tests assume io has been tested and works properly\n\n\tstream, err := imageCreator.BuildImage(dockerImage)\n\n\tExpect(err).Should(BeNil(), \"Unable to build image\", err)\n\n\tchannelToOutput(stream)\n\n\t//pull by label\n\n\tassertLocalImageExists(imageCreator, dockerImage.DockerInfo)\n\n\treturn workspace, dockerImage.DockerInfo\n\n}", "title": "" }, { "docid": "76a5532fcdd32ba6194d047552f6a692", "score": "0.58357346", "text": "func (b *buildcmd) build(c *cli.Context) error {\n\tpath, err := os.Getwd()\n\tif err != nil {\n\t\treturn err\n\t}\n\tfpath, ff, err := findAndParseFuncfile(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tff, err = buildfunc(c, fpath, ff, b.noCache)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Printf(\"Function %v built successfully.\\n\", ff.ImageName())\n\treturn nil\n}", "title": "" }, { "docid": "551ce150d2cd6f2eaf6875e1f9a4d7fd", "score": "0.58312094", "text": "func (c *FunctionController) startImageBuildJob(funcObj *kubelessApi.Function, or []metav1.OwnerReference) (string, bool, error) {\n\timagePullSecret, err := c.clientset.CoreV1().Secrets(funcObj.ObjectMeta.Namespace).Get(\"kubeless-registry-credentials\", metav1.GetOptions{})\n\tif err != nil {\n\t\treturn \"\", false, fmt.Errorf(\"Unable to locate registry credentials to build function image: %v\", err)\n\t}\n\treg, err := registry.New(*imagePullSecret)\n\tif err != nil {\n\t\treturn \"\", false, fmt.Errorf(\"Unable to retrieve registry information: %v\", err)\n\t}\n\t// Use function content and deps as tag (digested)\n\ttag := fmt.Sprintf(\"%x\", sha256.Sum256([]byte(fmt.Sprintf(\"%v%v\", funcObj.Spec.Function, funcObj.Spec.Deps))))\n\timageName := fmt.Sprintf(\"%s/%s\", reg.Creds.Username, funcObj.ObjectMeta.Name)\n\t// Check if image already exists\n\texists, err := reg.ImageExists(imageName, tag)\n\tif err != nil {\n\t\treturn \"\", false, fmt.Errorf(\"Unable to check is target image exists: %v\", err)\n\t}\n\tregURL, err := url.Parse(reg.Endpoint)\n\tif err != nil {\n\t\treturn \"\", false, fmt.Errorf(\"Unable to parse registry URL: %v\", err)\n\t}\n\timage := fmt.Sprintf(\"%s/%s:%s\", regURL.Host, imageName, tag)\n\tif !exists {\n\t\ttlsVerify := true\n\t\tif c.config.Data[\"function-registry-tls-verify\"] == \"false\" {\n\t\t\ttlsVerify = false\n\t\t}\n\t\terr = utils.EnsureFuncImage(c.clientset, funcObj, c.langRuntime, or, imageName, 
tag, c.config.Data[\"builder-image\"], regURL.Host, imagePullSecret.Name, c.config.Data[\"provision-image\"], tlsVerify, c.imagePullSecrets)\n\t\tif err != nil {\n\t\t\treturn \"\", false, fmt.Errorf(\"Unable to create image build job: %v\", err)\n\t\t}\n\t} else {\n\t\t// Image already exists\n\t\treturn image, false, nil\n\t}\n\treturn image, true, nil\n}", "title": "" }, { "docid": "77edbe8a7a4c87fdb30de62d30f1420e", "score": "0.5799314", "text": "func createDockerComposeFile(absolutepath string, s service) error {\n\toutputPath := filepath.Join(absolutepath, \"docker-compose.yml\")\n\t// Create the file:\n\tf, err := os.Create(outputPath)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tdefer f.Close() // don't forget to close the file when finished.\n\ttempl, err := template.ParseFS(dockerTempl, \"docker-compose-template.yml\")\n\tif err != nil {\n\t\treturn fmt.Errorf(\"ERROR: parsing template file %v\", err)\n\t}\n\t// TODO: not sure is there a better way to pass data to template\n\t// A lot of this data is redundant. Already available in Service struct\n\tdata := struct {\n\t\tUserID string\n\t\tArchitecture string\n\t\tType string\n\t\tPort int\n\t\tSecret string\n\t}{\n\t\ts.UserID,\n\t\ts.Architecture,\n\t\ts.Db.Type,\n\t\ts.Db.Port,\n\t\t\"replaceme\",\n\t}\n\terr = templ.Execute(f, data)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"ERROR: executing template file %v\", err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "4b2bcc27a4b85b1d72d3435bb20e413f", "score": "0.57334816", "text": "func (dc *DockerClient) build(dockerBuildCtx string, label string, hashStr string) {\n\n\t// remove all previous images\n\tdc.removeImages(label)\n\n\t// create temporary Dockerfile\n\ttmpDockerFile := dc.createTempDockerFile(dockerBuildCtx, label)\n\tdefer filesystem.Remove(tmpDockerFile.Name())\n\n\tcmdDockerRun := cmd_build.NewBuildCommand(dc.dockerCli)\n\tbuildCmdArgs := dc.getBuildCmdArgs(tmpDockerFile.Name(), dockerBuildCtx, label, hashStr)\n\tcmdDockerRun.SetArgs(buildCmdArgs)\n\tcmdDockerRun.SilenceErrors = true\n\tcmdDockerRun.SilenceUsage = true\n\n\tlog.Debug(\"execute \\\"docker build \" + strings.Join(buildCmdArgs, \" \") + \"\\\"\")\n\n\terr := cmdDockerRun.Execute()\n\tutil.CheckErr(err)\n\n\ttmpDockerFile.Close()\n\tutil.CheckErr(err)\n}", "title": "" }, { "docid": "e0349ccc9c533ba4400c80cfbb05eee8", "score": "0.5717907", "text": "func buildCandidateDockerImage(app, version, tag string) Operation {\n\treturn func(pipeline *bk.Pipeline) {\n\t\timage := strings.ReplaceAll(app, \"/\", \"-\")\n\t\tlocalImage := \"sourcegraph/\" + image + \":\" + version\n\n\t\tcmds := []bk.StepOpt{\n\t\t\tbk.Cmd(fmt.Sprintf(`echo \"Building candidate %s image...\"`, app)),\n\t\t\tbk.Env(\"DOCKER_BUILDKIT\", \"1\"),\n\t\t\tbk.Env(\"IMAGE\", localImage),\n\t\t\tbk.Env(\"VERSION\", version),\n\t\t\tbk.Cmd(\"yes | gcloud auth configure-docker\"),\n\t\t}\n\n\t\tif _, err := os.Stat(filepath.Join(\"docker-images\", app)); err == nil {\n\t\t\t// Building Docker image located under $REPO_ROOT/docker-images/\n\t\t\tcmds = append(cmds, bk.Cmd(filepath.Join(\"docker-images\", app, \"build.sh\")))\n\t\t} else {\n\t\t\t// Building Docker images located under $REPO_ROOT/cmd/\n\t\t\tcmdDir := func() string {\n\t\t\t\tif _, err := os.Stat(filepath.Join(\"enterprise/cmd\", app)); err != nil {\n\t\t\t\t\tfmt.Fprintf(os.Stderr, \"github.com/sourcegraph/sourcegraph/enterprise/cmd/%s does not exist so building github.com/sourcegraph/sourcegraph/cmd/%s instead\\n\", app, app)\n\t\t\t\t\treturn \"cmd/\" + 
app\n\t\t\t\t}\n\t\t\t\treturn \"enterprise/cmd/\" + app\n\t\t\t}()\n\t\t\tpreBuildScript := cmdDir + \"/pre-build.sh\"\n\t\t\tif _, err := os.Stat(preBuildScript); err == nil {\n\t\t\t\tcmds = append(cmds, bk.Cmd(preBuildScript))\n\t\t\t}\n\t\t\tcmds = append(cmds, bk.Cmd(cmdDir+\"/build.sh\"))\n\t\t}\n\n\t\tdevImage := fmt.Sprintf(\"%s/%s\", images.SourcegraphDockerDevRegistry, image)\n\t\tcmds = append(cmds,\n\t\t\t// Retag the local image for dev registry\n\t\t\tbk.Cmd(fmt.Sprintf(\"docker tag %s %s:%s\", localImage, devImage, tag)),\n\t\t\t// Publish tagged image\n\t\t\tbk.Cmd(fmt.Sprintf(\"docker push %s:%s\", devImage, tag)),\n\t\t)\n\n\t\tpipeline.AddStep(fmt.Sprintf(\":docker: :construction: %s\", app), cmds...)\n\t}\n}", "title": "" }, { "docid": "cde24a41a00e65aca495acf13843bc3d", "score": "0.56922823", "text": "func getDockerfile(w http.ResponseWriter, r *http.Request) {\n\tparts := []string{\n\t\t\"ARG BASE_IMAGE\",\n\t\t\"FROM $BASE_IMAGE\",\n\t\t\"ARG URL\",\n\t\t\"ARG EXECUTABLE_SUB_PATH\",\n\t\t\"ARG BINARY_NAME\",\n\t\t\"ADD ${URL}/clients/${EXECUTABLE_SUB_PATH} /\",\n\t\t\"RUN chmod 0777 /${BINARY_NAME}\",\n\t}\n\n\tgimlet.WriteText(w, strings.Join(parts, \"\\n\"))\n}", "title": "" }, { "docid": "487e1b937063e3c4b8eb92327e036307", "score": "0.5647035", "text": "func executeContainerTemplate(info *containerInfo, options entities.GenerateSystemdOptions) (string, error) {\n\tif options.RestartPolicy != nil {\n\t\tif err := validateRestartPolicy(*options.RestartPolicy); err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\tinfo.RestartPolicy = *options.RestartPolicy\n\t}\n\n\t// Make sure the executable is set.\n\tif info.Executable == \"\" {\n\t\texecutable, err := os.Executable()\n\t\tif err != nil {\n\t\t\texecutable = \"/usr/bin/podman\"\n\t\t\tlogrus.Warnf(\"Could not obtain podman executable location, using default %s\", executable)\n\t\t}\n\t\tinfo.Executable = executable\n\t}\n\n\tinfo.Type = \"forking\"\n\tinfo.EnvVariable = define.EnvVariable\n\tinfo.ExecStart = \"{{{{.Executable}}}} start {{{{.ContainerNameOrID}}}}\"\n\tinfo.ExecStop = \"{{{{.Executable}}}} stop {{{{if (ge .StopTimeout 0)}}}}-t {{{{.StopTimeout}}}}{{{{end}}}} {{{{.ContainerNameOrID}}}}\"\n\tinfo.ExecStopPost = \"{{{{.Executable}}}} stop {{{{if (ge .StopTimeout 0)}}}}-t {{{{.StopTimeout}}}}{{{{end}}}} {{{{.ContainerNameOrID}}}}\"\n\n\t// Assemble the ExecStart command when creating a new container.\n\t//\n\t// Note that we cannot catch all corner cases here such that users\n\t// *must* manually check the generated files. A container might have\n\t// been created via a Python script, which would certainly yield an\n\t// invalid `info.CreateCommand`. 
Hence, we're doing a best effort unit\n\t// generation and don't try aiming at completeness.\n\tif options.New {\n\t\tinfo.Type = \"notify\"\n\t\tinfo.NotifyAccess = \"all\"\n\t\tinfo.PIDFile = \"\"\n\t\tinfo.ContainerIDFile = \"%t/%n.ctr-id\"\n\t\tinfo.ExecStartPre = \"/bin/rm -f {{{{.ContainerIDFile}}}}\"\n\t\tinfo.ExecStop = \"{{{{.Executable}}}} stop --ignore --cidfile={{{{.ContainerIDFile}}}}\"\n\t\tinfo.ExecStopPost = \"{{{{.Executable}}}} rm -f --ignore --cidfile={{{{.ContainerIDFile}}}}\"\n\t\t// The create command must at least have three arguments:\n\t\t// \t/usr/bin/podman run $IMAGE\n\t\tindex := 0\n\t\tfor i, arg := range info.CreateCommand {\n\t\t\tif arg == \"run\" || arg == \"create\" {\n\t\t\t\tindex = i + 1\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif index == 0 {\n\t\t\treturn \"\", errors.Errorf(\"container's create command is too short or invalid: %v\", info.CreateCommand)\n\t\t}\n\t\t// We're hard-coding the first five arguments and append the\n\t\t// CreateCommand with a stripped command and subcommand.\n\t\tstartCommand := []string{info.Executable}\n\t\tif index > 2 {\n\t\t\t// include root flags\n\t\t\tinfo.RootFlags = strings.Join(escapeSystemdArguments(info.CreateCommand[1:index-1]), \" \")\n\t\t\tstartCommand = append(startCommand, info.CreateCommand[1:index-1]...)\n\t\t}\n\t\tstartCommand = append(startCommand,\n\t\t\t\"run\",\n\t\t\t\"--cidfile={{{{.ContainerIDFile}}}}\",\n\t\t\t\"--cgroups=no-conmon\",\n\t\t\t\"--rm\",\n\t\t)\n\t\tremainingCmd := info.CreateCommand[index:]\n\n\t\t// Presence check for certain flags/options.\n\t\tfs := pflag.NewFlagSet(\"args\", pflag.ContinueOnError)\n\t\tfs.ParseErrorsWhitelist.UnknownFlags = true\n\t\tfs.Usage = func() {}\n\t\tfs.SetInterspersed(false)\n\t\tfs.BoolP(\"detach\", \"d\", false, \"\")\n\t\tfs.String(\"name\", \"\", \"\")\n\t\tfs.Bool(\"replace\", false, \"\")\n\t\tfs.StringArrayP(\"env\", \"e\", nil, \"\")\n\t\tfs.String(\"sdnotify\", \"\", \"\")\n\t\tfs.String(\"restart\", \"\", \"\")\n\t\tfs.Parse(remainingCmd)\n\n\t\tremainingCmd = filterCommonContainerFlags(remainingCmd, fs.NArg())\n\t\t// If the container is in a pod, make sure that the\n\t\t// --pod-id-file is set correctly.\n\t\tif info.Pod != nil {\n\t\t\tpodFlags := []string{\"--pod-id-file\", \"{{{{.Pod.PodIDFile}}}}\"}\n\t\t\tstartCommand = append(startCommand, podFlags...)\n\t\t\tremainingCmd = filterPodFlags(remainingCmd, fs.NArg())\n\t\t}\n\n\t\thasDetachParam, err := fs.GetBool(\"detach\")\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\thasNameParam := fs.Lookup(\"name\").Changed\n\t\thasReplaceParam, err := fs.GetBool(\"replace\")\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\n\t\t// Default to --sdnotify=conmon unless already set by the\n\t\t// container.\n\t\thasSdnotifyParam := fs.Lookup(\"sdnotify\").Changed\n\t\tif !hasSdnotifyParam {\n\t\t\tstartCommand = append(startCommand, \"--sdnotify=conmon\")\n\t\t}\n\n\t\tif !hasDetachParam {\n\t\t\t// Enforce detaching\n\t\t\t//\n\t\t\t// since we use systemd `Type=forking` service @see\n\t\t\t// https://www.freedesktop.org/software/systemd/man/systemd.service.html#Type=\n\t\t\t// when we generated systemd service file with the\n\t\t\t// --new param, `ExecStart` will have `/usr/bin/podman\n\t\t\t// run ...` if `info.CreateCommand` has no `-d` or\n\t\t\t// `--detach` param, podman will run the container in\n\t\t\t// default attached mode, as a result, `systemd start`\n\t\t\t// will wait the `podman run` command exit until failed\n\t\t\t// with timeout error.\n\t\t\tstartCommand = 
append(startCommand, \"-d\")\n\n\t\t\tif fs.Changed(\"detach\") {\n\t\t\t\t// this can only happen if --detach=false is set\n\t\t\t\t// in that case we need to remove it otherwise we\n\t\t\t\t// would overwrite the previous detach arg to false\n\t\t\t\tremainingCmd = removeDetachArg(remainingCmd, fs.NArg())\n\t\t\t}\n\t\t}\n\t\tif hasNameParam && !hasReplaceParam {\n\t\t\t// Enforce --replace for named containers. This will\n\t\t\t// make systemd units more robust as it allows them to\n\t\t\t// start after system crashes (see\n\t\t\t// github.com/containers/podman/issues/5485).\n\t\t\tstartCommand = append(startCommand, \"--replace\")\n\n\t\t\tif fs.Changed(\"replace\") {\n\t\t\t\t// this can only happen if --replace=false is set\n\t\t\t\t// in that case we need to remove it otherwise we\n\t\t\t\t// would overwrite the previous replace arg to false\n\t\t\t\tremainingCmd = removeReplaceArg(remainingCmd, fs.NArg())\n\t\t\t}\n\t\t}\n\n\t\t// Unless the user explicitly set a restart policy, check\n\t\t// whether the container was created with a custom one and use\n\t\t// it instead of the default.\n\t\tif options.RestartPolicy == nil {\n\t\t\trestartPolicy, err := fs.GetString(\"restart\")\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", err\n\t\t\t}\n\t\t\tif restartPolicy != \"\" {\n\t\t\t\tif strings.HasPrefix(restartPolicy, \"on-failure:\") {\n\t\t\t\t\t// Special case --restart=on-failure:5\n\t\t\t\t\tspl := strings.Split(restartPolicy, \":\")\n\t\t\t\t\trestartPolicy = spl[0]\n\t\t\t\t\tinfo.StartLimitBurst = spl[1]\n\t\t\t\t} else if restartPolicy == libpodDefine.RestartPolicyUnlessStopped {\n\t\t\t\t\trestartPolicy = libpodDefine.RestartPolicyAlways\n\t\t\t\t}\n\t\t\t\tinfo.RestartPolicy = restartPolicy\n\t\t\t}\n\t\t}\n\n\t\tenvs, err := fs.GetStringArray(\"env\")\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\tfor _, env := range envs {\n\t\t\t// if env arg does not contain a equal sign we have to add the envar to the unit\n\t\t\t// because it does try to red the value from the environment\n\t\t\tif !strings.Contains(env, \"=\") {\n\t\t\t\tfor _, containerEnv := range info.containerEnv {\n\t\t\t\t\tsplit := strings.SplitN(containerEnv, \"=\", 2)\n\t\t\t\t\tif split[0] == env {\n\t\t\t\t\t\tinfo.ExtraEnvs = append(info.ExtraEnvs, escapeSystemdArg(containerEnv))\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tstartCommand = append(startCommand, remainingCmd...)\n\t\tstartCommand = escapeSystemdArguments(startCommand)\n\t\tinfo.ExecStart = strings.Join(startCommand, \" \")\n\t}\n\n\tinfo.TimeoutStopSec = minTimeoutStopSec + info.StopTimeout\n\n\tif info.PodmanVersion == \"\" {\n\t\tinfo.PodmanVersion = version.Version.String()\n\t}\n\n\tif options.NoHeader {\n\t\tinfo.GenerateNoHeader = true\n\t\tinfo.GenerateTimestamp = false\n\t}\n\n\tif info.GenerateTimestamp {\n\t\tinfo.TimeStamp = fmt.Sprintf(\"%v\", time.Now().Format(time.UnixDate))\n\t}\n\t// Sort the slices to assure a deterministic output.\n\tsort.Strings(info.BoundToServices)\n\n\t// Generate the template and compile it.\n\t//\n\t// Note that we need a two-step generation process to allow for fields\n\t// embedding other fields. This way we can replace `A -> B -> C` and\n\t// make the code easier to maintain at the cost of a slightly slower\n\t// generation. 
That's especially needed for embedding the PID and ID\n\t// files in other fields which will eventually get replaced in the 2nd\n\t// template execution.\n\ttempl, err := template.New(\"container_template\").Delims(\"{{{{\", \"}}}}\").Parse(containerTemplate)\n\tif err != nil {\n\t\treturn \"\", errors.Wrap(err, \"error parsing systemd service template\")\n\t}\n\n\tvar buf bytes.Buffer\n\tif err := templ.Execute(&buf, info); err != nil {\n\t\treturn \"\", err\n\t}\n\n\t// Now parse the generated template (i.e., buf) and execute it.\n\ttempl, err = template.New(\"container_template\").Delims(\"{{{{\", \"}}}}\").Parse(buf.String())\n\tif err != nil {\n\t\treturn \"\", errors.Wrap(err, \"error parsing systemd service template\")\n\t}\n\n\tbuf = bytes.Buffer{}\n\tif err := templ.Execute(&buf, info); err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn buf.String(), nil\n}", "title": "" }, { "docid": "46c33b797d6d6d63d829f9d849b47598", "score": "0.56163985", "text": "func generateBuildCommand(buildargs map[string]string, dockerfile string) string {\n\n\t// The build should be relative to where the Dockerfile is\n\tfilename := filepath.Base(dockerfile)\n\n\t// Start the command (use environment variable for name)\n\tcommand := \"docker build -f \" + filename\n\n\t// Add each buildarg\n\tfor key, value := range buildargs {\n\t\tcommand += \" --build-arg \" + key + \"=\" + value\n\t}\n\treturn command\n}", "title": "" }, { "docid": "d5254e8f9cf4bab21f531db904409800", "score": "0.5609245", "text": "func (c *BuildContext) Build() (err error) {\n\t// create tempdir to build in\n\ttmpDir, err := fs.TempDir(\"\", \"kind-base-image\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer os.RemoveAll(tmpDir)\n\n\t// populate with image sources\n\t// if SourceDir is unset then try to autodetect source dir\n\tbuildDir := tmpDir\n\tif c.sourceDir == \"\" {\n\t\tpkg, err := build.Default.Import(\"k8s.io/kubeadm/kinder\", build.Default.GOPATH, build.FindOnly)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to locate sources\")\n\t\t}\n\t\tc.sourceDir = filepath.Join(pkg.Dir, \"images\", \"base\", \"docker\")\n\t}\n\n\terr = fs.Copy(c.sourceDir, buildDir)\n\tif err != nil {\n\t\tlog.Errorf(\"failed to copy sources to build dir %v\", err)\n\t\treturn err\n\t}\n\n\tlog.Infof(\"Building base image in: %s\", buildDir)\n\n\t// build the entrypoint binary first\n\tif err := c.buildEntrypoint(buildDir); err != nil {\n\t\treturn err\n\t}\n\n\t// then the actual docker image\n\treturn c.buildImage(buildDir)\n}", "title": "" }, { "docid": "f02c43b35885e39957a5adc3481d9806", "score": "0.5583957", "text": "func createImage(t *testing.T, cli ce.ContainerInterface, files []struct{ Name, Body string }) string {\n\tr := bytes.NewReader(generateTAR(t, files))\n\ttag := strings.ToLower(t.Name())\n\n\ttmpDir, err := os.MkdirTemp(\"\", \"tmp\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tdefer os.RemoveAll(tmpDir)\n\n\t//Write files to temp directory\n\tfor _, file := range files {\n\t\t//Add tag to file name to allow parallel testing\n\t\tf, err := os.Create(filepath.Join(tmpDir, file.Name))\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\tdefer f.Close()\n\n\t\tbody := []byte(file.Body)\n\t\t_, err = f.Write(body)\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}\n\t_, err = cli.ImageBuild(r, tag, filepath.Join(tmpDir, files[0].Name))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\treturn tag\n}", "title": "" }, { "docid": "5fb0978bb552a1bcc2ec35767445a6f3", "score": "0.5578889", "text": "func (b *DefaultDocker) 
BuildImage(name string, context string, tags []string, buildArgs map[string]string, target string, cacheFrom string, file string, output bool) error {\n\targs := []string{\"build\"}\n\n\tif len(target) > 0 {\n\t\targs = append(args, fmt.Sprintf(\"--target=%s\", target))\n\t}\n\n\tfor _, tag := range tags {\n\t\targs = append(args, fmt.Sprintf(\"--tag=%s\", tag))\n\t}\n\n\tfor arg, value := range buildArgs {\n\t\targs = append(args, fmt.Sprintf(\"--build-arg=\\\"%s=%s\\\"\", arg, value))\n\t}\n\n\tif len(cacheFrom) > 0 {\n\t\targs = append(args, fmt.Sprintf(\"--cache-from=%s\", cacheFrom))\n\t}\n\n\tif len(file) > 0 {\n\t\targs = append(args, fmt.Sprintf(\"--file=%s\", file))\n\t}\n\n\tif len(context) == 0 {\n\t\targs = append(args, \".\")\n\t} else {\n\t\targs = append(args, context)\n\t}\n\n\tif output {\n\t\tfmt.Println(fmt.Sprintf(\"===> [%s] Docker Args: %s\", name, args))\n\t}\n\n\tcmd := exec.Command(\"docker\", args...)\n\n\tout, err := cmd.CombinedOutput()\n\n\tif output {\n\t\tlines := strings.Split(string(out), \"\\n\")\n\n\t\tfor _, line := range lines {\n\t\t\tfmt.Println(fmt.Sprintf(\"===> [%s] \", name) + line)\n\t\t}\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "8f9449d4c3b8e35075d2fea8b2faef1a", "score": "0.557659", "text": "func BuildImage() {\n\n\targs := []string{\"build\"}\n\targs = append(args, getProxyArgs(\"--build-arg\")...)\n\targs = append(args, \"-t\", \"meta_crosstools_bitbake\", \"-\")\n\n\tcmd := exec.Command(\"docker\", args...)\n\n\t// Connect dockerFile to stdin\n\tstdin, err := cmd.StdinPipe()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tgo func() {\n\t\tdefer stdin.Close()\n\t\tio.WriteString(stdin, dockerFile)\n\t}()\n\n\thandleCmdOutput(cmd, \"docker build\")\n}", "title": "" }, { "docid": "aa19d925a2d3dfd278b94cc72b124416", "score": "0.55633307", "text": "func ApplyTemplate(k kubectl.Command, asset string, data interface{}) error {\n\ttmpdir, _ := os.MkdirTemp(\"\", \"fleet-\")\n\tdefer os.RemoveAll(tmpdir)\n\n\toutput := path.Join(tmpdir, RandomFilename(asset, rand.New(rand.NewSource(ginkgo.GinkgoRandomSeed())))) // nolint:gosec // test code\n\tif err := Template(output, testenv.AssetPath(asset), data); err != nil {\n\t\treturn err\n\t}\n\tout, err := k.Apply(\"-f\", output)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"%w: %s\", err, out)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8f9d59a4e1ca58dc0afb9622d54031db", "score": "0.5546693", "text": "func (d *Dockerfile) Build() string {\n\tbuffer := bytes.NewBufferString(\"\")\n\tfor _, i := range d.Instructions {\n\t\tbuffer.WriteString(i.Command + \" \" + strings.Join(i.Arguments, \" \") + \"\\n\")\n\t}\n\treturn buffer.String()\n}", "title": "" }, { "docid": "1731311c584102b89f232cb3c1774561", "score": "0.55075943", "text": "func (f *Fissile) GenerateBaseDockerImage(targetPath, baseImage, metricsPath string, noBuild bool, repository string) error {\n\tif metricsPath != \"\" {\n\t\tstampy.Stamp(metricsPath, \"fissile\", \"create-role-base\", \"start\")\n\t\tdefer stampy.Stamp(metricsPath, \"fissile\", \"create-role-base\", \"done\")\n\t}\n\n\tdockerManager, err := docker.NewImageManager()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error connecting to docker: %s\", err.Error())\n\t}\n\n\tbaseImageName := builder.GetBaseImageName(repository, f.Version)\n\n\timage, err := dockerManager.FindImage(baseImageName)\n\tif err == docker.ErrImageNotFound {\n\t\tf.UI.Println(\"Image doesn't exist, it will be created ...\")\n\t} else if err != nil {\n\t\treturn fmt.Errorf(\"Error looking up image: 
%s\", err.Error())\n\t} else {\n\t\tf.UI.Println(color.GreenString(\n\t\t\t\"Base role image %s with ID %s already exists. Doing nothing.\",\n\t\t\tcolor.YellowString(baseImageName),\n\t\t\tcolor.YellowString(image.ID),\n\t\t))\n\t\treturn nil\n\t}\n\n\tif !strings.HasSuffix(targetPath, string(os.PathSeparator)) {\n\t\ttargetPath = fmt.Sprintf(\"%s%c\", targetPath, os.PathSeparator)\n\t}\n\n\tbaseImageBuilder := builder.NewBaseImageBuilder(baseImage)\n\n\tif noBuild {\n\t\tf.UI.Println(\"Skipping image build because of flag.\")\n\t\treturn nil\n\t}\n\n\tf.UI.Println(\"Building base docker image ...\")\n\tlog := new(bytes.Buffer)\n\tstdoutWriter := docker.NewFormattingWriter(\n\t\tlog,\n\t\tdocker.ColoredBuildStringFunc(baseImageName),\n\t)\n\n\ttarPopulator := baseImageBuilder.NewDockerPopulator()\n\terr = dockerManager.BuildImageFromCallback(baseImageName, stdoutWriter, tarPopulator)\n\tif err != nil {\n\t\tlog.WriteTo(f.UI)\n\t\treturn fmt.Errorf(\"Error building base image: %s\", err)\n\t}\n\tf.UI.Println(color.GreenString(\"Done.\"))\n\n\treturn nil\n}", "title": "" }, { "docid": "eff5b34d596a6366c033bc772531cab0", "score": "0.5496713", "text": "func buildDockerRun(ctx context.Context, fnc stack.Function, opts runOptions) (*exec.Cmd, error) {\n\targs := []string{\"run\", \"--rm\", \"-i\", fmt.Sprintf(\"-p=%d:8080\", opts.port)}\n\n\tif opts.network != \"\" {\n\t\targs = append(args, fmt.Sprintf(\"--network=%s\", opts.network))\n\t}\n\n\tfprocess, err := deriveFprocess(fnc)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor name, value := range fnc.Environment {\n\t\targs = append(args, fmt.Sprintf(\"-e=%s=%s\", name, value))\n\t}\n\n\tmoreEnv, err := readFiles(fnc.EnvironmentFile)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor name, value := range moreEnv {\n\t\targs = append(args, fmt.Sprintf(\"-e=%s=%s\", name, value))\n\t}\n\n\tfor name, value := range opts.extraEnv {\n\t\targs = append(args, fmt.Sprintf(\"-e=%s=%s\", name, value))\n\t}\n\n\tif fnc.ReadOnlyRootFilesystem {\n\t\targs = append(args, \"--read-only\")\n\t}\n\n\tif fnc.Limits != nil {\n\t\tif fnc.Limits.Memory != \"\" {\n\t\t\t// use a soft limit for debugging\n\t\t\targs = append(args, fmt.Sprintf(\"--memory-reservation=%s\", fnc.Limits.Memory))\n\t\t}\n\n\t\tif fnc.Limits.CPU != \"\" {\n\t\t\targs = append(args, fmt.Sprintf(\"--cpus=%s\", fnc.Limits.CPU))\n\t\t}\n\t}\n\n\tif len(fnc.Secrets) > 0 {\n\t\tsecretsPath, err := filepath.Abs(localSecretsDir)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"can't determine secrets folder: %w\", err)\n\t\t}\n\n\t\terr = os.MkdirAll(secretsPath, 0700)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"can't create local secrets folder %q: %w\", secretsPath, err)\n\t\t}\n\n\t\tif !opts.print {\n\t\t\terr = dirContainsFiles(secretsPath, fnc.Secrets...)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"missing files: %w\", err)\n\t\t\t}\n\t\t}\n\n\t\targs = append(args, fmt.Sprintf(\"--volume=%s:/var/openfaas/secrets\", secretsPath))\n\t}\n\n\targs = append(args, fmt.Sprintf(\"-e=fprocess=%s\", fprocess))\n\targs = append(args, fnc.Image)\n\n\tcmd := exec.CommandContext(ctx, \"docker\", args...)\n\n\treturn cmd, nil\n}", "title": "" }, { "docid": "069b9e3b28d3bd30fe05e6ca439240df", "score": "0.54868776", "text": "func generateBuildDescription(buildargs map[string]string, dockerfile string) string {\n\n\t// Assume for now the Dockerfile directory is an identifier\n\tdirname := filepath.Dir(dockerfile)\n\n\t// Start the command (use environment variable for 
name)\n\tdescription := dirname\n\n\t// Add each buildarg\n\tfor key, value := range buildargs {\n\t\tdescription += \" \" + key + \":\" + value\n\t}\n\treturn description\n}", "title": "" }, { "docid": "8063d592eb0a74e334116ae8d60ebe6e", "score": "0.54744035", "text": "func (m *DockerConfigBuild) Build(ctx context.Context, contextPath string) error {\n\targs := []string{\"build\"}\n\n\tif m.Pull {\n\t\targs = append(args, \"--pull\")\n\t}\n\n\tif m.Compress {\n\t\targs = append(args, \"--compress\")\n\t}\n\n\tif m.NoCache {\n\t\targs = append(args, \"--no-cache\")\n\t}\n\n\tif len(m.Target) > 0 {\n\t\targs = append(args, \"--target\", m.Target)\n\t}\n\n\tfor key, value := range m.BuildArgs {\n\t\targs = append(args, \"--build-arg\", fmt.Sprint(key, \"=\", value))\n\t}\n\n\tfor _, value := range m.CacheFrom {\n\t\targs = append(args, \"--cache-from\", value)\n\t}\n\n\tfor _, value := range m.Tag {\n\t\targs = append(args, \"-t\", value)\n\t}\n\n\tif len(m.ContextCopyConfigs) != 0 {\n\t\tnewContextPath, err := ioutil.TempDir(\"\", \"docker-context\")\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\n\t\tfor _, c := range m.ContextCopyConfigs {\n\t\t\t// We want any copy operation to be relative to our context destination directory\n\t\t\tc.Destination = filepath.Join(newContextPath, c.Destination)\n\n\t\t\terr = cp.Copy(c)\n\t\t\tif err != nil {\n\t\t\t\treturn trace.Wrap(err)\n\t\t\t}\n\t\t}\n\n\t\tif len(m.Dockerfile) > 0 {\n\t\t\tif filepath.IsAbs(m.Dockerfile) {\n\t\t\t\terr = cp.Copy(cp.Config{\n\t\t\t\t\tSource: m.Dockerfile,\n\t\t\t\t\tDestination: filepath.Join(newContextPath, \"Dockerfile\"),\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn trace.Wrap(err)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\terr = cp.Copy(cp.Config{\n\t\t\t\t\tSource: filepath.Join(contextPath, m.Dockerfile),\n\t\t\t\t\tDestination: filepath.Join(newContextPath, \"Dockerfile\"),\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn trace.Wrap(err)\n\t\t\t\t}\n\t\t\t}\n\n\t\t} else {\n\t\t\terr = cp.Copy(cp.Config{\n\t\t\t\tSource: filepath.Join(contextPath, \"Dockerfile\"),\n\t\t\t\tDestination: filepath.Join(newContextPath, \"Dockerfile\"),\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn trace.Wrap(err)\n\t\t\t}\n\t\t}\n\n\t\tcontextPath = newContextPath\n\t} else {\n\t\tif len(m.Dockerfile) > 0 {\n\t\t\targs = append(args, \"-f\", m.Dockerfile)\n\t\t}\n\t}\n\n\targs = append(args, contextPath)\n\n\t_, err := m.target.Exec().SetEnvs(m.Env).Run(ctx, \"docker\", args...)\n\n\treturn trace.Wrap(err)\n}", "title": "" }, { "docid": "2d553a79eba12231e5b4f398ab6ff1f6", "score": "0.5467838", "text": "func Generate(configPath string, flagAdvanced bool, flagRun bool, flagDetached bool, flagForce bool, flagWithInstructions bool, flagWithDockerfile bool) {\n\t// Clear screen if in interactive mode\n\tif configPath == \"\" {\n\t\tutil.ClearScreen()\n\t}\n\n\t// Load config file if available\n\tvar configFile model.GenerateConfig\n\tprojectName := \"Example Project\"\n\tif configPath != \"\" {\n\t\tif util.FileExists(configPath) {\n\t\t\tyamlFile, err1 := os.Open(configPath)\n\t\t\tcontent, err2 := ioutil.ReadAll(yamlFile)\n\t\t\tif err1 != nil {\n\t\t\t\tutil.Error(\"Could not load config file. Permissions granted?\", err1, true)\n\t\t\t}\n\t\t\tif err2 != nil {\n\t\t\t\tutil.Error(\"Could not load config file. 
Permissions granted?\", err2, true)\n\t\t\t}\n\t\t\t// Parse yaml\n\t\t\tyaml.Unmarshal(content, &configFile)\n\t\t\tprojectName = configFile.ProjectName\n\t\t} else {\n\t\t\tutil.Error(\"Config file could not be found\", nil, true)\n\t\t}\n\t} else {\n\t\t// Welcome Message\n\t\tutil.Heading(\"Welcome to Compose Generator! 👋\")\n\t\tutil.Pl(\"Please continue by answering a few questions:\")\n\t\tutil.Pel()\n\n\t\t// Ask for project name\n\t\tprojectName = util.TextQuestion(\"What is the name of your project:\")\n\t\tif projectName == \"\" {\n\t\t\tutil.Error(\"Error. You must specify a project name!\", nil, true)\n\t\t}\n\t}\n\n\t// Generate dynamic stack\n\tgenerateDynamicStack(configFile, projectName, flagAdvanced, flagForce, flagWithInstructions, flagWithDockerfile)\n\n\t// Run if the corresponding flag is set\n\tif flagRun || flagDetached {\n\t\tutil.DockerComposeUp(flagDetached)\n\t} else {\n\t\t// Print success message\n\t\tutil.Pel()\n\t\tutil.SuccessMessage(\"🎉 Done! You now can execute \\\"$ docker-compose up\\\" to launch your app! 🎉\")\n\t}\n}", "title": "" }, { "docid": "d4a00cc2ee636f1bc7b601503f99cb17", "score": "0.5459696", "text": "func (d *DockerBuilder) Build() error {\n\n\tvar err error\n\tctx := timing.NewContext(context.Background())\n\tdefer func() {\n\t\td.build.Status.Stages = timing.AppendStageAndStepInfo(d.build.Status.Stages, timing.GetStages(ctx))\n\t\tHandleBuildStatusUpdate(d.build, d.client, nil)\n\t}()\n\n\tif d.build.Spec.Source.Git == nil && d.build.Spec.Source.Binary == nil &&\n\t\td.build.Spec.Source.Dockerfile == nil && d.build.Spec.Source.Images == nil {\n\t\treturn fmt.Errorf(\"must provide a value for at least one of source, binary, images, or dockerfile\")\n\t}\n\tvar push bool\n\tpushTag := d.build.Status.OutputDockerImageReference\n\n\t// this is where the git-fetch container put the code during the clone operation\n\tbuildDir := d.inputDir\n\n\tglog.V(4).Infof(\"Starting Docker build from build config %s ...\", d.build.Name)\n\t// if there is no output target, set one up so the docker build logic\n\t// (which requires a tag) will still work, but we won't push it at the end.\n\tif d.build.Spec.Output.To == nil || len(d.build.Spec.Output.To.Name) == 0 {\n\t\td.build.Status.OutputDockerImageReference = d.build.Name\n\t} else {\n\t\tpush = true\n\t}\n\n\tbuildTag := randomBuildTag(d.build.Namespace, d.build.Name)\n\tdockerfilePath := getDockerfilePath(buildDir, d.build)\n\n\timageNames, multiStage, err := findReferencedImages(dockerfilePath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif len(imageNames) == 0 {\n\t\treturn fmt.Errorf(\"no FROM image in Dockerfile\")\n\t}\n\tfor _, imageName := range imageNames {\n\t\tif imageName == \"scratch\" {\n\t\t\tglog.V(4).Infof(\"\\nSkipping image \\\"scratch\\\"\")\n\t\t\tcontinue\n\t\t}\n\t\timageExists := true\n\t\t_, err = d.dockerClient.InspectImage(imageName)\n\t\tif err != nil {\n\t\t\tif err != docker.ErrNoSuchImage {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\timageExists = false\n\t\t}\n\t\t// if forcePull or the image does not exist on the node we should pull the image first\n\t\tif d.build.Spec.Strategy.DockerStrategy.ForcePull || !imageExists {\n\t\t\tpullAuthConfig, _ := dockercfg.NewHelper().GetDockerAuth(\n\t\t\t\timageName,\n\t\t\t\tdockercfg.PullAuthType,\n\t\t\t)\n\t\t\tglog.V(0).Infof(\"\\nPulling image %s ...\", imageName)\n\t\t\tstartTime := metav1.Now()\n\t\t\terr = pullImage(d.dockerClient, imageName, pullAuthConfig)\n\n\t\t\ttiming.RecordNewStep(ctx, buildapiv1.StagePullImages, 
buildapiv1.StepPullBaseImage, startTime, metav1.Now())\n\n\t\t\tif err != nil {\n\t\t\t\td.build.Status.Phase = buildapiv1.BuildPhaseFailed\n\t\t\t\td.build.Status.Reason = buildapiv1.StatusReasonPullBuilderImageFailed\n\t\t\t\td.build.Status.Message = builderutil.StatusMessagePullBuilderImageFailed\n\t\t\t\tHandleBuildStatusUpdate(d.build, d.client, nil)\n\t\t\t\treturn fmt.Errorf(\"failed to pull image: %v\", err)\n\t\t\t}\n\n\t\t}\n\t}\n\n\tif s := d.build.Spec.Strategy.DockerStrategy; s != nil && multiStage {\n\t\tif s.ImageOptimizationPolicy == nil {\n\t\t\tpolicy := buildapiv1.ImageOptimizationSkipLayers\n\t\t\ts.ImageOptimizationPolicy = &policy\n\t\t\tglog.V(2).Infof(\"Detected multi-stage Dockerfile, image will be built with imageOptimizationPolicy set to SkipLayers\")\n\t\t}\n\t}\n\n\tstartTime := metav1.Now()\n\terr = d.dockerBuild(buildDir, buildTag)\n\n\ttiming.RecordNewStep(ctx, buildapiv1.StageBuild, buildapiv1.StepDockerBuild, startTime, metav1.Now())\n\n\tif err != nil {\n\t\td.build.Status.Phase = buildapiv1.BuildPhaseFailed\n\t\td.build.Status.Reason = buildapiv1.StatusReasonDockerBuildFailed\n\t\td.build.Status.Message = builderutil.StatusMessageDockerBuildFailed\n\t\tHandleBuildStatusUpdate(d.build, d.client, nil)\n\t\treturn err\n\t}\n\n\tcname := containerName(\"docker\", d.build.Name, d.build.Namespace, \"post-commit\")\n\n\terr = execPostCommitHook(ctx, d.dockerClient, d.build.Spec.PostCommit, buildTag, cname)\n\n\tif err != nil {\n\t\td.build.Status.Phase = buildapiv1.BuildPhaseFailed\n\t\td.build.Status.Reason = buildapiv1.StatusReasonPostCommitHookFailed\n\t\td.build.Status.Message = builderutil.StatusMessagePostCommitHookFailed\n\t\tHandleBuildStatusUpdate(d.build, d.client, nil)\n\t\treturn err\n\t}\n\n\tif push {\n\t\tif err := tagImage(d.dockerClient, buildTag, pushTag); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif err := removeImage(d.dockerClient, buildTag); err != nil {\n\t\tglog.V(0).Infof(\"warning: Failed to remove temporary build tag %v: %v\", buildTag, err)\n\t}\n\n\tif push {\n\t\t// Get the Docker push authentication\n\t\tpushAuthConfig, authPresent := dockercfg.NewHelper().GetDockerAuth(\n\t\t\tpushTag,\n\t\t\tdockercfg.PushAuthType,\n\t\t)\n\t\tif authPresent {\n\t\t\tglog.V(4).Infof(\"Authenticating Docker push with user %q\", pushAuthConfig.Username)\n\t\t}\n\t\tglog.V(0).Infof(\"\\nPushing image %s ...\", pushTag)\n\t\tstartTime = metav1.Now()\n\t\tdigest, err := pushImage(d.dockerClient, pushTag, pushAuthConfig)\n\n\t\ttiming.RecordNewStep(ctx, buildapiv1.StagePushImage, buildapiv1.StepPushDockerImage, startTime, metav1.Now())\n\n\t\tif err != nil {\n\t\t\td.build.Status.Phase = buildapiv1.BuildPhaseFailed\n\t\t\td.build.Status.Reason = buildapiv1.StatusReasonPushImageToRegistryFailed\n\t\t\td.build.Status.Message = builderutil.StatusMessagePushImageToRegistryFailed\n\t\t\tHandleBuildStatusUpdate(d.build, d.client, nil)\n\t\t\treturn reportPushFailure(err, authPresent, pushAuthConfig)\n\t\t}\n\n\t\tif len(digest) > 0 {\n\t\t\td.build.Status.Output.To = &buildapiv1.BuildStatusOutputTo{\n\t\t\t\tImageDigest: digest,\n\t\t\t}\n\t\t\tHandleBuildStatusUpdate(d.build, d.client, nil)\n\t\t}\n\t\tglog.V(0).Infof(\"Push successful\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "145b21360069797b57791fcbd1594e16", "score": "0.5451435", "text": "func (cmd *BuildCommand) Execute(args []string) error {\n // Set vars\n Logger.Info(\"Building... 
using information from dir:\", cmd.Options.Dir)\n\n // Check Config and Buildfiles\n configFile, buildFile, err := cmd.Controls.CheckConfigs(cmd.App, cmd.Options)\n if err != nil {\n return err\n }\n\n // Api interaction here\n dc := NewDockerApi(cmd.App.AppState.Meta, configFile, buildFile)\n dc.ShowInfo()\n\n // Generate Templates\n // TODO: fails here with filemode\n if err := dc.createTestTemplates(*cmd.Options); err != nil {\n return err\n }\n\n // Register channel so we can watch for events as they happen\n eventsChannel := make(ApiChannel)\n go watchForEventsOn(eventsChannel)\n dc.RegisterChannel(eventsChannel)\n\n fqImageName := buildFile.ImageName + \":\" + buildFile.Tag\n\n image, err := dc.CreateDockerImage(fqImageName, cmd.Options)\n if err != nil {\n return err\n }\n\n msg := \"Created Docker Image: \" + image\n Logger.Console(msg)\n return nil\n}", "title": "" }, { "docid": "e8a79cacc8496642b3145571a9cd9e20", "score": "0.5446183", "text": "func Build(pkg *pkgen.PackageGenerator, opts Options) (err error) {\n\t// prepare build configuration\n\terr = opts.fix(pkg)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif opts.closeDocker {\n\t\tdefer func() {\n\t\t\tcerr := opts.Docker.Close()\n\t\t\tif cerr != nil && err == nil {\n\t\t\t\terr = cerr\n\t\t\t}\n\t\t}()\n\t}\n\n\t// add scope-cancelled context\n\tvar cancel context.CancelFunc\n\topts.Ctx, cancel = context.WithCancel(opts.Ctx)\n\tdefer cancel()\n\n\t// create container\n\terr = opts.Log.Log(buildlog.Line{\n\t\tStream: buildlog.StreamBuild,\n\t\tText: \"Creating container. . .\",\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tcontainerCreate, err := opts.Docker.ContainerCreate(\n\t\topts.Ctx,\n\t\t&container.Config{\n\t\t\tImage: opts.DockerImage.Image,\n\t\t\tCmd: []string{\"/root/build/build.sh\"},\n\t\t},\n\t\tnil, nil, \"\",\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer func() {\n\t\t// remove container\n\t\trctx, cancel := context.WithTimeout(context.Background(), 20*time.Second)\n\t\tdefer cancel()\n\t\tcerr := opts.Docker.ContainerRemove(rctx, containerCreate.ID, types.ContainerRemoveOptions{\n\t\t\tForce: true,\n\t\t})\n\t\tif cerr != nil && err == nil {\n\t\t\terr = cerr\n\t\t}\n\t}()\n\n\t// prepare to create build inputs\n\terr = opts.Log.Log(buildlog.Line{\n\t\tStream: buildlog.StreamBuild,\n\t\tText: \"Preparing build payload. . 
.\",\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// stream content to docker\n\tpr, pw := io.Pipe()\n\tvar wg sync.WaitGroup\n\twg.Add(1)\n\tdefer wg.Wait()\n\tvar dcerr error\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tdefer pr.Close()\n\t\tdcerr = opts.Docker.CopyToContainer(\n\t\t\topts.Ctx,\n\t\t\tcontainerCreate.ID,\n\t\t\t\"/root/build/\",\n\t\t\tpr,\n\t\t\ttypes.CopyToContainerOptions{},\n\t\t)\n\t}()\n\tdefer pw.Close()\n\n\t// generate tar data\n\ttw := tar.NewWriter(pw)\n\n\t// write source\n\terr = tw.WriteHeader(&tar.Header{\n\t\tName: \"src\",\n\t\tMode: 0644 | int64(os.ModeDir),\n\t\tTypeflag: tar.TypeDir,\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = pkg.WriteSourceTar(opts.Ctx, \"src\", tw, opts.Loader, 0)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// symlink Makefile into base dir\n\terr = tw.WriteHeader(&tar.Header{\n\t\tName: \"Makefile\",\n\t\tMode: 0644 | int64(os.ModeSymlink),\n\t\tTypeflag: tar.TypeSymlink,\n\t\tLinkname: \"src/Makefile\",\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// send dependencies\n\terr = tw.WriteHeader(&tar.Header{\n\t\tName: \"deps\",\n\t\tMode: 0644 | int64(os.ModeDir),\n\t\tTypeflag: tar.TypeDir,\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tdlst := []string{}\n\tdeps, err := opts.Dependencies.FindDependencies(pkg.BuildDependencies...)\n\tif err != nil {\n\t\treturn err\n\t}\ndloop:\n\tfor _, v := range deps {\n\t\tfor _, p := range opts.DockerImage.Packages {\n\t\t\tif v == p {\n\t\t\t\tcontinue dloop\n\t\t\t}\n\t\t}\n\n\t\trc, l, err := opts.Packages.GetPkg(v, pkg.BuildArch)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tname := filepath.Join(\"deps\", v+\".tar.gz\")\n\n\t\terr = tw.WriteHeader(&tar.Header{\n\t\t\tName: name,\n\t\t\tMode: 0644,\n\t\t\tSize: l,\n\t\t})\n\t\tif err != nil {\n\t\t\trc.Close()\n\t\t\treturn err\n\t\t}\n\n\t\t_, err = io.Copy(tw, rc)\n\t\tcerr := rc.Close()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif cerr != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tdlst = append(dlst, name)\n\t}\n\tdtxt := []byte(strings.Join(dlst, \"\\n\"))\n\terr = tw.WriteHeader(&tar.Header{\n\t\tName: \"deps/deps.list\",\n\t\tMode: 0644,\n\t\tSize: int64(len(dtxt)),\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = tw.Write(dtxt)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// inject build script\n\terr = tw.WriteHeader(&tar.Header{\n\t\tName: \"build.sh\",\n\t\tMode: 0744,\n\t\tSize: int64(len(buildScript)),\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = tw.Write(buildScript)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// commit data to container filesystem\n\terr = tw.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = pw.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\twg.Wait()\n\tif dcerr != nil {\n\t\treturn dcerr\n\t}\n\n\t// start build\n\terr = opts.Log.Log(buildlog.Line{\n\t\tStream: buildlog.StreamBuild,\n\t\tText: \"Starting build. . 
.\",\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = opts.Docker.ContainerStart(opts.Ctx, containerCreate.ID, types.ContainerStartOptions{})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// log build\n\tlr, err := opts.Docker.ContainerLogs(opts.Ctx, containerCreate.ID, types.ContainerLogsOptions{\n\t\tShowStdout: true,\n\t\tShowStderr: true,\n\t\tFollow: true,\n\t})\n\tdefer func() {\n\t\tcerr := lr.Close()\n\t\tif cerr != nil && err == nil {\n\t\t\terr = cerr\n\t\t}\n\t}()\n\tif err != nil {\n\t\treturn err\n\t}\n\topts.Log = buildlog.MutexedLogHandler(opts.Log)\n\tsow := buildlog.LogWriter(opts.Log, buildlog.StreamStdout)\n\tsew := buildlog.LogWriter(opts.Log, buildlog.StreamStderr)\n\tdefer sow.Close()\n\tdefer sew.Close()\n\t_, err = stdcopy.StdCopy(sow, sew, lr)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// check if completed ok\n\tinfo, err := opts.Docker.ContainerInspect(opts.Ctx, containerCreate.ID)\n\tif err != nil {\n\t\treturn err\n\t}\n\tstate := info.ContainerJSONBase.State\n\tif state.Running {\n\t\treturn errors.New(\"container still running\")\n\t}\n\tif state.ExitCode != 0 {\n\t\treturn fmt.Errorf(\"build failed with exit code %d\", state.ExitCode)\n\t}\n\n\terr = opts.Log.Log(buildlog.Line{\n\t\tStream: buildlog.StreamBuild,\n\t\tText: \"Transferring output. . .\",\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// read build output\n\tdrc, _, err := opts.Docker.CopyFromContainer(opts.Ctx, containerCreate.ID, \"/root/build/pkgs.tar\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer drc.Close()\n\totr := tar.NewReader(drc)\n\t_, err = otr.Next()\n\tif err != nil {\n\t\treturn err\n\t}\n\ttr := tar.NewReader(otr)\n\tfor {\n\t\thdr, err := tr.Next()\n\t\tif err != nil {\n\t\t\tif err == io.EOF {\n\t\t\t\terr = opts.Log.Log(buildlog.Line{\n\t\t\t\t\tStream: buildlog.StreamBuild,\n\t\t\t\t\tText: \"Build Complete!\",\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\treturn err\n\t\t}\n\t\tspl := strings.Split(filepath.Base(hdr.Name), \".\")\n\t\tif len(spl) < 2 {\n\t\t\treturn fmt.Errorf(\"found invalid output file %q\", hdr.Name)\n\t\t}\n\t\tpkname := spl[0]\n\t\terr = opts.Output.Store(pkname, pkg.BuildArch, tr)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n}", "title": "" }, { "docid": "820fe296a32a5b69a6f4a6ae1c45ddff", "score": "0.5439", "text": "func (p *Project) Build(name string) (string, error) {\n\t// check layout\n\tif !(slices.Contains(p.Layout, V3) || slices.Contains(p.Layout, V4alpha)) {\n\t\treturn \"\", fmt.Errorf(\"project layout %v is not supported\", p.Layout)\n\t}\n\n\tk, err := kustomize.ParseKustomization(filepath.Join(p.path, defaultKustomization))\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\t// create output folders\n\tpieces := strings.Split(name, \"/\")\n\tmoduleName := pieces[len(pieces)-1] // always return the last part of the path\n\tmanifestsPath := filepath.Join(p.path, OutputPath, moduleName)\n\n\tif err := os.MkdirAll(manifestsPath, os.ModePerm); err != nil {\n\t\treturn \"\", fmt.Errorf(\"could not create chart templates output dir: %w\", err)\n\t}\n\n\t// do build\n\tyml, err := kustomize.Build(k)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\trenderedManifestPath := filepath.Join(manifestsPath, \"rendered.yaml\")\n\tif err := os.WriteFile(renderedManifestPath, yml, os.ModePerm); err != nil {\n\t\treturn \"\", fmt.Errorf(\"could not write rendered kustomization as yml to %s: %w\", manifestsPath, err)\n\t}\n\n\treturn renderedManifestPath, nil\n}", "title": "" }, 
{ "docid": "a214a89965b18e6b17e9d01cb8c42e19", "score": "0.542127", "text": "func (c *Container) Build(srvHash hash.Hash, path string) error {\n\texcludeFiles, err := build.ReadDockerignore(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ttr, err := archive.TarWithOptions(path, &archive.TarOptions{\n\t\tCompression: archive.Gzip,\n\t\tExcludePatterns: excludeFiles,\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer tr.Close()\n\n\tif _, err := c.client.ImageBuild(context.Background(), tr, types.ImageBuildOptions{\n\t\tRemove: true,\n\t\tForceRemove: true,\n\t\tSuppressOutput: true,\n\t\tTags: []string{imageTag + srvHash.String()},\n\t}); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "2f2b0cb14bdddab3c7c053e300e4da42", "score": "0.5403823", "text": "func BuildDocker(binaryName string, buildMode string, projectOptions *ProjectOptions) error {\n\tvar packSpinner *spinner.Spinner\n\tif buildMode == BuildModeBridge {\n\t\treturn fmt.Errorf(\"you cant serve the application in cross-compilation\")\n\t}\n\n\t// Check build directory\n\tbuildDirectory := filepath.Join(fs.Cwd(), \"build\")\n\tif !fs.DirExists(buildDirectory) {\n\t\terr := fs.MkDir(buildDirectory)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tbuildCommand := slicer.String()\n\tuserid := 1000\n\tcurrentUser, _ := user.Current()\n\tif i, err := strconv.Atoi(currentUser.Uid); err == nil {\n\t\tuserid = i\n\t}\n\tfor _, arg := range []string{\n\t\t\"docker\",\n\t\t\"run\",\n\t\t\"--rm\",\n\t\t\"-v\", fmt.Sprintf(\"%s:/build\", filepath.Join(fs.Cwd(), \"build\")),\n\t\t\"-v\", fmt.Sprintf(\"%s:/source\", fs.Cwd()),\n\t\t\"-e\", fmt.Sprintf(\"LOCAL_USER_ID=%v\", userid),\n\t\t\"-e\", fmt.Sprintf(\"FLAG_TAGS=%s\", projectOptions.Tags),\n\t\t\"-e\", fmt.Sprintf(\"FLAG_LDFLAGS=%s\", ldFlags(projectOptions, buildMode)),\n\t\t\"-e\", \"FLAG_V=false\",\n\t\t\"-e\", \"FLAG_X=false\",\n\t\t\"-e\", \"FLAG_RACE=false\",\n\t\t\"-e\", \"FLAG_BUILDMODE=default\",\n\t\t\"-e\", \"FLAG_TRIMPATH=false\",\n\t\t\"-e\", fmt.Sprintf(\"TARGETS=%s/%s\", projectOptions.Platform, projectOptions.Architecture),\n\t\t\"-e\", \"GOPROXY=\",\n\t\t\"-e\", \"GO111MODULE=on\",\n\t} {\n\t\tbuildCommand.Add(arg)\n\t}\n\n\tif projectOptions.GoPath != \"\" {\n\t\tbuildCommand.Add(\"-v\")\n\t\tbuildCommand.Add(fmt.Sprintf(\"%s:/go\", projectOptions.GoPath))\n\t}\n\n\tbuildCommand.Add(fmt.Sprintf(\"wailsapp/xgo:%s\", xgoVersion))\n\tbuildCommand.Add(\".\")\n\n\tcompileMessage := fmt.Sprintf(\n\t\t\"Packing + Compiling project for %s/%s using docker image wailsapp/xgo:%s\",\n\t\tprojectOptions.Platform, projectOptions.Architecture, xgoVersion)\n\n\tif buildMode == BuildModeDebug {\n\t\tcompileMessage += \" (Debug Mode)\"\n\t}\n\n\tif !projectOptions.Verbose {\n\t\tpackSpinner = spinner.New(compileMessage + \"...\")\n\t\tpackSpinner.SetSpinSpeed(50)\n\t\tpackSpinner.Start()\n\t} else {\n\t\tprintln(compileMessage)\n\t}\n\n\terr := NewProgramHelper(projectOptions.Verbose).RunCommandArray(buildCommand.AsSlice())\n\tif err != nil {\n\t\tif packSpinner != nil {\n\t\t\tpackSpinner.Error()\n\t\t}\n\t\treturn err\n\t}\n\tif packSpinner != nil {\n\t\tpackSpinner.Success()\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "a6231d7feec05efc80dce8c7a749edc8", "score": "0.5402302", "text": "func preRun(c *cli.Context) (string, *funcfile, []string, error) {\n\twd := getWd()\n\t// if image name is passed in, it will run that image\n\tpath := c.Args().First() // TODO: should we ditch this?\n\tvar err error\n\tvar ff *funcfile\n\tvar fpath 
string\n\n\tif path != \"\" {\n\t\tfmt.Printf(\"Running function at: /%s\\n\", path)\n\t\tdir := filepath.Join(wd, path)\n\t\terr := os.Chdir(dir)\n\t\tif err != nil {\n\t\t\treturn \"\", nil, nil, err\n\t\t}\n\t\tdefer os.Chdir(wd) // todo: wrap this so we can log the error if changing back fails\n\t\twd = dir\n\t}\n\n\tfpath, ff, err = findAndParseFuncfile(wd)\n\tif err != nil {\n\t\treturn fpath, nil, nil, err\n\t}\n\n\t// check for valid input\n\tenvVars := c.StringSlice(\"env\")\n\t// Check expected env vars defined in func file\n\tfor _, expected := range ff.Expects.Config {\n\t\tn := expected.Name\n\t\te := getEnvValue(n, envVars)\n\t\tif e != \"\" {\n\t\t\tcontinue\n\t\t}\n\t\te = os.Getenv(n)\n\t\tif e != \"\" {\n\t\t\tenvVars = append(envVars, kvEq(n, e))\n\t\t\tcontinue\n\t\t}\n\t\tif expected.Required {\n\t\t\treturn \"\", ff, envVars, fmt.Errorf(\"required env var %s not found, please set either set it in your environment or pass in `-e %s=X` flag.\", n, n)\n\t\t}\n\t\tfmt.Fprintf(os.Stderr, \"info: optional env var %s not found.\\n\", n)\n\t}\n\t// get name from directory if it's not defined\n\tif ff.Name == \"\" {\n\t\tff.Name = filepath.Base(filepath.Dir(fpath)) // todo: should probably make a copy of ff before changing it\n\t}\n\n\t_, err = buildfunc(c, fpath, ff, c.Bool(\"no-cache\"))\n\tif err != nil {\n\t\treturn fpath, nil, nil, err\n\t}\n\treturn fpath, ff, envVars, nil\n}", "title": "" }, { "docid": "d0a7b371e32a383a79d1b0ac05766978", "score": "0.5400837", "text": "func (c *Container) build(files []string, ports []string, env []string) (string, error) {\n\tinputbuf, ierr := buildTar(files...)\n\tif ierr != nil {\n\t\treturn \"\", ierr\n\t}\n\n\toutputbuf := bytes.NewBuffer(nil)\n\topts := docker.BuildImageOptions{\n\t\tName: c.name,\n\t\tDockerfile: files[0],\n\t\tInputStream: inputbuf,\n\t\tOutputStream: outputbuf,\n\t}\n\tif err := c.dc.BuildImage(opts); err != nil {\n\t\treturn \"\", err\n\t}\n\toutput := string(outputbuf.Bytes())\n\tlines := strings.Split(output, \"\\n\")\n\timg := strings.Split(lines[len(lines)-2], \" \")[2]\n\n\texposedPorts := make(map[docker.Port]struct{})\n\tportBindings := make(map[docker.Port][]docker.PortBinding)\n\tfor _, port := range ports {\n\t\tp := docker.Port(port)\n\t\texposedPorts[p] = struct{}{}\n\t\tportBindings[p] = []docker.PortBinding{\n\t\t\t{\n\t\t\t\tHostIP: \"0.0.0.0\",\n\t\t\t\tHostPort: port,\n\t\t\t},\n\t\t}\n\t}\n\n\tcco := docker.CreateContainerOptions{\n\t\tName: c.name,\n\t\tConfig: &docker.Config{\n\t\t\tImage: img,\n\t\t\tExposedPorts: exposedPorts,\n\t\t\tEnv: env,\n\t\t\tAttachStderr: true,\n\t\t\tAttachStdout: true,\n\t\t},\n\t\tHostConfig: &docker.HostConfig{PortBindings: portBindings},\n\t}\n\tcon, cerr := c.dc.CreateContainer(cco)\n\tif cerr != nil {\n\t\treturn \"\", cerr\n\t}\n\treturn con.ID, nil\n}", "title": "" }, { "docid": "700e8e9e26aa606b7c0f53c4b417bc6f", "score": "0.53886807", "text": "func Generate(pkginfo *pkginfo.PkgInfo) {\n\tif pkginfo.Docker == \"\" {\n\t\tlog.Fatal(\"no docker container specified\")\n\t}\n\twriteSingleDockerScript(pkginfo, \".\", \"docker.sh\", dockerSh)\n}", "title": "" }, { "docid": "f0458b86421674a794bd1c482157e331", "score": "0.537704", "text": "func Build(configPath, projectPath, projectName string) []byte {\r\n\tprojectFile := path.Join(projectPath, projectName+\".csproj\")\r\n\tproject := loadProject(projectFile)\r\n\r\n\ttargetFramework := project.Properties[0].TargetFramework\r\n\truntimeVersion := 
targetFramework[len(targetFramework)-3:]\r\n\r\n\tcore.PrintBlue(\"target framework: \", targetFramework)\r\n\tcore.PrintBlue(\" runtime version: \", runtimeVersion)\r\n\r\n\tprintln()\r\n\tcolor.Magenta(\"DOTNET PUBLISH\")\r\n\r\n\tcmd := exec.Command(\"dotnet\", \"publish\", \"-c\", \"Release\")\r\n\tcmd.Dir = projectPath\r\n\r\n\tvar out bytes.Buffer\r\n\tcmd.Stdout = &out\r\n\terr := cmd.Run()\r\n\tif err != nil {\r\n\t\tfmt.Printf(\"%s\\n\", out.String())\r\n\t\tcore.PrintErrorAndPanic(err)\r\n\t}\r\n\r\n\tpublishPath := path.Join(projectPath, \"bin\", \"Release\", targetFramework, \"publish\")\r\n\tprintln()\r\n\tcore.PrintBlue(\" publish path: \", publishPath)\r\n\r\n\tassetsPath := path.Join(projectPath, \"assets\")\r\n\tif core.FileExistsAndDir(assetsPath) {\r\n\t\tcore.PrintBlue(\" assets path: \", assetsPath)\r\n\t\tif err = copyAssets(assetsPath, path.Join(publishPath, \"assets\")); err != nil {\r\n\t\t\tcore.PrintErrorAndPanic(err)\r\n\t\t}\r\n\t}\r\n\r\n\tprintln()\r\n\tcolor.Magenta(\"GENERATE TARBALL\")\r\n\r\n\tdockerfile, customDockerfileExist := loadCustomDockerfile(projectPath)\r\n\tif !customDockerfileExist {\r\n\t\tdockerfile = generateDockerfile(configPath, projectName, runtimeVersion)\r\n\t}\r\n\r\n\ttarBuffer, err := core.CreateTarball(publishPath, dockerfile)\r\n\tif err != nil {\r\n\t\tcore.PrintErrorAndPanic(err)\r\n\t}\r\n\treturn tarBuffer\r\n}", "title": "" }, { "docid": "3180071c95ef8627fa3959a5fce7846a", "score": "0.5373756", "text": "func resolveNameToImage(environment string) string {\n\n // trim off any .cue suffix which is used by ssh proxycommand mode\n environment = strings.TrimSuffix(environment, \".cue\")\n\n\tdockerfileLibrary := getHomeDir() + \"/src/cue/dockerfiles/\"\n\tenvironmentPath := dockerfileLibrary + \"/\" + environment\n\n\tif stat, err := os.Stat(environmentPath); err == nil && stat.IsDir() {\n\t\tlogInfo(\"resolveNameToImage: environment directory exists - using docker build\\n\")\n\t\tcmd := \"docker\"\n\n\t\tusername := getUsername()\n\n\t\ttagname := \"cue/\" + username + \"/\" + environment\n\t\targs := []string{\"build\", \"--quiet\", \"--tag\", tagname, environmentPath}\n\t\toutput, err := exec.Command(cmd, args...).CombinedOutput()\n\t\texitOnError(\"running Docker build\", 64, err)\n\n\t\tlogInfo(\"resolveNameToImage: successful output from docker build:\\n%s\\n\", output)\n\t\treturn strings.TrimSpace(string(output))\n\t} else {\n\t\tlogInfo(\"resolveNameToImage: environment directory does not exist - using name as raw docker image identifier\\n\")\n\t\treturn strings.TrimSpace(environment)\n\t}\n}", "title": "" }, { "docid": "29ee11fb5d0f7d566ec960c94aeddf13", "score": "0.53688204", "text": "func (d *DockerBuilder) dockerBuild(dir string, tag string) error {\n\tvar noCache bool\n\tvar forcePull bool\n\tvar buildArgs []docker.BuildArg\n\tdockerfilePath := defaultDockerfilePath\n\tif d.build.Spec.Strategy.DockerStrategy != nil {\n\t\tif d.build.Spec.Source.ContextDir != \"\" {\n\t\t\tdir = filepath.Join(dir, d.build.Spec.Source.ContextDir)\n\t\t}\n\t\tif d.build.Spec.Strategy.DockerStrategy.DockerfilePath != \"\" {\n\t\t\tdockerfilePath = d.build.Spec.Strategy.DockerStrategy.DockerfilePath\n\t\t}\n\t\tfor _, ba := range d.build.Spec.Strategy.DockerStrategy.BuildArgs {\n\t\t\tbuildArgs = append(buildArgs, docker.BuildArg{Name: ba.Name, Value: ba.Value})\n\t\t}\n\t\tnoCache = d.build.Spec.Strategy.DockerStrategy.NoCache\n\t\tforcePull = d.build.Spec.Strategy.DockerStrategy.ForcePull\n\t}\n\tauth, err := d.setupPullSecret()\n\tif 
err != nil {\n\t\treturn err\n\t}\n\tif err := d.copySecrets(d.build.Spec.Source.Secrets, dir); err != nil {\n\t\treturn err\n\t}\n\tif err = d.copyConfigMaps(d.build.Spec.Source.ConfigMaps, dir); err != nil {\n\t\treturn err\n\t}\n\n\topts := docker.BuildImageOptions{\n\t\tName: tag,\n\t\tRmTmpContainer: true,\n\t\tForceRmTmpContainer: true,\n\t\tOutputStream: os.Stdout,\n\t\tDockerfile: dockerfilePath,\n\t\tNoCache: noCache,\n\t\tPull: forcePull,\n\t\tBuildArgs: buildArgs,\n\t}\n\tnetwork, resolvConfHostPath, err := getContainerNetworkConfig()\n\tif err != nil {\n\t\treturn err\n\t}\n\topts.NetworkMode = network\n\tif len(resolvConfHostPath) != 0 {\n\t\tcmd := exec.Command(\"chcon\", \"system_u:object_r:svirt_sandbox_file_t:s0\", \"/etc/resolv.conf\")\n\t\terr := cmd.Run()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unable to set permissions on /etc/resolv.conf: %v\", err)\n\t\t}\n\t\topts.BuildBinds = fmt.Sprintf(\"[\\\"%s:/etc/resolv.conf\\\"]\", resolvConfHostPath)\n\t}\n\t// Though we are capped on memory and cpu at the cgroup parent level,\n\t// some build containers care what their memory limit is so they can\n\t// adapt, thus we need to set the memory limit at the container level\n\t// too, so that information is available to them.\n\tif d.cgLimits != nil {\n\t\topts.Memory = d.cgLimits.MemoryLimitBytes\n\t\topts.Memswap = d.cgLimits.MemorySwap\n\t\topts.CgroupParent = d.cgLimits.Parent\n\t}\n\n\tif auth != nil {\n\t\topts.AuthConfigs = *auth\n\t}\n\n\tif s := d.build.Spec.Strategy.DockerStrategy; s != nil {\n\t\tif policy := s.ImageOptimizationPolicy; policy != nil {\n\t\t\tswitch *policy {\n\t\t\tcase buildapiv1.ImageOptimizationSkipLayers:\n\t\t\t\treturn buildDirectImage(dir, false, &opts)\n\t\t\tcase buildapiv1.ImageOptimizationSkipLayersAndWarn:\n\t\t\t\treturn buildDirectImage(dir, true, &opts)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn buildImage(d.dockerClient, dir, d.tar, &opts)\n}", "title": "" }, { "docid": "74ae00800ba8a25f6a7d2b01063938a9", "score": "0.53663325", "text": "func (b *Builder) BuildImage(contextPath, dockerfilePath string, options *types.ImageBuildOptions) error {\n\tif options == nil {\n\t\toptions = &types.ImageBuildOptions{}\n\t}\n\n\tctx := context.Background()\n\toutStream := command.NewOutStream(stdout)\n\tcontextDir, relDockerfile, err := build.GetContextFromLocalDir(contextPath, dockerfilePath)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\texcludes, err := build.ReadDockerignore(contextDir)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := build.ValidateContextDirectory(contextDir, excludes); err != nil {\n\t\treturn errors.Errorf(\"Error checking context: '%s'\", err)\n\t}\n\n\t// And canonicalize dockerfile name to a platform-independent one\n\tauthConfigs, _ := getAllAuthConfigs()\n\trelDockerfile, err = archive.CanonicalTarNameForPath(relDockerfile)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\texcludes = build.TrimBuildFilesFromExcludes(excludes, relDockerfile, false)\n\tbuildCtx, err := archive.TarWithOptions(contextDir, &archive.TarOptions{\n\t\tExcludePatterns: excludes,\n\t\tChownOpts: &idtools.IDPair{UID: 0, GID: 0},\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Setup an upload progress bar\n\tprogressOutput := streamformatter.NewProgressOutput(outStream)\n\tbody := progress.NewProgressReader(buildCtx, progressOutput, 0, \"\", \"Sending build context to Docker daemon\")\n\tresponse, err := b.client.ImageBuild(ctx, body, types.ImageBuildOptions{\n\t\tTags: []string{b.imageURL},\n\t\tDockerfile: relDockerfile,\n\t\tBuildArgs: 
options.BuildArgs,\n\t\tTarget: options.Target,\n\t\tNetworkMode: options.NetworkMode,\n\t\tAuthConfigs: authConfigs,\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer response.Body.Close()\n\n\terr = jsonmessage.DisplayJSONMessagesStream(response.Body, outStream, outStream.FD(), outStream.IsTerminal(), nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "f5a23e0dff3cd2bd47afb300481d605d", "score": "0.536048", "text": "func (d *DaemonBuilder) Build(ctx context.Context, imageName string) error {\n\tbuildCtx, err := CreateTar(d.ContextDirectory, d.Dockerfile)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer buildCtx.Close()\n\tif d.LoggerFactory == nil {\n\t\td.LoggerFactory = &logger.NullLogger{}\n\t}\n\n\tl := d.LoggerFactory.CreateBuildLogger(imageName)\n\n\tprogBuff := &logger.Wrapper{\n\t\tErr: false,\n\t\tLogger: l,\n\t}\n\n\tbuildBuff := &logger.Wrapper{\n\t\tErr: false,\n\t\tLogger: l,\n\t}\n\n\terrBuff := &logger.Wrapper{\n\t\tErr: true,\n\t\tLogger: l,\n\t}\n\n\t// Setup an upload progress bar\n\tprogressOutput := streamformatter.NewProgressOutput(progBuff)\n\n\tvar body io.Reader = progress.NewProgressReader(buildCtx, progressOutput, 0, \"\", \"Sending build context to Docker daemon\")\n\n\tlogrus.Infof(\"Building %s...\", imageName)\n\n\toutFd, isTerminalOut := term.GetFdInfo(os.Stdout)\n\tw := l.OutWriter()\n\tif w != nil {\n\t\toutFd, isTerminalOut = term.GetFdInfo(w)\n\t}\n\n\t// Convert map[string]*string to map[string]string\n\tlabels := make(map[string]string)\n\tfor lk, lv := range d.Labels {\n\t\tlabels[lk] = *lv\n\t}\n\n\tresponse, err := d.Client.ImageBuild(ctx, body, types.ImageBuildOptions{\n\t\tTags: []string{imageName},\n\t\tNoCache: d.NoCache,\n\t\tRemove: true,\n\t\tForceRemove: d.ForceRemove,\n\t\tPullParent: d.Pull,\n\t\tDockerfile: d.Dockerfile,\n\t\tAuthConfigs: d.AuthConfigs,\n\t\tBuildArgs: d.BuildArgs,\n\t\tCacheFrom: d.CacheFrom,\n\t\tLabels: labels,\n\t\tNetworkMode: d.Network,\n\t\tTarget: d.Target,\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = jsonmessage.DisplayJSONMessagesStream(response.Body, buildBuff, outFd, isTerminalOut, nil)\n\tif err != nil {\n\t\tif jerr, ok := err.(*jsonmessage.JSONError); ok {\n\t\t\t// If no error code is set, default to 1\n\t\t\tif jerr.Code == 0 {\n\t\t\t\tjerr.Code = 1\n\t\t\t}\n\t\t\terrBuff.Write([]byte(jerr.Error()))\n\t\t\treturn fmt.Errorf(\"Status: %s, Code: %d\", jerr.Message, jerr.Code)\n\t\t}\n\t}\n\treturn err\n}", "title": "" }, { "docid": "4326bb63cc05cb75814d89b8d974d03c", "score": "0.5337361", "text": "func (c *FunctionController) ensureK8sResources(funcObj *kubelessApi.Function) error {\n\tif len(funcObj.ObjectMeta.Labels) == 0 {\n\t\tfuncObj.ObjectMeta.Labels = make(map[string]string)\n\t}\n\tfuncObj.ObjectMeta.Labels[\"function\"] = funcObj.ObjectMeta.Name\n\n\tdeployment := v1beta1.Deployment{}\n\tif deploymentConfigData, ok := c.config.Data[\"deployment\"]; ok {\n\t\terr := yaml.Unmarshal([]byte(deploymentConfigData), &deployment)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"Error parsing Deployment data in ConfigMap kubeless-function-deployment-config: %v\", err)\n\t\t\treturn err\n\t\t}\n\t\terr = utils.MergeDeployments(&funcObj.Spec.Deployment, &deployment)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\" Error while merging function.Spec.Deployment and Deployment from ConfigMap: %v\", err)\n\t\t\treturn err\n\t\t}\n\t}\n\n\tor, err := utils.GetOwnerReference(funcKind, funcAPIVersion, funcObj.Name, funcObj.UID)\n\tif err != nil {\n\t\treturn 
err\n\t}\n\n\terr = utils.EnsureFuncConfigMap(c.clientset, funcObj, or, c.langRuntime)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = utils.EnsureFuncService(c.clientset, funcObj, or)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tprebuiltImage := \"\"\n\tif len(funcObj.Spec.Deployment.Spec.Template.Spec.Containers) > 0 && funcObj.Spec.Deployment.Spec.Template.Spec.Containers[0].Image != \"\" {\n\t\tprebuiltImage = funcObj.Spec.Deployment.Spec.Template.Spec.Containers[0].Image\n\t}\n\t// Skip image build step if using a custom runtime\n\tif prebuiltImage == \"\" {\n\t\tif c.config.Data[\"enable-build-step\"] == \"true\" {\n\t\t\tvar isBuilding bool\n\t\t\tprebuiltImage, isBuilding, err = c.startImageBuildJob(funcObj, or)\n\t\t\tif err != nil {\n\t\t\t\tlogrus.Errorf(\"Unable to build function: %v\", err)\n\t\t\t} else {\n\t\t\t\tif isBuilding {\n\t\t\t\t\tlogrus.Infof(\"Started build process for function %s\", funcObj.ObjectMeta.Name)\n\t\t\t\t} else {\n\t\t\t\t\tlogrus.Infof(\"Found existing image %s\", prebuiltImage)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t} else {\n\t\tlogrus.Infof(\"Skipping image-build step for %s\", funcObj.ObjectMeta.Name)\n\t}\n\n\terr = utils.EnsureFuncDeployment(c.clientset, funcObj, or, c.langRuntime, prebuiltImage, c.config.Data[\"provision-image\"], c.imagePullSecrets)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif funcObj.Spec.HorizontalPodAutoscaler.Name != \"\" && funcObj.Spec.HorizontalPodAutoscaler.Spec.ScaleTargetRef.Name != \"\" {\n\t\tfuncObj.Spec.HorizontalPodAutoscaler.OwnerReferences = or\n\t\tif funcObj.Spec.HorizontalPodAutoscaler.Spec.Metrics[0].Type == v2beta1.ObjectMetricSourceType {\n\t\t\t// A service monitor is needed when the metric is an object\n\t\t\terr = utils.CreateServiceMonitor(*c.smclient, funcObj, funcObj.ObjectMeta.Namespace, or)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\terr = utils.CreateAutoscale(c.clientset, funcObj.Spec.HorizontalPodAutoscaler)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\t// HorizontalPodAutoscaler doesn't exists, try to delete if it already existed\n\t\terr = c.deleteAutoscale(funcObj.ObjectMeta.Namespace, funcObj.ObjectMeta.Name)\n\t\tif err != nil && !k8sErrors.IsNotFound(err) {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "f1605f55d4ca6ea3902b5bee3f48931f", "score": "0.5336982", "text": "func (t *Templates) GetDockerfileTemplate() ([]byte, error) {\n\ttmpl := fmt.Sprintf(\"templates/create/template.%s.Dockerfile\", t.GetBuildDriver())\n\treturn t.fs.ReadFile(tmpl)\n}", "title": "" }, { "docid": "7692b3ccf0f6134e2836603e2b43883c", "score": "0.5334955", "text": "func (bc *OktetoBuilder) Build(ctx context.Context, options *types.BuildOptions) error {\n\tif utils.LoadBoolean(constants.OktetoDeployRemote) {\n\t\t// Since the local build has already been built,\n\t\t// we have the environment variables set and we can skip this code\n\t\treturn nil\n\t}\n\tif options.File != \"\" {\n\t\tworkdir := model.GetWorkdirFromManifestPath(options.File)\n\t\tif err := os.Chdir(workdir); err != nil {\n\t\t\treturn err\n\t\t}\n\t\toptions.File = model.GetManifestPathFromWorkdir(options.File, workdir)\n\t}\n\tif options.Manifest.Name == \"\" {\n\t\twd, err := os.Getwd()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tc, _, err := okteto.NewK8sClientProvider().Provide(okteto.Context().Cfg)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tinferer := devenvironment.NewNameInferer(c)\n\t\toptions.Manifest.Name = inferer.InferName(ctx, wd, 
okteto.Context().Namespace, options.File)\n\t}\n\ttoBuildSvcs := getToBuildSvcs(options.Manifest, options)\n\tif err := validateOptions(options.Manifest, toBuildSvcs, options); err != nil {\n\t\tif errors.Is(err, oktetoErrors.ErrNoServicesToBuildDefined) {\n\t\t\toktetoLog.Infof(\"skipping BuildV2 due to not having any svc to build\")\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t}\n\n\tbuildManifest := options.Manifest.Build\n\n\toktetoLog.Infof(\"Images to build: [%s]\", strings.Join(toBuildSvcs, \", \"))\n\tfor len(bc.builtImages) != len(toBuildSvcs) {\n\t\tfor _, svcToBuild := range toBuildSvcs {\n\t\t\tif bc.builtImages[svcToBuild] {\n\t\t\t\toktetoLog.Infof(\"skipping image '%s' due to being already built\")\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif !bc.areImagesBuilt(buildManifest[svcToBuild].DependsOn) {\n\t\t\t\toktetoLog.Infof(\"image '%s' can't be deployed because at least one of its dependent images(%s) are not built\", svcToBuild, strings.Join(buildManifest[svcToBuild].DependsOn, \", \"))\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif options.EnableStages {\n\t\t\t\toktetoLog.SetStage(fmt.Sprintf(\"Building service %s\", svcToBuild))\n\t\t\t}\n\n\t\t\tbuildSvcInfo := buildManifest[svcToBuild]\n\n\t\t\t// We only check that the image is built in the global registry if the noCache option is not set\n\t\t\tif !options.NoCache && bc.Config.IsCleanProject() {\n\t\t\t\timageChecker := getImageChecker(buildSvcInfo, bc.Config, bc.Registry)\n\t\t\t\tif imageWithDigest, isBuilt := imageChecker.checkIfCommitHashIsBuilt(options.Manifest.Name, svcToBuild, buildSvcInfo); isBuilt {\n\t\t\t\t\toktetoLog.Information(\"Skipping build of '%s' image because it's already built for commit %s\", svcToBuild, bc.Config.GetGitCommit())\n\t\t\t\t\t// if the built image belongs to global registry we clone it to the dev registry\n\t\t\t\t\t// so that in can be used in dev containers (i.e. 
okteto up)\n\t\t\t\t\tif bc.Registry.IsGlobalRegistry(imageWithDigest) {\n\t\t\t\t\t\toktetoLog.Debugf(\"Copying image '%s' from global to personal registry\", svcToBuild)\n\t\t\t\t\t\ttag := bc.Config.GetBuildHash(buildSvcInfo)\n\t\t\t\t\t\tdevImage, err := bc.Registry.CloneGlobalImageToDev(imageWithDigest, tag)\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\treturn err\n\t\t\t\t\t\t}\n\t\t\t\t\t\timageWithDigest = devImage\n\t\t\t\t\t}\n\n\t\t\t\t\tbc.SetServiceEnvVars(svcToBuild, imageWithDigest)\n\t\t\t\t\tbc.builtImages[svcToBuild] = true\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif !okteto.Context().IsOkteto && buildSvcInfo.Image == \"\" {\n\t\t\t\treturn fmt.Errorf(\"'build.%s.image' is required if your cluster doesn't have Okteto installed\", svcToBuild)\n\t\t\t}\n\n\t\t\timageTag, err := bc.buildService(ctx, options.Manifest, svcToBuild, options)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"error building service '%s': %w\", svcToBuild, err)\n\t\t\t}\n\t\t\tbc.SetServiceEnvVars(svcToBuild, imageTag)\n\t\t\tbc.builtImages[svcToBuild] = true\n\t\t}\n\t}\n\tif options.EnableStages {\n\t\toktetoLog.SetStage(\"\")\n\t}\n\treturn options.Manifest.ExpandEnvVars()\n}", "title": "" }, { "docid": "c5a91ee70e3173b314ca62e4c0b895e7", "score": "0.5331954", "text": "func (d *ImageManager) BuildImage(dockerfileDirPath, name string, stdoutWriter io.WriteCloser) error {\n\n\tbio := dockerclient.BuildImageOptions{\n\t\tName: name,\n\t\tNoCache: true,\n\t\tContextDir: filepath.Dir(dockerfileDirPath),\n\t\tOutputStream: stdoutWriter,\n\t}\n\n\tfor _, envVar := range []string{\"http_proxy\", \"https_proxy\", \"no_proxy\"} {\n\t\tfor _, name := range []string{strings.ToLower(envVar), strings.ToUpper(envVar)} {\n\t\t\tif val, ok := os.LookupEnv(name); ok {\n\t\t\t\tbio.BuildArgs = append(bio.BuildArgs, dockerclient.BuildArg{\n\t\t\t\t\tName: name,\n\t\t\t\t\tValue: val,\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n\n\tif stdoutWriter != nil {\n\t\tdefer func() {\n\t\t\tstdoutWriter.Close()\n\t\t}()\n\t}\n\n\tif err := d.client.BuildImage(bio); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "3e67d2a92efcadac3320071a39a775c5", "score": "0.5327137", "text": "func (cmd BuildFlags) Build(contextDir string) error {\n\tlog.Infof(\"Starting Makisu build (version=%s)\", utils.BuildHash)\n\n\timageName, err := cmd.getTargetImageName()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to get target image name: %s\", err)\n\t}\n\n\t// Convert context dir to absolute path.\n\tcontextDir, err = filepath.Abs(contextDir)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to resolve context dir: %s\", err)\n\t}\n\n\t// Create BuildContext.\n\tbuildContext, err := context.NewBuildContext(\"/\", contextDir, cmd.imageStore)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to create initial build context: %s\", err)\n\t}\n\tdefer buildContext.Cleanup()\n\n\t// Optionally remove everything before and after build.\n\tif cmd.AllowModifyFS {\n\t\tbuildContext.MemFS.Remove()\n\t\tdefer buildContext.MemFS.Remove()\n\t}\n\n\t// Create and execute build plan.\n\tbuildPlan, err := cmd.createBuildPlan(buildContext, imageName)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to create build plan: %s\", err)\n\t}\n\tif _, err = buildPlan.Execute(); err != nil {\n\t\treturn fmt.Errorf(\"failed to execute build plan: %s\", err)\n\t}\n\tlog.Infof(\"Successfully built image %s\", imageName.ShortName())\n\n\t// Push image to registries that were specified in the --push flag.\n\tfor _, registry := range 
cmd.GetTargetRegistries() {\n\t\ttarget := imageName.WithRegistry(registry)\n\t\tif err := cmd.pushImage(target); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to push image: %s\", err)\n\t\t}\n\t}\n\n\t// Optionally save image as a tar file.\n\tif cmd.Destination != \"\" {\n\t\tif err := cmd.saveImage(imageName); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to save image: %s\", err)\n\t\t}\n\t}\n\n\t// Optionally load image to local docker daemon.\n\tif cmd.DoLoad {\n\t\tif err := cmd.loadImage(imageName); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to load image: %s\", err)\n\t\t}\n\t}\n\n\tlog.Infof(\"Finished building %s\", imageName.ShortName())\n\treturn nil\n}", "title": "" }, { "docid": "fcaa7f60af5a4216c5633d71d357e37f", "score": "0.5325162", "text": "func (s *K8sService) CreateBuildFile(bf *saasclient.BuildFile) (*corev1.ConfigMap, error) {\n\tname := strings.ToLower(bf.Name)\n\tlabels := labelsForDevEnv(name)\n\tcm := &corev1.ConfigMap{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: name,\n\t\t\tLabels: labels,\n\t\t},\n\t\tData: map[string]string{\"Template\": bf.Value},\n\t}\n\treturn s.k8sClientSet.CoreV1().ConfigMaps(cndeNamespace).Create(cm)\n}", "title": "" }, { "docid": "d0c8bc8e87043b9e1845825938a7a65d", "score": "0.5312941", "text": "func (data *GolangSourceCodeBindingTemplate) GenerateDockerFiles() error {\r\n\tfile, err := os.OpenFile(\"Dockerfile\", os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0666)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\ttmpl := template.New(\"DockerFile\")\r\n\ttmpl, err = tmpl.Parse(GOLANGSOURCECODE.dockerFile)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\terr = tmpl.Execute(file, data)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\tfile.Close()\r\n\r\n\tfile, err = os.OpenFile(\".dockerignore\", os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0666)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\ttmpl2 := template.New(\".dockerignore\")\r\n\ttmpl2, err = tmpl2.Parse(GOLANGSOURCECODE.dockerignoreFile)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\terr = tmpl2.Execute(file, nil)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\tfile.Close()\r\n\treturn nil\r\n}", "title": "" }, { "docid": "63b224fb62bb5fa8590906fd4f785bfb", "score": "0.53090507", "text": "func (image *Image) Build() error {\n\t// Copy base image and delete after build.\n\tcopy := exec.Command(\"cp\", \"-Ri\", ImageDir+image.BaseImagePath, image.ID)\n\tif err := copy.Run(); err != nil {\n\t\treturn err\n\t}\n\tdefer func() {\n\t\texec.Command(\"rm\", \"-rf\", image.ID).Run()\n\t}()\n\n\t// Write the source files to the new Image\n\tsource := image.Input.Source\n\tsourcefile, err := os.Create(image.ID + \"/source\" + image.Ext)\n\tif err != nil {\n\t\treturn err\n\t}\n\tsourcefile.WriteString(source)\n\tsourcefile.Close()\n\n\t// Write the inputs to the new Image.\n\tfor i, v := range image.Input.Stdin {\n\t\tfilename := fmt.Sprintf(\"/%d.in\", i+1)\n\t\tinputfile, err := os.Create(image.ID + filename)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tinputfile.WriteString(v + \"\\n\")\n\t\tinputfile.Close()\n\t}\n\n\tbuild := exec.Command(\"docker\", \"build\", \"-t\", image.ID, image.ID)\n\tif err = build.Run(); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "14908f1b578fb5f3c88b33ee8cc7f434", "score": "0.53065497", "text": "func (data *GolangExecutableBindingTemplate) GenerateDockerFiles() error {\r\n\tfile, err := os.OpenFile(\"Dockerfile\", os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0666)\r\n\tif err != nil {\r\n\t\treturn 
err\r\n\t}\r\n\ttmpl := template.New(\"DockerFile\")\r\n\ttmpl, err = tmpl.Parse(GOLANGEXECUTABLE.dockerFile)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\terr = tmpl.Execute(file, data)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\tfile.Close()\r\n\r\n\tfile, err = os.OpenFile(\".dockerignore\", os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0666)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\ttmpl2 := template.New(\".dockerignore\")\r\n\ttmpl2, err = tmpl2.Parse(GOLANGEXECUTABLE.dockerignoreFile)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\terr = tmpl2.Execute(file, nil)\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\tfile.Close()\r\n\treturn nil\r\n}", "title": "" }, { "docid": "8a594744e26708bbeed35f73ffc7ace2", "score": "0.53051907", "text": "func (b GolangCrossBuilder) Build() error {\n\tfmt.Printf(\">> %v: Building for %v\\n\", b.Target, b.Platform)\n\n\trepoInfo, err := GetProjectRepoInfo()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to determine repo root and package sub dir\")\n\t}\n\n\tmountPoint := filepath.ToSlash(filepath.Join(\"/go\", \"src\", repoInfo.RootImportPath))\n\t// use custom dir for build if given, subdir if not:\n\tcwd := repoInfo.SubDir\n\tif b.InDir != \"\" {\n\t\tcwd = b.InDir\n\t}\n\tworkDir := filepath.ToSlash(filepath.Join(mountPoint, cwd))\n\n\tbuildCmd, err := filepath.Rel(workDir, filepath.Join(mountPoint, repoInfo.SubDir, \"build/mage-linux-amd64\"))\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to determine mage-linux-amd64 relative path\")\n\t}\n\n\tdockerRun := sh.RunCmd(\"docker\", \"run\")\n\timage, err := b.ImageSelector(b.Platform)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to determine golang-crossbuild image tag\")\n\t}\n\tverbose := \"\"\n\tif mg.Verbose() {\n\t\tverbose = \"true\"\n\t}\n\tvar args []string\n\tif runtime.GOOS != \"windows\" {\n\t\targs = append(args,\n\t\t\t\"--env\", \"EXEC_UID=\"+strconv.Itoa(os.Getuid()),\n\t\t\t\"--env\", \"EXEC_GID=\"+strconv.Itoa(os.Getgid()),\n\t\t)\n\t}\n\tif versionQualified {\n\t\targs = append(args, \"--env\", \"VERSION_QUALIFIER=\"+versionQualifier)\n\t}\n\targs = append(args,\n\t\t\"--rm\",\n\t\t\"--env\", \"MAGEFILE_VERBOSE=\"+verbose,\n\t\t\"--env\", \"MAGEFILE_TIMEOUT=\"+EnvOr(\"MAGEFILE_TIMEOUT\", \"\"),\n\t\t\"-v\", repoInfo.RootDir+\":\"+mountPoint,\n\t\t\"-w\", workDir,\n\t\timage,\n\t\t\"--build-cmd\", buildCmd+\" \"+b.Target,\n\t\t\"-p\", b.Platform,\n\t)\n\n\treturn dockerRun(args...)\n}", "title": "" }, { "docid": "db3de0c3b538a4df5206954d12890ab4", "score": "0.5301314", "text": "func buildBundle(builderConfig *BuilderConfig, projectConfig *ProjectConfig, buildConfig *BuildConfig) error {\n\t// create temporary directory\n\tsafeName := strings.Replace(projectConfig.Name, \"/\", \"--\", -1)\n\ttempDir := fmt.Sprintf(\"/tmp/%s--%s\", safeName, projectConfig.GitShortRef)\n\tif _, err := os.Stat(tempDir); err == nil {\n\t\tif err := os.RemoveAll(tempDir); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif err := os.Mkdir(tempDir, 0755); err != nil {\n\t\treturn err\n\t}\n\n\ttag := fmt.Sprintf(\"builder/%s:%s\", projectConfig.Name, projectConfig.GitShortRef)\n\targs := fmt.Sprintf(\"build -f %s -t %s .\", buildConfig.Dockerfile, tag)\n\tif _, err := runCommand(\"docker\", strings.Split(args, \" \")); err != nil {\n\t\treturn err\n\t}\n\n\t// if a `build` script exists, attempt to execute it, erring out when it doesn't exist\n\tif _, err := os.Stat(path.Join(projectConfig.Dir, \"build\")); err == nil {\n\t\targs = fmt.Sprintf(\"run -v 
%s:/output -t %s /build\", tempDir, tag)\n\t\tif _, err := runCommand(\"docker\", strings.Split(args, \" \")); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif !projectConfig.Push {\n\t\treturn nil\n\t}\n\n\targs = fmt.Sprintf(\"-dir %s -version %s -latest=true -project %s -gcs-prefix %s -url-prefix %s\",\n\t\ttempDir,\n\t\tprojectConfig.GitShortRef,\n\t\tprojectConfig.Name,\n\t\tbuilderConfig.ArtifactorGCSPrefix,\n\t\tbuilderConfig.ArtifactorURLPrefix)\n\n\tif _, err := runCommand(\"artifactor\", strings.Split(args, \" \")); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "e83f2c91a2e8751ea7f6e3eb89599c25", "score": "0.52966386", "text": "func buildFeatures(c *cli.Context) error {\n\treturn ErrNotImplemented\n\t// TODO: In case of -f/--feature-set option slice of features\n\t// should be used instead of Pazuzufile\n\n\t// log.Print(\"Building Dockerfile out of the features\")\n\t// // TODO: check number of c.NArgs() and throw error if nothing was passed\n\t// if c.NArg() == 0 {\n\n\t// \treturn errors.New(ERROR_NO_VALID_PAZUZU_FILE)\n\n\t// }\n\n\t// return nil\n}", "title": "" }, { "docid": "dfbdaa42d1b9e63dc9ec4b3ec92a389d", "score": "0.5287982", "text": "func (h *handler) build(id, url, call string) ([]byte, error) {\n\timportElems := strings.Split(url, \"/\")\n\ttd := &tmplData{\n\t\tPKGName: importElems[len(importElems)-1],\n\t\tImportPath: url,\n\t\tCall: call,\n\t}\n\tt, err := template.New(url).Parse(mainTmpl)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcmdDir := fmt.Sprintf(cmdDirPath, h.conf.Jails.BaseJailDir, url)\n\tif !utils.Exists(cmdDir) {\n\t\tif err := os.Mkdir(cmdDir, os.ModePerm); err != nil {\n\t\t\th.logger.Log(\"error\", err.Error())\n\t\t}\n\t}\n\n\tmainFile := fmt.Sprintf(mainFilePath, h.conf.Jails.BaseJailDir, url)\n\tcode, err := os.Create(mainFile)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer code.Close()\n\n\tif err = t.Execute(code, td); err != nil {\n\t\treturn nil, err\n\t}\n\tbuildCommand := []string{\n\t\tjailGoBin,\n\t\t\"build\",\n\t\t\"-o\",\n\t\t\"/tmp/\" + id,\n\t\t\"-v\",\n\t\turl + \"/cmd\",\n\t}\n\tfullBuildArgs := []string{\n\t\t\"-c\",\n\t\t\"-n\",\n\t\tid,\n\t\t\"ip4=disable\",\n\t\t\"exec.timeout=\" + h.conf.Jails.BuildTimeout,\n\t\t\"path=\" + h.conf.Jails.BaseJailDir + \"/build\",\n\t\t\"host.hostname=build\",\n\t\t\"mount.devfs\",\n\t}\n\tfullBuildArgs = append(fullBuildArgs, buildCommand...)\n\tbuildCmd := exec.Command(\"jail\", fullBuildArgs...)\n\n\treturn buildCmd.CombinedOutput()\n}", "title": "" }, { "docid": "c5d6a677173fe8738a737fd6185a6fbd", "score": "0.5285337", "text": "func TestDockerfilePath(t *testing.T) {\n\ttests := []struct {\n\t\tcontextDir string\n\t\tdockerfilePath string\n\t\tdockerStrategy *buildapiv1.DockerBuildStrategy\n\t}{\n\t\t// default Dockerfile path\n\t\t{\n\t\t\tdockerfilePath: \"Dockerfile\",\n\t\t\tdockerStrategy: &buildapiv1.DockerBuildStrategy{},\n\t\t},\n\t\t// custom Dockerfile path in the root context\n\t\t{\n\t\t\tdockerfilePath: \"mydockerfile\",\n\t\t\tdockerStrategy: &buildapiv1.DockerBuildStrategy{\n\t\t\t\tDockerfilePath: \"mydockerfile\",\n\t\t\t},\n\t\t},\n\t\t// custom Dockerfile path in a sub directory\n\t\t{\n\t\t\tdockerfilePath: \"dockerfiles/mydockerfile\",\n\t\t\tdockerStrategy: &buildapiv1.DockerBuildStrategy{\n\t\t\t\tDockerfilePath: \"dockerfiles/mydockerfile\",\n\t\t\t},\n\t\t},\n\t\t// custom Dockerfile path in a sub directory\n\t\t// with a contextDir\n\t\t{\n\t\t\tcontextDir: \"somedir\",\n\t\t\tdockerfilePath: 
\"dockerfiles/mydockerfile\",\n\t\t\tdockerStrategy: &buildapiv1.DockerBuildStrategy{\n\t\t\t\tDockerfilePath: \"dockerfiles/mydockerfile\",\n\t\t\t},\n\t\t},\n\t}\n\n\tfrom := \"FROM openshift/origin-base\"\n\texpected := []string{\n\t\tfrom,\n\t\t// expected env variables\n\t\t\"\\\"OPENSHIFT_BUILD_NAME\\\"=\\\"name\\\"\",\n\t\t\"\\\"OPENSHIFT_BUILD_NAMESPACE\\\"=\\\"namespace\\\"\",\n\t\t\"\\\"OPENSHIFT_BUILD_SOURCE\\\"=\\\"http://github.com/openshift/origin.git\\\"\",\n\t\t\"\\\"OPENSHIFT_BUILD_COMMIT\\\"=\\\"commitid\\\"\",\n\t\t// expected labels\n\t\t\"\\\"io.openshift.build.commit.author\\\"=\\\"test user \\\\u003ctest@email.com\\\\u003e\\\"\",\n\t\t\"\\\"io.openshift.build.commit.date\\\"=\\\"date\\\"\",\n\t\t\"\\\"io.openshift.build.commit.id\\\"=\\\"commitid\\\"\",\n\t\t\"\\\"io.openshift.build.commit.ref\\\"=\\\"ref\\\"\",\n\t\t\"\\\"io.openshift.build.commit.message\\\"=\\\"message\\\"\",\n\t\t\"\\\"io.openshift.build.name\\\"=\\\"name\\\"\",\n\t\t\"\\\"io.openshift.build.namespace\\\"=\\\"namespace\\\"\",\n\t}\n\n\tfor _, test := range tests {\n\t\tbuildDir, err := ioutil.TempDir(\"\", \"dockerfile-path\")\n\t\tif err != nil {\n\t\t\tt.Errorf(\"failed to create tmpdir: %v\", err)\n\t\t\tcontinue\n\t\t}\n\t\tdefer func() {\n\t\t\tif err := os.RemoveAll(buildDir); err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\t\t}()\n\n\t\tabsoluteDockerfilePath := filepath.Join(buildDir, test.contextDir, test.dockerfilePath)\n\t\tif err = os.MkdirAll(filepath.Dir(absoluteDockerfilePath), os.FileMode(0750)); err != nil {\n\t\t\tt.Errorf(\"failed to create directory %s: %v\", filepath.Dir(absoluteDockerfilePath), err)\n\t\t\tcontinue\n\t\t}\n\t\tif err = ioutil.WriteFile(absoluteDockerfilePath, []byte(from), os.FileMode(0644)); err != nil {\n\t\t\tt.Errorf(\"failed to write dockerfile to %s: %v\", absoluteDockerfilePath, err)\n\t\t\tcontinue\n\t\t}\n\n\t\tbuild := &buildapiv1.Build{\n\t\t\tSpec: buildapiv1.BuildSpec{\n\t\t\t\tCommonSpec: buildapiv1.CommonSpec{\n\t\t\t\t\tSource: buildapiv1.BuildSource{\n\t\t\t\t\t\tGit: &buildapiv1.GitBuildSource{\n\t\t\t\t\t\t\tURI: \"http://github.com/openshift/origin.git\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\tContextDir: test.contextDir,\n\t\t\t\t\t},\n\t\t\t\t\tStrategy: buildapiv1.BuildStrategy{\n\t\t\t\t\t\tDockerStrategy: test.dockerStrategy,\n\t\t\t\t\t},\n\t\t\t\t\tOutput: buildapiv1.BuildOutput{\n\t\t\t\t\t\tTo: &corev1.ObjectReference{\n\t\t\t\t\t\t\tKind: \"DockerImage\",\n\t\t\t\t\t\t\tName: \"test/test-result:latest\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\t\tbuild.Name = \"name\"\n\t\tbuild.Namespace = \"namespace\"\n\n\t\tsourceInfo := &git.SourceInfo{}\n\t\tsourceInfo.AuthorName = \"test user\"\n\t\tsourceInfo.AuthorEmail = \"test@email.com\"\n\t\tsourceInfo.Date = \"date\"\n\t\tsourceInfo.CommitID = \"commitid\"\n\t\tsourceInfo.Ref = \"ref\"\n\t\tsourceInfo.Message = \"message\"\n\t\tdockerClient := &FakeDocker{\n\t\t\tbuildImageFunc: func(opts docker.BuildImageOptions) error {\n\t\t\t\tif opts.Dockerfile != test.dockerfilePath {\n\t\t\t\t\tt.Errorf(\"Unexpected dockerfile path: %s (expected: %s)\", opts.Dockerfile, test.dockerfilePath)\n\t\t\t\t}\n\t\t\t\treturn nil\n\t\t\t},\n\t\t}\n\n\t\tdockerBuilder := &DockerBuilder{\n\t\t\tdockerClient: dockerClient,\n\t\t\tbuild: build,\n\t\t\ttar: tar.New(s2ifs.NewFileSystem()),\n\t\t}\n\n\t\t// this will validate that the Dockerfile is readable\n\t\t// and append some labels to the Dockerfile\n\t\tif err = addBuildParameters(buildDir, build, sourceInfo); err != nil 
{\n\t\t\tt.Errorf(\"failed to add build parameters: %v\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\t// check that our Dockerfile has been modified\n\t\tdockerfileData, err := ioutil.ReadFile(absoluteDockerfilePath)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"failed to read dockerfile %s: %v\", absoluteDockerfilePath, err)\n\t\t\tcontinue\n\t\t}\n\t\tfor _, value := range expected {\n\t\t\tif !strings.Contains(string(dockerfileData), value) {\n\t\t\t\tt.Errorf(\"Updated Dockerfile content does not contain expected value:\\n%s\\n\\nUpdated content:\\n%s\\n\", value, string(dockerfileData))\n\n\t\t\t}\n\t\t}\n\n\t\t// check that the docker client is called with the right Dockerfile parameter\n\t\tif err = dockerBuilder.dockerBuild(buildDir, \"\"); err != nil {\n\t\t\tt.Errorf(\"failed to build: %v\", err)\n\t\t\tcontinue\n\t\t}\n\t\tos.RemoveAll(buildDir)\n\t}\n}", "title": "" }, { "docid": "37386eba9c4e9256f8703d4d002366e6", "score": "0.5276583", "text": "func TestDockerfileFromScratch(t *testing.T) {\n\tdockerFile := `FROM scratch\nUSER 1001`\n\n\tdockerClient := &FakeDocker{\n\t\tbuildImageFunc: func(opts docker.BuildImageOptions) error {\n\t\t\treturn nil\n\t\t},\n\t\tpullImageFunc: func(opts docker.PullImageOptions, auth docker.AuthConfiguration) error {\n\t\t\tif opts.Repository == \"scratch\" && opts.Registry == \"\" {\n\t\t\t\treturn fmt.Errorf(\"cannot pull scratch\")\n\t\t\t}\n\t\t\treturn nil\n\t\t},\n\t}\n\n\tbuild := &buildapiv1.Build{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: \"buildid\",\n\t\t\tNamespace: \"default\",\n\t\t},\n\t\tSpec: buildapiv1.BuildSpec{\n\t\t\tCommonSpec: buildapiv1.CommonSpec{\n\t\t\t\tSource: buildapiv1.BuildSource{\n\t\t\t\t\tContextDir: \"\",\n\t\t\t\t\tDockerfile: &dockerFile,\n\t\t\t\t},\n\t\t\t\tStrategy: buildapiv1.BuildStrategy{\n\t\t\t\t\tDockerStrategy: &buildapiv1.DockerBuildStrategy{\n\t\t\t\t\t\tDockerfilePath: \"\",\n\t\t\t\t\t\tFrom: &corev1.ObjectReference{\n\t\t\t\t\t\t\tKind: \"DockerImage\",\n\t\t\t\t\t\t\tName: \"scratch\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tOutput: buildapiv1.BuildOutput{\n\t\t\t\t\tTo: &corev1.ObjectReference{\n\t\t\t\t\t\tKind: \"ImageStreamTag\",\n\t\t\t\t\t\tName: \"scratch\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\tclient := buildfake.Clientset{}\n\n\tbuildDir, err := ioutil.TempDir(\"\", \"dockerfile-path\")\n\tif err != nil {\n\t\tt.Errorf(\"failed to create tmpdir: %v\", err)\n\t}\n\n\tdockerBuilder := &DockerBuilder{\n\t\tclient: client.Build().Builds(\"\"),\n\t\tbuild: build,\n\t\tdockerClient: dockerClient,\n\t\ttar: tar.New(s2ifs.NewFileSystem()),\n\t\tinputDir: buildDir,\n\t}\n\tif err := ManageDockerfile(buildDir, build); err != nil {\n\t\tt.Errorf(\"failed to manage the dockerfile: %v\", err)\n\t}\n\tif err := dockerBuilder.Build(); err != nil {\n\t\tif strings.Contains(err.Error(), \"cannot pull scratch\") {\n\t\t\tt.Errorf(\"Docker build should not have attempted to pull from scratch\")\n\t\t} else {\n\t\t\tt.Errorf(\"Received unexpected error: %v\", err)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "c886f2d775fa9bc88db54324fad08d42", "score": "0.5273832", "text": "func containsDockerfile(dockerdir string) (err error) {\n\tvar dockerDir, dockerfile string\n\tvar file *os.File\n\tdockerDir, err = filepath.Abs(dockerdir)\n\tif err != nil {\n\t\treturn\n\t}\n\tdockerfile = filepath.Join(dockerDir, \"Dockerfile\")\n\tfile, err = os.Open(dockerfile)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = file.Close()\n\tif err != nil {\n\t\treturn\n\t}\n\treturn\n}", "title": "" }, { "docid": 
"df9b563d63aabac28b930ce28e7c01fe", "score": "0.5271194", "text": "func AddDockerfileToBuildContext(dockerfileCtx io.ReadCloser, buildCtx io.ReadCloser) (io.ReadCloser, string, error) {\n\tfile, err := io.ReadAll(dockerfileCtx)\n\tdockerfileCtx.Close()\n\tif err != nil {\n\t\treturn nil, \"\", err\n\t}\n\tnow := time.Now()\n\trandomName := \".dockerfile.\" + stringid.GenerateRandomID()[:20]\n\n\tbuildCtx = archive.ReplaceFileTarWrapper(buildCtx, map[string]archive.TarModifierFunc{\n\t\t// Add the dockerfile with a random filename\n\t\trandomName: func(_ string, _ *tar.Header, _ io.Reader) (*tar.Header, []byte, error) {\n\t\t\theader := &tar.Header{\n\t\t\t\tName: randomName,\n\t\t\t\tMode: 0o600,\n\t\t\t\tModTime: now,\n\t\t\t\tTypeflag: tar.TypeReg,\n\t\t\t\tAccessTime: now,\n\t\t\t\tChangeTime: now,\n\t\t\t}\n\t\t\treturn header, file, nil\n\t\t},\n\t\t// Update .dockerignore to include the random filename\n\t\t\".dockerignore\": func(_ string, h *tar.Header, content io.Reader) (*tar.Header, []byte, error) {\n\t\t\tif h == nil {\n\t\t\t\th = &tar.Header{\n\t\t\t\t\tName: \".dockerignore\",\n\t\t\t\t\tMode: 0o600,\n\t\t\t\t\tModTime: now,\n\t\t\t\t\tTypeflag: tar.TypeReg,\n\t\t\t\t\tAccessTime: now,\n\t\t\t\t\tChangeTime: now,\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tb := &bytes.Buffer{}\n\t\t\tif content != nil {\n\t\t\t\tif _, err := b.ReadFrom(content); err != nil {\n\t\t\t\t\treturn nil, nil, err\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tb.WriteString(\".dockerignore\")\n\t\t\t}\n\t\t\tb.WriteString(\"\\n\" + randomName + \"\\n\")\n\t\t\treturn h, b.Bytes(), nil\n\t\t},\n\t})\n\treturn buildCtx, randomName, nil\n}", "title": "" }, { "docid": "e3815de24c99a4ae32485b7afa63caba", "score": "0.52699333", "text": "func CreateTar(contextDirectory, dockerfile string) (io.ReadCloser, error) {\n\t// This code was ripped off from docker/api/client/build.go\n\tdockerfileName := filepath.Join(contextDirectory, dockerfile)\n\n\tabsContextDirectory, err := filepath.Abs(contextDirectory)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfilename := dockerfileName\n\n\tif dockerfile == \"\" {\n\t\t// No -f/--file was specified so use the default\n\t\tdockerfileName = DefaultDockerfileName\n\t\tfilename = filepath.Join(absContextDirectory, dockerfileName)\n\n\t\t// Just to be nice ;-) look for 'dockerfile' too but only\n\t\t// use it if we found it, otherwise ignore this check\n\t\tif _, err = os.Lstat(filename); os.IsNotExist(err) {\n\t\t\ttmpFN := path.Join(absContextDirectory, strings.ToLower(dockerfileName))\n\t\t\tif _, err = os.Lstat(tmpFN); err == nil {\n\t\t\t\tdockerfileName = strings.ToLower(dockerfileName)\n\t\t\t\tfilename = tmpFN\n\t\t\t}\n\t\t}\n\t}\n\n\torigDockerfile := dockerfileName // used for error msg\n\tif filename, err = filepath.Abs(filename); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Now reset the dockerfileName to be relative to the build context\n\tdockerfileName, err = filepath.Rel(absContextDirectory, filename)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// And canonicalize dockerfile name to a platform-independent one\n\tdockerfileName = archive.CanonicalTarNameForPath(dockerfileName)\n\n\tif _, err = os.Lstat(filename); os.IsNotExist(err) {\n\t\treturn nil, fmt.Errorf(\"Cannot locate Dockerfile: %s\", origDockerfile)\n\t}\n\tvar includes = []string{\".\"}\n\tvar excludes []string\n\n\tdockerIgnorePath := path.Join(contextDirectory, \".dockerignore\")\n\tdockerIgnore, err := os.Open(dockerIgnorePath)\n\tif err != nil {\n\t\tif !os.IsNotExist(err) {\n\t\t\treturn nil, 
err\n\t\t}\n\t\tlogrus.Warnf(\"Error while reading .dockerignore (%s) : %s\", dockerIgnorePath, err.Error())\n\t\texcludes = make([]string, 0)\n\t} else {\n\t\texcludes, err = dockerignore.ReadAll(dockerIgnore)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// If .dockerignore mentions .dockerignore or the Dockerfile\n\t// then make sure we send both files over to the daemon\n\t// because Dockerfile is, obviously, needed no matter what, and\n\t// .dockerignore is needed to know if either one needs to be\n\t// removed. The deamon will remove them for us, if needed, after it\n\t// parses the Dockerfile.\n\tkeepThem1, _ := fileutils.Matches(\".dockerignore\", excludes)\n\tkeepThem2, _ := fileutils.Matches(dockerfileName, excludes)\n\tif keepThem1 || keepThem2 {\n\t\tincludes = append(includes, \".dockerignore\", dockerfileName)\n\t}\n\n\tif err := build.ValidateContextDirectory(contextDirectory, excludes); err != nil {\n\t\treturn nil, fmt.Errorf(\"error checking context is accessible: '%s', please check permissions and try again\", err)\n\t}\n\n\toptions := &archive.TarOptions{\n\t\tCompression: archive.Uncompressed,\n\t\tExcludePatterns: excludes,\n\t\tIncludeFiles: includes,\n\t}\n\n\treturn archive.TarWithOptions(contextDirectory, options)\n}", "title": "" }, { "docid": "7fa138cb31b6e3ce4b46b5654bc30f2e", "score": "0.5261805", "text": "func Containerize(e Environment, name string, startFn func(context.Context) error) (Runnable, error) {\n\tde, ok := e.(*DockerEnvironment)\n\tif !ok {\n\t\treturn nil, errors.New(\"not implemented\")\n\t}\n\n\t// Not portable, but good enough for local unit tests.\n\twd, err := os.Getwd()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tstrs := strings.Split(runtime.FuncForPC(reflect.ValueOf(startFn).Pointer()).Name(), \".\")\n\tfuncName := strs[len(strs)-1]\n\tpkg := strings.Join(strs[:len(strs)-1], \".\")\n\n\tmodulePath := pkg\n\tabsModulePath := wd\n\tfor len(absModulePath) > 0 {\n\t\t_, err := os.Stat(filepath.Join(absModulePath, \"go.mod\"))\n\t\tif os.IsNotExist(err) {\n\t\t\tabsModulePath = filepath.Dir(absModulePath)\n\t\t\tmodulePath = filepath.Dir(modulePath)\n\t\t\tcontinue\n\t\t}\n\t\tif err == nil {\n\t\t\tbreak\n\t\t}\n\t\treturn nil, err\n\t}\n\n\tif len(absModulePath) == 0 {\n\t\treturn nil, errors.Newf(\"not a Go module %v\", wd)\n\t}\n\n\tf := e.Runnable(name).WithPorts(map[string]int{\"http\": 80}).Future()\n\tdir := filepath.Join(f.Dir(), \"shim\")\n\n\tif err := os.MkdirAll(dir, os.ModePerm); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// TODO(saswatamcode): Maybe we can do away with goModTmpl, and just run go mod init shim, go mod edit -replace %v=%v and go mod tidy here.\n\tif err := os.WriteFile(filepath.Join(dir, \"go.mod\"), []byte(fmt.Sprintf(goModTmpl, modulePath, modulePath, absModulePath)), os.ModePerm); err != nil {\n\t\treturn nil, err\n\t}\n\tif err := os.WriteFile(filepath.Join(dir, \"main.go\"), []byte(fmt.Sprintf(mainFileTmpl, pkg, funcName)), os.ModePerm); err != nil {\n\t\treturn nil, err\n\t}\n\tif err := os.WriteFile(filepath.Join(dir, \"Dockerfile\"), []byte(dockerFile), os.ModePerm); err != nil {\n\t\treturn nil, err\n\t}\n\n\tcmd := de.exec(\"go\", \"mod\", \"tidy\")\n\tcmd.Dir = dir\n\tif out, err := cmd.CombinedOutput(); err != nil {\n\t\treturn nil, errors.Wrap(err, string(out))\n\t}\n\n\tcmd = de.exec(\"go\", \"build\", \"-o\", \"exe\", \"main.go\")\n\tcmd.Dir = dir\n\tif out, err := cmd.CombinedOutput(); err != nil {\n\t\treturn nil, errors.Wrap(err, string(out))\n\t}\n\n\timageTag := 
fmt.Sprintf(\"e2e-local-%v:dynamic\", name)\n\tcmd = de.exec(\"docker\", \"build\", \"-t\", imageTag, \".\")\n\tcmd.Dir = dir\n\tif out, err := cmd.CombinedOutput(); err != nil {\n\t\treturn nil, errors.Wrap(err, string(out))\n\t}\n\treturn f.Init(StartOptions{Image: imageTag}), nil\n}", "title": "" }, { "docid": "bfe11c760440aa2f6287c288aba01811", "score": "0.52613616", "text": "func findDockerfile(l *claircore.Layer) (string, *bytes.Buffer, error) {\n\t// Dockerfile which was used to build given image/layer is stored by OSBS in /root/buildinfo/\n\t// Name of dockerfiles is in following format \"Dockerfile-NAME-VERSION-RELEASE\"\n\t// Name, version and release are labels defined in the dockerfile\n\tre, err := regexp.Compile(\"root/buildinfo/Dockerfile-.*\")\n\tif err != nil {\n\t\treturn \"\", nil, err\n\t}\n\tfiles, err := filesByRegexp(l, re)\n\tif err != nil {\n\t\treturn \"\", nil, err\n\t}\n\t// there should be always just one Dockerfile - return the first from a map\n\tfor name, buf := range files {\n\t\treturn name, buf, nil\n\t}\n\treturn \"\", nil, nil\n}", "title": "" }, { "docid": "33b53c6fa4b13f74a62bf5c026cb4316", "score": "0.5259302", "text": "func (Debug) Dockerfile() error {\n\treturn docker.Generate(&docker.Command{\n\t\tBin: \"spotigraph\",\n\t\tName: \"Spotigraph\",\n\t\tDescription: \"Spotify agile model data microservice\",\n\t\tURL: \"https://github.com/Zenitha/go-spotigraph/tree/master/cmd/spotigraph\",\n\t})()\n}", "title": "" }, { "docid": "162e8ee2d3fbfa470eca7f444e4cdb07", "score": "0.5256275", "text": "func buildTemplate(teamName string, appID string) (*teamTemplate, error) {\n\tif !isStringSafe(teamName) || !isStringSafe(appID) {\n\t\treturn nil, errors.New(\"Invalid parameters for template.\\n\")\n\t}\n\treturn &teamTemplate{teamID: teamName, appID: appID}, nil\n}", "title": "" }, { "docid": "7b8d578dc602e8966b7cddb40c62c19c", "score": "0.52521604", "text": "func (c *DockerClient) DockerBuild(name, context, dockerfile string) error {\n\tauth := loadRegistryAuthFile()\n\tclient := c.Client\n\terr := client.BuildImage(docker.BuildImageOptions{\n\t\tName: name,\n\t\tDockerfile: dockerfile,\n\t\tOutputStream: bytes.NewBuffer(nil),\n\t\tContextDir: context,\n\t\tAuthConfigs: *auth,\n\t})\n\tif err != nil {\n\t\tlogrus.Error(err)\n\t}\n\treturn err\n}", "title": "" }, { "docid": "ed97dd43b9541d4d88012f4f7c178d78", "score": "0.52515435", "text": "func getImage(constraints models.ImageConstraints) models.DockerImage {\n\t// @TODO (INFRA-3163): add human-readable image tags/other comments for image, if doable in yaml\n\t// @TODO: use SHAs for all images\n\tappType := constraints.AppType\n\tversion := constraints.Version\n\t// default image (reproduces CircleCI 1.0 base)\n\tdefaultImage := models.DockerImage{\n\t\tImage: \"circleci/build-image:ubuntu-14.04-XXL-upstart-1189-5614f37\",\n\t}\n\n\tgolangImageMap := map[string]models.DockerImage{\n\t\t\"1.10\": models.DockerImage{Image: \"circleci/golang:1.10.3-stretch\"}, // \"circleci/golang@sha256:4614481a383e55eef504f26f383db1329c285099fde0cfd342c49e5bb9b6c32a\"\n\t\t\"1.9\": models.DockerImage{Image: \"circleci/golang:1.9.7-stretch\"}, // \"circleci/golang@sha256:c46bee0b60747525d354f219083a46e06c68152f90f3bfb2812d1f232e6a5097\"\n\t\t\"1.8\": models.DockerImage{Image: \"circleci/golang:1.8.7-stretch\"},\n\t}\n\n\tnodeImageMap := map[string]models.DockerImage{\n\t\t\"10\": models.DockerImage{Image: \"circleci/node:10.8.0-stretch\"},\n\t\t\"8\": models.DockerImage{Image: \"circleci/node:8.11.3-stretch\"},\n\t\t\"6\": 
models.DockerImage{Image: \"circleci/node:6.14.3-stretch\"},\n\t\t\"5\": models.DockerImage{Image: \"circleci/node:6.14.3-stretch\"},\n\t\t\"4\": models.DockerImage{Image: \"circleci/node:6.14.3-stretch\"},\n\t\t\"0\": models.DockerImage{Image: \"circleci/node:6.14.3-stretch\"},\n\t}\n\tpythonImageMap := map[string]models.DockerImage{\n\t\t\"2.7\": models.DockerImage{Image: \"circleci/python:2.7.15\"},\n\t}\n\n\tif appType == GOLANG_APP_TYPE {\n\t\tgolangBaseImage, ok := golangImageMap[version]\n\t\tif ok {\n\t\t\treturn golangBaseImage\n\t\t}\n\t} else if appType == WAG_APP_TYPE {\n\t\tgolangBaseImage, ok := golangImageMap[version]\n\t\tif ok {\n\t\t\t//@TODO: -node version not actually availabe for go 1.8\n\t\t\treturn models.DockerImage{Image: fmt.Sprintf(\"%s-node\", golangBaseImage.Image)}\n\t\t}\n\t} else if appType == NODE_APP_TYPE {\n\t\tnodeBaseImage, ok := nodeImageMap[version]\n\t\tif ok {\n\t\t\treturn nodeBaseImage\n\t\t} else {\n\t\t\tfmt.Printf(\"unrecognized node version !%s!\\n\", version)\n\t\t}\n\t} else if appType == PYTHON_APP_TYPE {\n\t\tpythonBaseImage, ok := pythonImageMap[version]\n\t\tif ok {\n\t\t\treturn pythonBaseImage\n\t\t}\n\t}\n\tfmt.Printf(\"No circleci image selected for app type %s, version %s -- using default\\n\", constraints.AppType, constraints.Version)\n\treturn defaultImage\n}", "title": "" }, { "docid": "737ce89c312f78c597004954c5e19267", "score": "0.5244122", "text": "func (c *buildContext) Build() (err error) {\n\t// ensure kubernetes build is up-to-date first\n\tc.logger.V(0).Info(\"Starting to build Kubernetes\")\n\tbits, err := c.builder.Build()\n\tif err != nil {\n\t\tc.logger.Errorf(\"Failed to build Kubernetes: %v\", err)\n\t\treturn errors.Wrap(err, \"failed to build kubernetes\")\n\t}\n\tc.logger.V(0).Info(\"Finished building Kubernetes\")\n\n\t// then perform the actual docker image build\n\tc.logger.V(0).Info(\"Building node image ...\")\n\treturn c.buildImage(bits)\n}", "title": "" }, { "docid": "0142072d423329215bae13e6bb931cd6", "score": "0.52411795", "text": "func createBuild(image string, app App, options ...string) {\n\tpullOrCreateBuild(image, app, \"builds:create\", strings.Join(options, \" \"))\n}", "title": "" }, { "docid": "280301a127b6623a139ba8742d554894", "score": "0.52363044", "text": "func BuildDockerImage(project string) error {\n\t// If a Dockerfile lives in the current directory,\n\t// we can not assume that it has all the current information so we have to\n\t// abort and let the callee resolve this issue.\n\tif _, err := os.Stat(\"Dockerfile\"); !os.IsNotExist(err) {\n\t\treturn fmt.Errorf(\"Dockerfile found within local directory, aborting\")\n\t}\n\tconfig, err := docker.CreateConfig(project)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// When resolving different config types, we may also be rewriting\n\t// content on disc, so this should ensure that content as the user left it.\n\tswitch config.Type {\n\tcase \"node\":\n\t\tdefer node.RestoreBackups()\n\t}\n\n\tdockerfile, err := config.CreateMetaFile()\n\tif err != nil {\n\t\treturn err\n\t}\n\t// We want to remove the generated Dockefiler once we are done\n\tdefer os.Remove(\"Dockerfile\")\n\tif err := ioutil.WriteFile(\"Dockerfile\", dockerfile, 0644); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6d1dd77ee153c3d99a5375bb7085517f", "score": "0.5222641", "text": "func (testContext *TestContext) TryExecDockerfile(repoId string, dockerfileId string,\n\timageName string, paramNames, paramValues []string) (string, string, 
map[string]interface{}) {\n\ttestContext.StartTest(\"TryExecDockerfile\")\n\t\n\tif len(paramNames) != len(paramValues) { panic(\n\t\t\"Invalid test: len param names != len param values\") }\n\tvar paramStr string = \"\"\n\tfor i, paramName := range paramNames {\n\t\tif i > 0 { paramStr = paramStr + \";\" }\n\t\tparamStr = paramStr + fmt.Sprintf(\"%s:%s\", paramName, paramValues[i])\n\t}\n\t\n\tfmt.Println(\"paramStr=\" + paramStr)\n\tfmt.Println(fmt.Sprintf(\"len(paramNames)=%d, len(paramValues)=%d\", len(paramNames), len(paramValues)))\n\t\n\tvar resp *http.Response\n\tvar err error\n\tfmt.Println(\"Sending session Post, execDockerfile...\")\n\tresp, err = testContext.SendSessionPost(testContext.SessionId,\n\t\t\"execDockerfile\",\n\t\t[]string{\"Log\", \"RepoId\", \"DockerfileId\", \"ImageName\", \"Params\"},\n\t\t[]string{testContext.TestDemarcation(), repoId, dockerfileId, imageName, paramStr})\n\t\n\tdefer resp.Body.Close()\n\n\tfmt.Println(\"verifying response...\")\n\tif ! testContext.Verify200Response(resp) { testContext.FailTestWithMessage(resp.Status) }\n\t\n\t// Get the repo Id that is returned in the response body.\n\t/* DockerImageVersionDesc:\n\tBaseType\n\tObjId string\n\tVersion string\n\tImageObjId string\n ImageCreationEventId string\n CreationDate string\n Digest []byte\n Signature []byte\n ScanEventIds []string\n DockerBuildOutput string\n\t*/\n\tvar responseMap map[string]interface{}\n\tresponseMap, err = rest.ParseResponseBodyToMap(resp.Body)\n\tif err != nil { fmt.Println(err.Error()); return \"\", \"\", nil }\n\tvar retObjId string = responseMap[\"ObjId\"].(string)\n\tvar retImageObjId string = responseMap[\"ImageObjId\"].(string)\n\tvar retVersion string = responseMap[\"Version\"].(string)\n\tvar retImageCreationEventId string = responseMap[\"ImageCreationEventId\"].(string)\n\tvar retCreationDate string = responseMap[\"CreationDate\"].(string)\n\trest.PrintMap(responseMap)\n\t\n\tfmt.Println(\"verifying response data...\")\n\ttestContext.AssertThat(retObjId != \"\", \"ObjId is empty\")\n\ttestContext.AssertThat(retImageObjId != \"\", \"ImageObjId is empty\")\n\ttestContext.AssertThat(retVersion != \"\", \"Version is empty\")\n\ttestContext.AssertThat(retImageCreationEventId != \"\", \"ImageCreationEventId is empty\")\n\ttestContext.AssertThat(retCreationDate != \"\", \"CreationDate is empty\")\n\t\n\ttestContext.PassTestIfNoFailures()\n\tfmt.Println(\"returing from TryExecDockerfile...\")\n\treturn retObjId, retImageObjId, responseMap\n}", "title": "" }, { "docid": "6c98aef917563c9d6d0acf3b246d951e", "score": "0.52089", "text": "func buildContainerFromImage(cntx context.Context, imgTag string, hostIP string, hostPort string, containerName string, images []types.ImageSummary, cli *client.Client) (string, error) {\n\tvar contID string\n\tfor _, image := range images {\n\n\t\t// Select specified image by the repo tag.\n\t\tif strings.Join(image.RepoTags, \"\") == imgTag {\n\n\t\t\t// Create the container from the image.\n\t\t\t// TODO: Devise a progamatic way of producting a container name.\n\t\t\t// I'm not even sure if the container name is assigned right now.\n\t\t\t// TODO: Determine how exposedPort and port Bindings are different.\n\t\t\texposedPort := map[nat.Port]struct{}{\"8080/tcp\": {}}\n\t\t\tjack := make(map[string]string)\n\t\t\tjack[\"slippery\"] = \"fish\"\n\t\t\tconfigOptions := container.Config{Image: strings.Join(image.RepoTags, \"\"), ExposedPorts: exposedPort, Labels: jack}\n\t\t\tnetworkConfig := network.NetworkingConfig{}\n\t\t\tportBindings := 
map[nat.Port][]nat.PortBinding{\n\t\t\t\t\"8080/tcp\": {{HostIP: hostIP, HostPort: hostPort}}}\n\t\t\thostConfig := container.HostConfig{\n\t\t\t\tPublishAllPorts: true,\n\t\t\t\tPortBindings: portBindings,\n\t\t\t}\n\n\t\t\tcreateResponse, err := cli.ContainerCreate(cntx, &configOptions, &hostConfig, &networkConfig, containerName)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(err)\n\t\t\t}\n\t\t\tcontID = createResponse.ID\n\t\t}\n\t}\n\treturn contID, nil\n}", "title": "" }, { "docid": "6e45c13cf74693c19ec8f8ae75871c45", "score": "0.52031595", "text": "func Make(fileName string, templateName string, source interface{}) {\n\tf, err := os.Create(fileName)\n\tif err != nil {\n\t\tfmt.Println(\"ERROR \", err)\n\t}\n\tdefer f.Close()\n\n\tft, err := efs.GetFile(templateName)\n\tif err != nil {\n\t\tfmt.Println(\"ERROR, Embedded FS: \", err)\n\t}\n\n\ttmp, err := template.New(\"newtemplate\").Parse(string(ft))\n\tif err != nil {\n\t\tfmt.Println(\"ERROR, Parse Template \", err)\n\t\tpanic(err)\n\t}\n\terr = tmp.Execute(f, source)\n\tif err != nil {\n\t\tfmt.Println(\"ERROR, Execute Template \", err)\n\t\tpanic(err)\n\t}\n}", "title": "" }, { "docid": "7ed4fe8fada60675a3db1a52fbab48fb", "score": "0.5197859", "text": "func (d DockerDeployer) BuildImage(dockerfileTarReader io.Reader, tag string) error {\n\tctx, cancel := context.WithTimeout(context.Background(), time.Duration(300)*time.Second)\n\tdefer cancel()\n\n\tresp, err := d.cli.ImageBuild(\n\t\tctx,\n\t\tdockerfileTarReader,\n\t\ttypes.ImageBuildOptions{\n\t\t\tDockerfile: \"Dockerfile\",\n\t\t\tTags: []string{tag},\n\t\t\tNoCache: true,\n\t\t\tRemove: true,\n\t\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// block until the image is finished building\n\t_, err = ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6457c20239da4e0249babf4fa45c9bdf", "score": "0.5196415", "text": "func Generate(yamlFilename string) string {\n\tlog.Printf(\"Loading config file\")\n\tconfig, _ := loadConfig(yamlFilename)\n\tlayerSize := int64((config.ImageGeneration.ImgSizeMb / config.ImageGeneration.LayerNumber) * 1024 * 1024)\n\tfilepath := config.PullSourceFolder + \"/\"\n\tdir, _ := ioutil.ReadDir(filepath)\n\tfor _, d := range dir {\n\t\tos.RemoveAll(filepath + d.Name())\n\t}\n\tfor i := 0; i < config.ImageGeneration.LayerNumber; i++ {\n\t\thexval, _ := randomHex(32)\n\t\tfd, err := Create(filepath + hexval)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to create file: %v\", err)\n\t\t}\n\t\tif config.ImageGeneration.GenerateRandom {\n\t\t\t_, err = fd.Seek(layerSize-9, 0)\n\t\t\trandbytes := make([]byte, 8)\n\t\t\trand.Read(randbytes)\n\t\t\t_, err = fd.Write(randbytes)\n\t\t\t_, err = fd.Write([]byte{0})\n\t\t\terr = fd.Close()\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(\"Failed to close file\")\n\t\t\t}\n\t\t}\n\n\t\tdigest, err := sha256Digest(filepath + hexval)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\terr = os.Rename(filepath+hexval, filepath+digest)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\tlog.Printf(\"Docker layer generated\")\n\t}\n\n\treturn filepath\n}", "title": "" }, { "docid": "55cf4a617c96af8aec879e3507e9bd0a", "score": "0.5193492", "text": "func (o *objectCache) build(ctx context.Context, cfg *templateCfg, hash string) error {\n\tisHost := cfg.IsHost()\n\ttemplatePath := filepath.Join(o.workingDirectory, defaults.TemplatesDir, hash)\n\theaderPath := filepath.Join(templatePath, common.CHeaderFileName)\n\tepObj := endpointObj\n\tif isHost 
{\n\t\tepObj = hostEndpointObj\n\t}\n\tobjectPath := filepath.Join(templatePath, epObj)\n\n\tif err := os.MkdirAll(templatePath, defaults.StateDirRights); err != nil {\n\t\treturn &os.PathError{\n\t\t\tOp: \"failed to create template directory\",\n\t\t\tPath: templatePath,\n\t\t\tErr: err,\n\t\t}\n\t}\n\n\tf, err := os.Create(headerPath)\n\tif err != nil {\n\t\treturn &os.PathError{\n\t\t\tOp: \"failed to open template header for writing\",\n\t\t\tPath: headerPath,\n\t\t\tErr: err,\n\t\t}\n\t}\n\tdefer f.Close()\n\tif err = o.ConfigWriter.WriteEndpointConfig(f, cfg); err != nil {\n\t\treturn &os.PathError{\n\t\t\tOp: \"failed to write template header\",\n\t\t\tPath: headerPath,\n\t\t\tErr: err,\n\t\t}\n\t}\n\n\tcfg.stats.BpfCompilation.Start()\n\terr = compileTemplate(ctx, templatePath, isHost)\n\tcfg.stats.BpfCompilation.End(err == nil)\n\tif err != nil {\n\t\treturn &os.PathError{\n\t\t\tOp: \"failed to compile template program\",\n\t\t\tPath: templatePath,\n\t\t\tErr: err,\n\t\t}\n\t}\n\n\tlog.WithFields(logrus.Fields{\n\t\tlogfields.Path: objectPath,\n\t\tlogfields.BPFCompilationTime: cfg.stats.BpfCompilation.Total(),\n\t}).Info(\"Compiled new BPF template\")\n\n\to.insert(hash, objectPath)\n\treturn nil\n}", "title": "" }, { "docid": "4d3133580d679613e97671a6bb2ba871", "score": "0.5184252", "text": "func Build() error {\n\tmg.Deps(clean, buildImage)\n\n\t// Build the volume mount for a local contribute repo, if present\n\tcontentMounts, goModMount, err := useLocalContent()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tpwd, _ := os.Getwd()\n\treturn sh.RunV(\"docker\", expandArgs(\"run\", \"--rm\", \"-v\", pwd+\":/src\",\n\t\tcontentMounts, goModMount, containerName, \"--debug\", \"--verbose\")...)\n}", "title": "" }, { "docid": "8d84c1764a3334805f47e9a068f4a3e3", "score": "0.5177538", "text": "func (s *SSHClient) ImageBuildFromExist(ctx context.Context, ID, name string) (imageID string, err error) {\n\terr = types.ErrEngineNotImplemented\n\treturn\n}", "title": "" }, { "docid": "cf41d01c21b677a3bc9e9fbf78e7d1f4", "score": "0.5175617", "text": "func (n *nodejs) GetProcessorDockerfileContents() string {\n\treturn `ARG NUCLIO_LABEL=latest\nARG NUCLIO_ARCH=amd64\nARG NUCLIO_BASE_IMAGE=node:9.3.0-alpine\nARG NUCLIO_ONBUILD_IMAGE=nuclio/handler-builder-nodejs-onbuild:${NUCLIO_LABEL}-${NUCLIO_ARCH}\n\n# Supplies processor uhttpc, used for healthcheck\nFROM nuclio/uhttpc:0.0.1-amd64 as uhttpc\n\n# Supplies processor binary, wrapper\nFROM ${NUCLIO_ONBUILD_IMAGE} as processor\n\n# From the base image\nFROM ${NUCLIO_BASE_IMAGE}\n\n# Copy required objects from the suppliers\nCOPY --from=processor /home/nuclio/bin/processor /usr/local/bin/processor\nCOPY --from=processor /home/nuclio/bin/wrapper.js /opt/nuclio/wrapper.js\nCOPY --from=uhttpc /home/nuclio/bin/uhttpc /usr/local/bin/uhttpc\n\n# Copy the handler directory to /opt/nuclio\nCOPY handler /opt/nuclio\n\n# Readiness probe\nHEALTHCHECK --interval=1s --timeout=3s CMD /usr/local/bin/uhttpc --url http://localhost:8082/ready || exit 1\n\n# Set node modules path\nENV NODE_PATH=/usr/local/lib/node_modules\n\n# Run processor with configuration and platform configuration\nCMD [ \"processor\", \"--config\", \"/etc/nuclio/config/processor/processor.yaml\", \"--platform-config\", \"/etc/nuclio/config/platform/platform.yaml\" ]\n`\n}", "title": "" }, { "docid": "be12815b2e59d6d8de865c27c43f7982", "score": "0.51709735", "text": "func (t *Templates) GetDockerfile() ([]byte, error) {\n\ttmpl := fmt.Sprintf(\"templates/build/%s.Dockerfile\", 
t.GetBuildDriver())\n\treturn t.fs.ReadFile(tmpl)\n}", "title": "" }, { "docid": "298af72b54bf8c2f74f7ab36faa8c0ed", "score": "0.515939", "text": "func (g *imageRefGenerator) FromDockerfile(name string, dir string, context string) (*ImageRef, error) {\n\t// Look for Dockerfile in repository\n\tfile, err := os.Open(filepath.Join(dir, context, \"Dockerfile\"))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdockerFile, err := g.dockerParser.Parse(file)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\texpose, ok := dockerFile.GetDirective(\"EXPOSE\")\n\tif !ok {\n\t\treturn nil, err\n\t}\n\tports := []string{}\n\tfor _, e := range expose {\n\t\tps := strings.Split(e, \" \")\n\t\tports = append(ports, ps...)\n\t}\n\treturn g.FromNameAndPorts(name, ports)\n}", "title": "" }, { "docid": "8b2b88635c42bdc2ae176d02ed1a8178", "score": "0.5154815", "text": "func MakeDockerCompose(testnet *e2e.Testnet) ([]byte, error) {\n\t// Must use version 2 Docker Compose format, to support IPv6.\n\ttmpl, err := template.New(\"docker-compose\").Funcs(template.FuncMap{\n\t\t\"misbehaviorsToString\": func(misbehaviors map[int64]string) string {\n\t\t\tstr := \"\"\n\t\t\tfor height, misbehavior := range misbehaviors {\n\t\t\t\t// after the first behavior set, a comma must be prepended\n\t\t\t\tif str != \"\" {\n\t\t\t\t\tstr += \",\"\n\t\t\t\t}\n\t\t\t\theightString := strconv.Itoa(int(height))\n\t\t\t\tstr += misbehavior + \",\" + heightString\n\t\t\t}\n\t\t\treturn str\n\t\t},\n\t}).Parse(`version: '2.4'\n\nnetworks:\n {{ .Name }}:\n labels:\n e2e: true\n driver: bridge\n{{- if .IPv6 }}\n enable_ipv6: true\n{{- end }}\n ipam:\n driver: default\n config:\n - subnet: {{ .IP }}\n\nservices:\n{{- range .Nodes }}\n {{ .Name }}:\n labels:\n e2e: true\n container_name: {{ .Name }}\n image: tenderdash/e2e-node\n{{- if eq .ABCIProtocol \"builtin\" }}\n entrypoint: /usr/bin/entrypoint-builtin\n{{- else if .Misbehaviors }}\n entrypoint: /usr/bin/entrypoint-maverick\n command: [\"node\", \"--misbehaviors\", \"{{ misbehaviorsToString .Misbehaviors }}\"]\n{{- end }}\n init: true\n ports:\n - 26656\n - {{ if .ProxyPort }}{{ .ProxyPort }}:{{ end }}26657\n - 6060\n volumes:\n - ./{{ .Name }}:/tenderdash\n{{- if ne $.PreCompiledAppPath \"\" }}\n - {{ $.PreCompiledAppPath }}:/usr/bin/app\n{{- end }}\n networks:\n {{ $.Name }}:\n ipv{{ if $.IPv6 }}6{{ else }}4{{ end}}_address: {{ .IP }}\n\n{{end}}`)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar buf bytes.Buffer\n\tdata := &struct {\n\t\t*e2e.Testnet\n\t\tPreCompiledAppPath string\n\t}{\n\t\tTestnet: testnet,\n\t\tPreCompiledAppPath: os.Getenv(\"PRE_COMPILED_APP_PATH\"),\n\t}\n\terr = tmpl.Execute(&buf, data)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn buf.Bytes(), nil\n}", "title": "" }, { "docid": "f9f80219a80f6be25689085e13032979", "score": "0.51473695", "text": "func (ctrl *Controller) prepareForBuild(inputs *buildInputs) (ImageBuildRequest, error) {\n\tibr := newImageBuildRequestFromBuildInputs(inputs)\n\n\tmcConfigMap, err := ibr.toConfigMap(inputs.machineConfig)\n\tif err != nil {\n\t\treturn ImageBuildRequest{}, fmt.Errorf(\"could not convert MachineConfig %s into ConfigMap: %w\", inputs.machineConfig.Name, err)\n\t}\n\n\t_, err = ctrl.kubeclient.CoreV1().ConfigMaps(ctrlcommon.MCONamespace).Create(context.TODO(), mcConfigMap, metav1.CreateOptions{})\n\tif err != nil {\n\t\treturn ImageBuildRequest{}, fmt.Errorf(\"could not load rendered MachineConfig %s into configmap: %w\", mcConfigMap.Name, err)\n\t}\n\n\tklog.Infof(\"Stored MachineConfig %s in ConfigMap 
%s for build\", inputs.machineConfig.Name, mcConfigMap.Name)\n\n\tdockerfileConfigMap, err := ibr.dockerfileToConfigMap()\n\tif err != nil {\n\t\treturn ImageBuildRequest{}, fmt.Errorf(\"could not generate Dockerfile ConfigMap: %w\", err)\n\t}\n\n\t_, err = ctrl.kubeclient.CoreV1().ConfigMaps(ctrlcommon.MCONamespace).Create(context.TODO(), dockerfileConfigMap, metav1.CreateOptions{})\n\tif err != nil {\n\t\treturn ImageBuildRequest{}, fmt.Errorf(\"could not load rendered Dockerfile %s into configmap: %w\", dockerfileConfigMap.Name, err)\n\t}\n\n\tklog.Infof(\"Stored Dockerfile for build %s in ConfigMap %s for build\", ibr.getBuildName(), dockerfileConfigMap.Name)\n\n\treturn ibr, nil\n}", "title": "" }, { "docid": "6305c8f678f39f085099dc1e6e6ec3c8", "score": "0.51438725", "text": "func (c *Client) Create(cfg Function) (err error) {\n\t// Create project root directory, if it doesn't already exist\n\tif err = os.MkdirAll(cfg.Root, 0755); err != nil {\n\t\treturn\n\t}\n\n\t// Root must not already be a Function\n\t//\n\t// Instantiate a Function struct about the given root path, but\n\t// immediately exit with error (prior to actual creation) if this is\n\t// a Function already initialized at that path (Create should never\n\t// clobber a pre-existing Function)\n\tf, err := NewFunctionFromDefaults(cfg)\n\tif err != nil {\n\t\treturn\n\t}\n\tif f.Initialized() {\n\t\terr = fmt.Errorf(\"Function at '%v' already initialized\", f.Root)\n\t\treturn\n\t}\n\n\t// Root must not contain any visible files\n\t//\n\t// We know from above that the target directory does not contain a Function,\n\t// but also immediately exit if the target directoy contains any visible files\n\t// at all, or any of the known hidden files that will be written.\n\t// This is to ensure that if a user inadvertently chooses an incorrect directory\n\t// for their new Function, the template and config file writing steps do not\n\t// cause data loss.\n\tif err = assertEmptyRoot(f.Root); err != nil {\n\t\treturn\n\t}\n\n\t// Write out the template for a Function\n\t// returns a Function which may be mutated based on the content of\n\t// the template (default Function, builders, buildpacks, etc).\n\tf, err = c.Templates().Write(f)\n\tif err != nil {\n\t\treturn\n\t}\n\n\t// Mark it as having been created via this client library and Write (save)\n\tf.Created = time.Now()\n\tif err = f.Write(); err != nil {\n\t\treturn\n\t}\n\n\t// TODO: Create a status structure and return it for clients to use\n\t// for output, such as from the CLI.\n\tif c.verbose {\n\t\tfmt.Printf(\"Builder: %s\\n\", f.Builder)\n\t\tif len(f.Buildpacks) > 0 {\n\t\t\tfmt.Println(\"Buildpacks:\")\n\t\t\tfor _, b := range f.Buildpacks {\n\t\t\t\tfmt.Printf(\" ... 
%s\\n\", b)\n\t\t\t}\n\t\t}\n\t\tfmt.Println(\"Function project created\")\n\t}\n\treturn\n}", "title": "" }, { "docid": "1e4296be8e72ca04e1a860b09b1b35c4", "score": "0.51352215", "text": "func Build(ctx context.Context, c client.Client) (*client.Result, error) {\n\tbuildOpts := c.BuildOpts()\n\topts := buildOpts.Opts\n\tsessionID := buildOpts.SessionID\n\n\tlocalNameContext := DefaultLocalNameContext\n\tif v, ok := opts[keyNameContext]; ok {\n\t\tlocalNameContext = v\n\t}\n\n\tlocalNameDockerfile := DefaultLocalNameDockerfile\n\tif v, ok := opts[keyNameDockerfile]; ok {\n\t\tlocalNameDockerfile = v\n\t}\n\n\tnetAppDockerfile, err := getManifest(ctx, c, opts, localNameDockerfile, sessionID)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tproject, err := getProject(\n\t\tnetAppDockerfile,\n\t\topts,\n\t\tfunc() (string, error) {\n\t\t\treturn inferProject(ctx, c, localNameDockerfile, sessionID)\n\t\t})\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tconfiguration := getConfiguration(netAppDockerfile, opts)\n\n\tcontextSource := llb.Local(localNameContext,\n\t\tllb.SessionID(c.BuildOpts().SessionID))\n\n\tsourceOp := llb.\n\t\tImage(NetAppSdkImage).\n\t\tDir(NetAppSourceDir).\n\t\tWith(\n\t\t\tcopyFrom(contextSource, project, \"./\"),\n\t\t).\n\t\tRun(llb.Shlexf(\"dotnet restore \\\"%s\\\"\", project)).\n\t\tWith(\n\t\t\tcopyAll(contextSource, \".\"),\n\t\t)\n\n\tassembly, err := getAssembly(\n\t\tnetAppDockerfile,\n\t\topts,\n\t\tfunc() (string, error) {\n\t\t\treturn inferAssembly(ctx, c, sourceOp, project)\n\t\t})\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbuildDir := path.Join(NetAppDir, \"build\")\n\n\tbuildOp := sourceOp.\n\t\tRun(llb.Shlexf(\"dotnet build \\\"%s\\\" -c \\\"%s\\\" -o \\\"%s\\\"\", project, configuration, buildDir))\n\n\tpublishDir := path.Join(NetAppDir, \"publish\")\n\n\tpublishOp := buildOp.\n\t\tRun(llb.Shlexf(\"dotnet publish \\\"%s\\\" -c \\\"%s\\\" -o \\\"%s\\\"\", project, configuration, publishDir))\n\n\tfinalOp := llb.\n\t\tImage(NetAspNetRuntimeImage).\n\t\tDir(NetAppDir).\n\t\tWith(\n\t\t\tcopyFrom(publishOp.State, publishDir, \".\"),\n\t\t)\n\n\tdt, err := finalOp.Marshal(ctx, llb.LinuxAmd64)\n\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"Unable to create definition\")\n\t}\n\n\tres, err := c.Solve(ctx, client.SolveRequest{\n\t\tDefinition: dt.ToPB(),\n\t})\n\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"Unable to solve the definition\")\n\t}\n\n\tbuildRes := client.NewResult()\n\n\tref, err := res.SingleRef()\n\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"Unable to get reference.\")\n\t}\n\n\timage := dockerfile2llb.Image{\n\t}\n\n\tvar entrypoint []string\n\n\tentrypoint = append(entrypoint, \"dotnet\")\n\tentrypoint = append(entrypoint, assembly)\n\n\timage.Architecture = \"amd64\"\n\timage.Config.Entrypoint = entrypoint\n\n\t_, bytes, err := c.ResolveImageConfig(ctx, NetAspNetRuntimeImage, llb.ResolveImageConfigOpt{})\n\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to runtime resolve image config\")\n\t}\n\n\truntimeImageConfig := dockerfile2llb.Image{}\n\n\tif err := json.Unmarshal(bytes, &runtimeImageConfig); err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to unmarshal runtime image config\")\n\t}\n\n\timage.Config.Env = runtimeImageConfig.Config.Env\n\n\tif image.Config.ExposedPorts == nil {\n\t\timage.Config.ExposedPorts = make(map[string]struct{})\n\t}\n\n\timage.Config.ExposedPorts[\"80/tcp\"] = struct{}{}\n\n\timage.Config.WorkingDir = NetAppDir\n\n\timageMarshaled, err := 
json.Marshal(image)\n\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"Unable to marshal the image metadata\")\n\t}\n\n\tbuildRes.AddMeta(exptypes.ExporterImageConfigKey, imageMarshaled)\n\tbuildRes.SetRef(ref)\n\n\treturn buildRes, nil\n}", "title": "" }, { "docid": "51c23c2a2d501a8452ebb40ce383469a", "score": "0.5131572", "text": "func Build(filename string) {\n\t// Read the file\n\tcontents, err := ioutil.ReadFile(filename)\n\tif err != nil {\n\t\tlog.Fatalf(\"Error reading input file %s\", err.Error())\n\t}\n\n\t// Convert Contents to format\n\tvar request pb.BuildRequest\n\terr = yaml.UnmarshalStrict(contents, &request)\n\n\tif err != nil {\n\t\tlog.Fatalf(\"Could not parse file %s\", err.Error())\n\t}\n\n\tif err := StartBuild(&request, \"\"); err != nil {\n\t\tlog.Fatalf(\"Error invoking server %s\", err.Error())\n\t}\n\n}", "title": "" }, { "docid": "faee58debb8017a18325712a1f68846f", "score": "0.5128617", "text": "func injectImageDependencies(iTarget model.ImageTarget, iTargetMap map[model.TargetID]model.ImageTarget, deps []store.BuildResult) (model.ImageTarget, error) {\n\tif len(deps) == 0 {\n\t\treturn iTarget, nil\n\t}\n\n\tdf := dockerfile.Dockerfile(\"\")\n\tswitch bd := iTarget.BuildDetails.(type) {\n\tcase model.DockerBuild:\n\t\tdf = dockerfile.Dockerfile(bd.Dockerfile)\n\tcase model.FastBuild:\n\t\tdf = dockerfile.Dockerfile(bd.BaseDockerfile)\n\tdefault:\n\t\treturn model.ImageTarget{}, fmt.Errorf(\"image %q has no valid buildDetails\", iTarget.ConfigurationRef)\n\t}\n\n\tast, err := dockerfile.ParseAST(df)\n\tif err != nil {\n\t\treturn model.ImageTarget{}, errors.Wrap(err, \"injectImageDependencies\")\n\t}\n\n\tfor _, dep := range deps {\n\t\tmodified, err := ast.InjectImageDigest(iTargetMap[dep.TargetID].ConfigurationRef, dep.Image)\n\t\tif err != nil {\n\t\t\treturn model.ImageTarget{}, errors.Wrap(err, \"injectImageDependencies\")\n\t\t} else if !modified {\n\t\t\treturn model.ImageTarget{}, fmt.Errorf(\"Could not inject image %q into Dockerfile of image %q\", dep.Image, iTarget.ConfigurationRef)\n\t\t}\n\t}\n\n\tnewDf, err := ast.Print()\n\tif err != nil {\n\t\treturn model.ImageTarget{}, errors.Wrap(err, \"injectImageDependencies\")\n\t}\n\n\tswitch bd := iTarget.BuildDetails.(type) {\n\tcase model.DockerBuild:\n\t\tbd.Dockerfile = newDf.String()\n\t\tiTarget = iTarget.WithBuildDetails(bd)\n\tcase model.FastBuild:\n\t\tbd.BaseDockerfile = newDf.String()\n\t\tiTarget = iTarget.WithBuildDetails(bd)\n\t}\n\n\treturn iTarget, nil\n}", "title": "" }, { "docid": "d88b83a88aa40f4d3712f1aee5c2db9c", "score": "0.5126058", "text": "func BuildDockerImage(cli imagebuilder, contextPath string, tag string) error {\n\tdockerBuildContext, err := os.Open(contextPath)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error opening build context tarfile: %v\", err)\n\t}\n\toptions := types.ImageBuildOptions{\n\t\tSuppressOutput: false,\n\t\tRemove: true,\n\t\tForceRemove: true,\n\t\tPullParent: true,\n\t\tTags: []string{tag},\n\t\tDockerfile: \"Dockerfile\",\n\t}\n\tbuildResponse, err := cli.ImageBuild(context.Background(), dockerBuildContext, options)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error building Docker image: %v\", err)\n\t}\n\tdefer func() {\n\t\tif err := buildResponse.Body.Close(); err != nil {\n\t\t\tlog.Printf(\"Error closing Docker build response body: %v\", err)\n\t\t}\n\t}()\n\n\tlog.Printf(\"Building Docker image from build context %s\", contextPath)\n\n\treader := bufio.NewReader(buildResponse.Body)\n\tfor {\n\t\tline, err := reader.ReadBytes('\\r')\n\t\tif 
err != nil && err != io.EOF {\n\t\t\treturn fmt.Errorf(\"Error reading from Docker: %v\", err)\n\t\t}\n\n\t\tline = bytes.TrimSpace(line)\n\t\tvar output dockerCliOutput\n\t\tjson.Unmarshal(line, &output)\n\n\t\tfmt.Printf(\"%s\", output.Stream)\n\n\t\tif err != nil {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "9697b9fb5cac9d2e1b6aee54f0112275", "score": "0.51248395", "text": "func generateArgs(images []string) []string {\n\tvar args []string\n\targs = append(args, \"build\")\n\targs = append(args, \"-f\")\n\targs = append(args, \"Dockerfile\")\n\tfor _, img := range images {\n\t\targs = append(args, \"-t\")\n\t\targs = append(args, img)\n\t}\n\treturn append(args, \".\")\n}", "title": "" }, { "docid": "3a49bf8917491d6f08bb439ec756d66a", "score": "0.5124519", "text": "func (d *ReuseDockerfileContainerizer) GetContainer(plan plantypes.Plan, service plantypes.Service) (irtypes.Container, error) {\n\tcontainer := irtypes.NewContainer(d.GetContainerBuildStrategy(), service.Image, true)\n\n\tdockerfilePath := service.ContainerizationTargetOptions[0]\n\tif _, err := os.Stat(dockerfilePath); os.IsNotExist(err) {\n\t\tlog.Errorf(\"Unable to find the Dockerfile at path %q Error: %q\", dockerfilePath, err)\n\t\tlog.Errorf(\"Will assume the dockerfile will be copied and will proceed.\")\n\t}\n\n\tdockerfileDir := filepath.Dir(dockerfilePath)\n\tdockerfileName := filepath.Base(dockerfilePath)\n\tcontext := \".\"\n\tif sc, ok := service.BuildArtifacts[plantypes.SourceDirectoryBuildArtifactType]; ok {\n\t\tif len(sc) > 0 {\n\t\t\tvar err error\n\t\t\trootDir := plan.Spec.Inputs.RootDir\n\t\t\tsourceCodeDir := sc[0]\n\t\t\tcontext, err = filepath.Rel(rootDir, sourceCodeDir)\n\t\t\tif err != nil {\n\t\t\t\tlog.Errorf(\"Failed to make the context path %q relative to the root directory %q Error: %q\", sourceCodeDir, rootDir, err)\n\t\t\t\treturn container, err\n\t\t\t}\n\t\t}\n\t}\n\n\tdockerBuildScript, err := common.GetStringFromTemplate(scripts.Dockerbuild_sh, struct {\n\t\tDockerfilename string\n\t\tImageName string\n\t\tContext string\n\t}{\n\t\tDockerfilename: dockerfileName,\n\t\tImageName: service.Image,\n\t\tContext: context,\n\t})\n\tif err != nil {\n\t\tlog.Warnf(\"Unable to translate template to string : %s\", scripts.Dockerbuild_sh)\n\t}\n\trelOutputPath, err := filepath.Rel(plan.Spec.Inputs.RootDir, dockerfileDir)\n\tif err != nil {\n\t\tlog.Errorf(\"Failed to make the Dockerfile directory %q relative to the root directory %q Error: %q\", dockerfileDir, plan.Spec.Inputs.RootDir, err)\n\t\treturn container, err\n\t}\n\tcontainer.AddFile(filepath.Join(relOutputPath, service.ServiceName+\"-docker-build.sh\"), dockerBuildScript)\n\n\treturn container, nil\n}", "title": "" }, { "docid": "41af9a5bf8287bfe3a5cba2b7268d0b6", "score": "0.51231927", "text": "func deployDockerComposeFile(operation *ServiceDeployOperation) string {\n\tecs := ECS.New(sess, getClusterName())\n\n\tecsService := ecs.DescribeService(operation.ServiceName)\n\n\t//read the compose file configuration\n\tcomposeFile := dockercompose.Read(operation.ComposeFile)\n\tdockerServices, err := getDockerServicesFromComposeFile(&composeFile.Data)\n\n\tif err != nil {\n\t\tconsole.IssueExit(err.Error())\n\t}\n\n\t//register new task definition with container definitions from docker compose services\n\tcontainerDefinitions := convertDockerServicesToContainerDefinitions(dockerServices)\n\ttaskDefinitionArn := ecs.UpdateTaskDefinitionContainers(ecsService.TaskDefinitionArn, containerDefinitions, 
operation.ComposeImageOnly)\n\n\t//update service with new task definition\n\tecs.UpdateServiceTaskDefinition(operation.ServiceName, taskDefinitionArn)\n\n\tconsole.Info(\"Deployed revision %s to service %s.\", ecs.GetRevisionNumber(taskDefinitionArn), operation.ServiceName)\n\n\treturn taskDefinitionArn\n}", "title": "" }, { "docid": "289e417da5ea1f50c87fcbe85f6f07ad", "score": "0.512061", "text": "func LinuxBuildImage_FromAsset(scope constructs.Construct, id *string, props *awsecrassets.DockerImageAssetProps) IBuildImage {\n\t_init_.Initialize()\n\n\tvar returns IBuildImage\n\n\t_jsii_.StaticInvoke(\n\t\t\"monocdk.aws_codebuild.LinuxBuildImage\",\n\t\t\"fromAsset\",\n\t\t[]interface{}{scope, id, props},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "title": "" }, { "docid": "1564085584b33990e2b6637494128ca9", "score": "0.51178426", "text": "func Build(ctx context.Context, b parser.Backend, p parser.Parser, l parser.Linter) (v1.Image, error) {\n\t// Get YAML stream.\n\tr, err := b.Init(ctx)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, errInitBackend)\n\t}\n\tdefer func() { _ = r.Close() }()\n\n\t// Copy stream once to parse and once write to tarball.\n\tbuf := new(bytes.Buffer)\n\tpkg, err := p.Parse(ctx, ioutil.NopCloser(io.TeeReader(r, buf)))\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, errParserPackage)\n\t}\n\tif err := l.Lint(pkg); err != nil {\n\t\treturn nil, errors.Wrap(err, errLintPackage)\n\t}\n\tif _, err = io.Copy(buf, r); err != nil {\n\t\treturn nil, errors.Wrap(err, errCopyStream)\n\t}\n\n\t// Write on-disk package contents to tarball.\n\ttarBuf := new(bytes.Buffer)\n\ttw := tar.NewWriter(tarBuf)\n\n\thdr := &tar.Header{\n\t\tName: StreamFile,\n\t\tMode: int64(StreamFileMode),\n\t\tSize: int64(buf.Len()),\n\t}\n\tif err := tw.WriteHeader(hdr); err != nil {\n\t\treturn nil, errors.Wrap(err, errTarFromStream)\n\t}\n\tif _, err = io.Copy(tw, buf); err != nil {\n\t\treturn nil, errors.Wrap(err, errTarFromStream)\n\t}\n\tif err := tw.Close(); err != nil {\n\t\treturn nil, errors.Wrap(err, errTarFromStream)\n\t}\n\n\t// Build image layer from tarball.\n\tlayer, err := tarball.LayerFromReader(tarBuf)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, errLayerFromTar)\n\t}\n\n\t// Append layer to to scratch image.\n\treturn mutate.AppendLayers(empty.Image, layer)\n}", "title": "" }, { "docid": "64d567424876cb6a1ac6d3a545558a87", "score": "0.5104497", "text": "func (d *Docker) createImage(ctx context.Context, job *Job) (*container.ContainerCreateCreatedBody, error) {\n\tcreate := func() (container.ContainerCreateCreatedBody, error) {\n\t\treturn d.client.ContainerCreate(ctx, &container.Config{\n\t\t\tImage: job.Image,\n\t\t\tCmd: []string{\"/bin/bash\", \"-c\", strings.Join(job.Commands, \"\\n\")},\n\t\t}, nil, nil, nil, job.Name)\n\t}\n\n\tresp, err := create()\n\tif err != nil {\n\t\td.logger.Errorf(\"Failed to create container image '%s' for %s: %v\", job.Image, job.Name, err)\n\t\t// if image not found locally, try to pull it\n\t\tif err := d.pullImage(ctx, job.Image); err != nil {\n\t\t\td.logger.Errorf(\"Failed to pull image '%s' from docker.io: %v\", job.Image, err)\n\t\t\tif err := d.buildImage(ctx, job.Dockerfile, job.Image); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\tresp, err = create()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn &resp, err\n}", "title": "" }, { "docid": "ca8b65d90b80b8aeca4b9fe3076d6cc8", "score": "0.5103564", "text": "func (testContext *TestContext) TryAddAndExecDockerfile(repoId string, desc 
string,\n\timageName string, dockerfilePath string, paramNames, paramValues []string) (\n\tstring, string, string, map[string]interface{}) {\n\t\n\ttestContext.StartTest(\"TryAddAndExecDockerfile\")\n\t\n\tif len(paramNames) != len(paramValues) { panic(\n\t\t\"Invalid test: len param names != len param values\") }\n\tvar paramStr string = \"\"\n\tfor i, paramName := range paramNames {\n\t\tif i > 0 { paramStr = paramStr + \";\" }\n\t\tparamStr = paramStr + fmt.Sprintf(\"%s:%s\", paramName, paramValues[i])\n\t}\n\t\n\tvar resp *http.Response\n\tvar err error\n\t//resp, err = testContext.SendSessionFilePost(testContext.SessionId,\n\tresp, err = testContext.SendSessionFilePost(\"\",\n\t\t\"addAndExecDockerfile\",\n\t\t[]string{\"Log\", \"RepoId\", \"Description\", \"ImageName\", \"SessionId\", \"Params\"},\n\t\t[]string{testContext.TestDemarcation(), repoId, desc, imageName,\n\t\t\ttestContext.SessionId, paramStr},\n\t\tdockerfilePath)\n\n\tdefer resp.Body.Close()\n\n\tif ! testContext.Verify200Response(resp) { testContext.FailTest() }\n\t\n\t// Returns a DockerImageVersionDesc.\n\tvar responseMap map[string]interface{}\n\tresponseMap, err = rest.ParseResponseBodyToMap(resp.Body)\n\tif err != nil { fmt.Println(err.Error()); return \"\", \"\", \"\", nil }\n\tvar retObjId string = responseMap[\"ObjId\"].(string)\n\tvar retImageObjId string = responseMap[\"ImageObjId\"].(string)\n\tvar retVersion string = responseMap[\"Version\"].(string)\n\tvar retImageCreationEventId string = responseMap[\"ImageCreationEventId\"].(string)\n\tvar retCreationDate string = responseMap[\"CreationDate\"].(string)\n\trest.PrintMap(responseMap)\n\t\n\ttestContext.AssertThat(retObjId != \"\", \"ObjId is empty\")\n\ttestContext.AssertThat(retImageObjId != \"\", \"ImageObjId is empty\")\n\ttestContext.AssertThat(retVersion != \"\", \"Version is empty\")\n\ttestContext.AssertThat(retImageCreationEventId != \"\", \"ImageCreationEventId is empty\")\n\ttestContext.AssertThat(retCreationDate != \"\", \"CreationDate is empty\")\n\t\n\ttestContext.PassTestIfNoFailures()\n\treturn retObjId, retImageObjId, retImageCreationEventId, responseMap\n}", "title": "" }, { "docid": "f37c6b7ebfefaa2f445114f17032cd75", "score": "0.5099935", "text": "func (functionDeployment *FunctionDeployment) makeFunctionGoContent() (functionGoContent string, err error) {\n\ttimeStamp := fmt.Sprintf(\"%s\", time.Now())\n\tswitch functionDeployment.Settings.Service.GCF.FunctionType {\n\tcase \"backgroundPubSub\":\n\t\treturn strings.Replace(strings.Replace(backgroundPubSubFunctionGo,\n\t\t\t\"<serviceName>\", functionDeployment.Core.ServiceName, -1), \"<timeStamp>\", timeStamp, -1), nil\n\tcase \"backgroundGCS\":\n\t\treturn strings.Replace(strings.Replace(backgroundGCSFunctionGo,\n\t\t\t\"<serviceName>\", functionDeployment.Core.ServiceName, -1), \"<timeStamp>\", timeStamp, -1), nil\n\tdefault:\n\t\treturn \"\", fmt.Errorf(\"functionType provided not managed: %s\", functionDeployment.Settings.Service.GCF.FunctionType)\n\t}\n}", "title": "" }, { "docid": "992d4f7fa07f9af5244a0be359fccf00", "score": "0.5097585", "text": "func (c *Config) Build(params map[string]interface{}) ([]byte, error) {\n\ttmpl, err := c.getTemplate()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get template: %s\", err)\n\t}\n\tif _, ok := params[\"client_verification\"]; !ok {\n\t\tparams[\"client_verification\"] = config.DefaultClientVerification\n\t}\n\tsite, err := populateTemplate(tmpl, params)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"populate template: %s\", 
err)\n\t}\n\n\t// Build nginx config with base template and component specific template.\n\ttmpl, err = config.GetDefaultTemplate(\"base\")\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get default base template: %s\", err)\n\t}\n\tsrc, err := populateTemplate(tmpl, map[string]interface{}{\n\t\t\"site\": string(site),\n\t\t\"ssl_enabled\": !c.tls.Server.Disabled,\n\t\t\"ssl_certificate\": c.tls.Server.Cert.Path,\n\t\t\"ssl_certificate_key\": c.tls.Server.Key.Path,\n\t\t\"ssl_password_file\": c.tls.Server.Passphrase.Path,\n\t\t\"ssl_client_certificate\": _clientCABundle,\n\t})\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"populate base: %s\", err)\n\t}\n\treturn src, nil\n}", "title": "" } ]
7787c0dee8820bb71c0f939030fdcfde
NewIPCPOptionIPAddress creates an IPCPOption with given IP address.
[ { "docid": "2ddce5a86772ad20cc65b505b6ac324f", "score": "0.8555483", "text": "func NewIPCPOptionIPAddress(ip net.IP) *IPCPOption {\n\tv4 := ip.To4()\n\n\t// IPv4\n\tif v4 != nil {\n\t\treturn NewIPCPOption(IPCPOptionIPAddress, v4)\n\t}\n\n\t// IPv6\n\treturn NewIPCPOption(IPCPOptionIPAddress, ip)\n}", "title": "" } ]
[ { "docid": "f55a7e04b15cde3307fdf5291f096db1", "score": "0.66251194", "text": "func NewIPCPOption(typ uint8, payload []byte) *IPCPOption {\n\treturn &IPCPOption{\n\t\tType: typ,\n\t\tLength: uint8(2 + len(payload)),\n\t\tPayload: payload,\n\t}\n}", "title": "" }, { "docid": "bb101be157e1f7e609dda94efc3f9ca4", "score": "0.6182222", "text": "func CreateOptionIpam(ipV4, ipV6 net.IP, llIPs []net.IP, ipamOptions map[string]string) EndpointOption {\n\treturn func(ep *Endpoint) {\n\t\tep.prefAddress = ipV4\n\t\tep.prefAddressV6 = ipV6\n\t\tif len(llIPs) != 0 {\n\t\t\tfor _, ip := range llIPs {\n\t\t\t\tnw := &net.IPNet{IP: ip, Mask: linkLocalMask}\n\t\t\t\tif ip.To4() == nil {\n\t\t\t\t\tnw.Mask = linkLocalMaskIPv6\n\t\t\t\t}\n\t\t\t\tep.iface.llAddrs = append(ep.iface.llAddrs, nw)\n\t\t\t}\n\t\t}\n\t\tep.ipamOptions = ipamOptions\n\t}\n}", "title": "" }, { "docid": "5e14a12c5ae5bbce496cd5e3771e7266", "score": "0.6091939", "text": "func MakeIaAddrOption(addr net.IP, preferredLifetime, validLifetime uint32) *Option {\n\tvalue := make([]byte, 24)\n\tcopy(value[0:], addr)\n\tbinary.BigEndian.PutUint32(value[16:], preferredLifetime)\n\tbinary.BigEndian.PutUint32(value[20:], validLifetime)\n\treturn MakeOption(OptIaAddr, value)\n}", "title": "" }, { "docid": "0a4c70ebb3132b3c7772c6fed9dbc806", "score": "0.593614", "text": "func NewIPAddress(ip net.IP, port uint16) *IPAddress {\n\treturn &IPAddress{ip: ip, port: port}\n}", "title": "" }, { "docid": "144b3b09a4dc98f1b43954ab01598b7f", "score": "0.5880498", "text": "func NewIPCPOptionSecondaryDNS(ip net.IP) *IPCPOption {\n\tv4 := ip.To4()\n\n\t// IPv4\n\tif v4 != nil {\n\t\treturn NewIPCPOption(IPCPOptionSecondaryDNS, v4)\n\t}\n\n\t// IPv6\n\treturn NewIPCPOption(IPCPOptionSecondaryDNS, ip)\n}", "title": "" }, { "docid": "8ec0ce826337a88858c7133704a16446", "score": "0.57413477", "text": "func NewIOptionPool(address common.Address, backend bind.ContractBackend) (*IOptionPool, error) {\n\tcontract, err := bindIOptionPool(address, backend, backend, backend)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &IOptionPool{IOptionPoolCaller: IOptionPoolCaller{contract: contract}, IOptionPoolTransactor: IOptionPoolTransactor{contract: contract}, IOptionPoolFilterer: IOptionPoolFilterer{contract: contract}}, nil\n}", "title": "" }, { "docid": "a48156b8e19b4db219fd389462815663", "score": "0.5701355", "text": "func NewIPCPOptionPrimaryDNS(ip net.IP) *IPCPOption {\n\tv4 := ip.To4()\n\n\t// IPv4\n\tif v4 != nil {\n\t\treturn NewIPCPOption(IPCPOptionPrimaryDNS, v4)\n\t}\n\n\t// IPv6\n\treturn NewIPCPOption(IPCPOptionPrimaryDNS, ip)\n}", "title": "" }, { "docid": "c54afbab88746e35daf0a833753c7c3a", "score": "0.56655765", "text": "func NewOption() *IPLimitOption {\n\topt := new(IPLimitOption)\n\treturn opt\n}", "title": "" }, { "docid": "5db11278de724e9f83d6cd5605a05e26", "score": "0.56386125", "text": "func ParseIPAddressOption(data []byte) (*IPAddressOption, error) {\n\tif len(data) < 2 {\n\t\treturn nil, dhcpv4.ErrShortByteStream\n\t}\n\tcode := dhcpv4.OptionCode(data[0])\n\tlength := int(data[1])\n\tif length != 4 {\n\t\treturn nil, fmt.Errorf(\"unexepcted length: expected 4, got %v\", length)\n\t}\n\tif len(data) < 6 {\n\t\treturn nil, dhcpv4.ErrShortByteStream\n\t}\n\treturn &IPAddressOption{OptionCode: code, IPAddress: net.IP(data[2 : 2+length])}, nil\n}", "title": "" }, { "docid": "827e5bd3c5b8d168ad2f821e15d64dc3", "score": "0.5626501", "text": "func (s IpNetwork_getRemoteHost_Params) NewAddress() (IpAddress, error) {\n\tss, err := 
NewIpAddress(s.Struct.Segment())\n\tif err != nil {\n\t\treturn IpAddress{}, err\n\t}\n\terr = s.Struct.SetPtr(0, ss.Struct.ToPtr())\n\treturn ss, err\n}", "title": "" }, { "docid": "53d5f3236f0835b17233058a0eec664f", "score": "0.54806083", "text": "func (o ServerDiskEditParameterOutput) IpAddress() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ServerDiskEditParameter) *string { return v.IpAddress }).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "f91c6831919c231cc6c61d14632a600e", "score": "0.5471398", "text": "func (ao *AzureOperation) createIPAddress(resourceGroupName string, addressName string, region string) (*network.PublicIPAddress, derrors.Error) {\n\tnetworkClient := network.NewPublicIPAddressesClient(ao.credentials.SubscriptionId)\n\tnetworkClient.Authorizer = ao.managementAuthorizer\n\ttags := make(map[string]*string, 0)\n\ttags[CreateByTag] = StringAsPTR(CreateByValue)\n\n\tproperties := &network.PublicIPAddressPropertiesFormat{\n\t\tPublicIPAllocationMethod: network.Static,\n\t\tIPConfiguration: nil,\n\t\tDNSSettings: nil,\n\t\tIPAddress: nil,\n\t\t// IdleTimeoutInMinutes set to the default value of the cli\n\t\tIdleTimeoutInMinutes: Int32AsPTR(4),\n\t}\n\n\tcreateRequest := network.PublicIPAddress{\n\t\tPublicIPAddressPropertiesFormat: properties,\n\t\tLocation: StringAsPTR(region),\n\t\tTags: tags,\n\t}\n\tctx, cancel := common.GetContext()\n\tdefer cancel()\n\tresponseFuture, createErr := networkClient.CreateOrUpdate(ctx, resourceGroupName, addressName, createRequest)\n\tif createErr != nil {\n\t\treturn nil, derrors.AsError(createErr, \"cannot create IP address\")\n\t}\n\tfutureContext, cancelFuture := context.WithTimeout(context.Background(), IPAddressCreateDeadline)\n\tdefer cancelFuture()\n\twaitErr := responseFuture.WaitForCompletionRef(futureContext, networkClient.Client)\n\tif waitErr != nil {\n\t\treturn nil, derrors.AsError(waitErr, \"IP address failed during creation\")\n\t}\n\tIPAddress, resultErr := responseFuture.Result(networkClient)\n\tif resultErr != nil {\n\t\tlog.Error().Interface(\"err\", resultErr).Msg(\"IP address creation failed\")\n\t\treturn nil, derrors.AsError(resultErr, \"IP address creation failed\")\n\t}\n\tlog.Debug().Interface(\"ip\", IPAddress).Msg(\"ip address created\")\n\treturn &IPAddress, nil\n}", "title": "" }, { "docid": "21ee2de6bec8738a8f6e55d003da6613", "score": "0.54497826", "text": "func (objMgr *ObjectManager) CreateIPAddress(ipAddr string, macAddr string, subnetAddr string, domain string, org string, name string)(string,error){\n\tglog.Infof(\"Creating Ip %s in subnet : %s\", ipAddr, subnetAddr)\n\tipObj := NewIPObject(ipAddr, subnetAddr, domain, org, name)\n\tpayload :=&Payload{Method:\"POST\", Path:\"object/add\", Body:ipObj}\n\tres,err := objMgr.Client.MakeRequest(*payload)\n\tif err!=nil{\n\t\tglog.Errorf(\"Error Creating Ip :: Msg :'%s', Status : %s\", err.ErrorMsg, err.Status)\n\t\treturn \"\", errors.New(err.Status)\n\t}\n\treturn string(res), nil\n}", "title": "" }, { "docid": "8f2c23f21d798ca67ae75801affc9b19", "score": "0.54403704", "text": "func (c Client) IPAddressCreate(ipAddress types.IPAddress) (*models.IPAddress, error) {\n\tdata := new(models.WritableIPAddress)\n\tdata.Address = swag.String(ipAddress.String())\n\tdata.Tags = []string{}\n\n\tparams := ipam.NewIpamIPAddressesCreateParams()\n\tparams.WithData(data)\n\n\t_, err := c.client.Ipam.IpamIPAddressesCreate(params, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn c.IPAddressFind(ipAddress)\n}", "title": "" }, { "docid": 
"c624c869bf14485600f605fd33b53638", "score": "0.54146224", "text": "func NewEventIP(data models.IP) {\n\tlog.Debug(\"New event IP !\")\n\tEventQueue <- Event{Type: EventIP, Timestamp: time.Now(), Data: data}\n}", "title": "" }, { "docid": "abea63f45c78b3fd225be698b1859b06", "score": "0.5339068", "text": "func NewIPCPOptionMobileIPv4(ip net.IP) *IPCPOption {\n\tv4 := ip.To4()\n\n\t// IPv4\n\tif v4 != nil {\n\t\treturn NewIPCPOption(IPCPOptionMobileIPv4, v4)\n\t}\n\n\t// IPv6\n\treturn NewIPCPOption(IPCPOptionMobileIPv4, ip)\n}", "title": "" }, { "docid": "1754905c398d2f968a9f639ee87725dc", "score": "0.5325128", "text": "func NewIP(b net.IP) IP {\n\tb4 := b.To4()\n\tif b4 == nil {\n\t\tpanic(fmt.Sprintf(\"To4(%v) failed\", b))\n\t}\n\treturn IP(get32(b4))\n}", "title": "" }, { "docid": "85eae2c250365c2aedcc542f78c62c4c", "score": "0.530032", "text": "func NewAddress(addresses ...ftn.Address) *AddressCmd {\n\treturn &AddressCmd{addresses}\n}", "title": "" }, { "docid": "1e95fbf624ca86a9eb8aa4639a25e29b", "score": "0.52888846", "text": "func (o ServerDiskEditParameterPtrOutput) IpAddress() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ServerDiskEditParameter) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.IpAddress\n\t}).(pulumi.StringPtrOutput)\n}", "title": "" }, { "docid": "23d3765795e516ac9f848961dcce9c54", "score": "0.5258869", "text": "func NewIpSegmentConfiguration()(*IpSegmentConfiguration) {\n m := &IpSegmentConfiguration{\n SegmentConfiguration: *NewSegmentConfiguration(),\n }\n odataTypeValue := \"#microsoft.graph.ipSegmentConfiguration\"\n m.SetOdataType(&odataTypeValue)\n return m\n}", "title": "" }, { "docid": "f382145487351416db11082fc28cba3f", "score": "0.52350026", "text": "func NewIP() IP {\n\treturn &ipRoute{}\n}", "title": "" }, { "docid": "0465f309d2bc8fc0d43123da5cd85419", "score": "0.5234603", "text": "func NewIPSelector(network string, addrs []string) *IPSelector {\n\treturn &IPSelector{\n\t\tnetwork: network,\n\t\taddrs: addrs,\n\t\tbalancer: &balancer.RandomBalancer{addrs},\n\t}\n}", "title": "" }, { "docid": "a57eb17d656c87b2f87455c680ecc3dc", "score": "0.52333635", "text": "func NewIPKey(addr string) IPKey {\n\thost, _, err := net.SplitHostPort(addr)\n\tif err == nil && host != \"\" {\n\t\taddr = host\n\t} else {\n\t\t// If SplitHostPort failed, IPv6 addresses may still have brackets.\n\t\taddr = strings.Trim(addr, \"[]\")\n\t}\n\n\tip := net.ParseIP(addr)\n\tif ip == nil {\n\t\treturn IPKey{} // i.e. net.IPv6unspecified\n\t}\n\t// IPv4 is encoded in a net.IP of length IPv6len as\n\t// 00:00:00:00:00:00:00:00:00:00:ff:ff:xx:xx:xx:xx\n\t// Thus, must copy all 16 bytes for IPv4.\n\tN := net.IPv6len\n\tif ip.To4() == nil && !ip.Equal(net.IPv6loopback) {\n\t\t// Drop the last 64 bits (interface) of non-loopback IPv6 addresses.\n\t\tN = net.IPv6len / 2 // i.e. 
ip = ip.Mask(net.CIDRMask(64, 128))\n\t}\n\tvar ipKey IPKey\n\tcopy(ipKey[:], ip[:N])\n\treturn ipKey\n}", "title": "" }, { "docid": "64d0ada9e912803ce5077a54e92220c2", "score": "0.52306944", "text": "func NewIPAPI(cfg *config.Config, bus *eb.EventBus, pool *resolvers.ResolverPool) *IPAPI {\n\ti := &IPAPI{\n\t\tSourceType: requests.API,\n\t\tRateLimit: time.Second,\n\t\tfilter: sf.NewStringFilter(),\n\t}\n\n\ti.BaseService = *services.NewBaseService(i, \"ipapi\", cfg, bus, pool)\n\treturn i\n}", "title": "" }, { "docid": "18af20fdc920a907f86b5164c62949c7", "score": "0.5181931", "text": "func (o *PutIPAMIpnatParams) WithIPAddressTo(iPAddressTo *string) *PutIPAMIpnatParams {\n\to.SetIPAddressTo(iPAddressTo)\n\treturn o\n}", "title": "" }, { "docid": "368c8b4ddfe8a394ea9cd859ca26e918", "score": "0.5180447", "text": "func IPAddresses(ips ...net.IP) CertOption {\n\treturn func(c *CertOptions) {\n\t\tc.IPAddresses = ips\n\t}\n}", "title": "" }, { "docid": "9419601fd196c0fa968a7b38b7f656bd", "score": "0.5127997", "text": "func NewVip(ctx *pulumi.Context,\n\tname string, args *VipArgs, opts ...pulumi.ResourceOption) (*Vip, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required arguments\")\n\t}\n\n\tif args.PoolId == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'PoolId'\")\n\t}\n\tif args.Port == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'Port'\")\n\t}\n\tif args.Protocol == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'Protocol'\")\n\t}\n\tif args.SubnetId == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'SubnetId'\")\n\t}\n\topts = internal.PkgResourceDefaultOpts(opts)\n\tvar resource Vip\n\terr := ctx.RegisterResource(\"openstack:loadbalancer/vip:Vip\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "title": "" }, { "docid": "4e5e5e1e8272b086e5f527f05be7eea8", "score": "0.5126385", "text": "func NewAccessIpAddress(classId string, objectType string) *AccessIpAddress {\n\tthis := AccessIpAddress{}\n\tthis.ClassId = classId\n\tthis.ObjectType = objectType\n\treturn &this\n}", "title": "" }, { "docid": "51887a78db907aa7ceb022aadb3d392c", "score": "0.5115514", "text": "func NewPCOPPPWithIPCPOptions(code, id uint8, opts ...*IPCPOption) *PCOPPP {\n\toffset := 0\n\tb := make([]byte, offset)\n\tfor _, o := range opts {\n\t\tl := o.MarshalLen()\n\t\tb = append(b, make([]byte, l)...)\n\t\tif err := o.MarshalTo(b[offset : offset+l]); err != nil {\n\t\t\treturn nil\n\t\t}\n\n\t\toffset += l\n\t}\n\n\treturn NewPCOPPP(code, id, b)\n}", "title": "" }, { "docid": "0f00df1b83512e7e29490b1426d6497b", "score": "0.5104287", "text": "func NewIPCleaner(r client.Client,\n\toption *option.ControllerOption,\n\tcloudNetClient pbcloudnet.CloudNetserviceClient) *IPCleaner {\n\n\treturn &IPCleaner{\n\t\tkubeClient: r,\n\t\tcloudNetClient: cloudNetClient,\n\t\tcleanInterval: time.Duration(option.IPCleanCheckMinute) * time.Minute,\n\t\tmaxReservedTime: time.Duration(option.IPCleanMaxReservedMinute) * time.Minute,\n\t}\n}", "title": "" }, { "docid": "2bd43a88371402864a41f994d3df6d88", "score": "0.5103688", "text": "func (sc *SessionCreate) SetIPAddress(s string) *SessionCreate {\n\tsc.mutation.SetIPAddress(s)\n\treturn sc\n}", "title": "" }, { "docid": "772eba68dca5330a7f0deb9041caf4ca", "score": "0.51011235", "text": "func (t *Interface_Subinterface_Ipv4) NewAddress(Ip string) (*Interface_Subinterface_Ipv4_Address, error) 
{\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Address == nil {\n\t\tt.Address = make(map[string]*Interface_Subinterface_Ipv4_Address)\n\t}\n\n\tkey := Ip\n\n\t// Ensure that this key has not already been used in the\n\t// list. Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Address[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Address\", key)\n\t}\n\n\tt.Address[key] = &Interface_Subinterface_Ipv4_Address{\n\t\tIp: &Ip,\n\t}\n\n\treturn t.Address[key], nil\n}", "title": "" }, { "docid": "5330b9089c357f5ce1c5098cf89d79a8", "score": "0.5096787", "text": "func MakeIaNaOption(iaid []byte, t1, t2 uint32, iaOption *Option) *Option {\n\tserializedIaOption, _ := iaOption.Marshal()\n\tvalue := make([]byte, 12+len(serializedIaOption))\n\tcopy(value[0:], iaid[0:4])\n\tbinary.BigEndian.PutUint32(value[4:], t1)\n\tbinary.BigEndian.PutUint32(value[8:], t2)\n\tcopy(value[12:], serializedIaOption)\n\treturn MakeOption(OptIaNa, value)\n}", "title": "" }, { "docid": "774f370170fca0ccb031f81cc9389c01", "score": "0.5084268", "text": "func (o *InterfaceView) SetIPAddress(v string) {\n\to.IPAddress = v\n}", "title": "" }, { "docid": "1a43edc6f349792981808b9a44a10f0d", "score": "0.5081134", "text": "func NewIOptionPoolCaller(address common.Address, caller bind.ContractCaller) (*IOptionPoolCaller, error) {\n\tcontract, err := bindIOptionPool(address, caller, nil, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &IOptionPoolCaller{contract: contract}, nil\n}", "title": "" }, { "docid": "bc7c5951af17c2033b94e4fda72d988e", "score": "0.5040184", "text": "func (o *CreateOptions) WithIPRange(value net.IPNet) *CreateOptions {\n\to.IPRange = &value\n\treturn o\n}", "title": "" }, { "docid": "4dc4979b1af01c72bceaaacc48b37242", "score": "0.5033948", "text": "func ReservedIP() *cobra.Command {\n\treservedIPCmd := &cobra.Command{\n\t\tUse: \"reserved-ip\",\n\t\tAliases: []string{\"rip\"},\n\t\tShort: \"reserved-ip lets you interact with reserved-ip \",\n\t\tLong: reservedIPLong,\n\t\tExample: reservedIPExample,\n\t}\n\n\treservedIPCmd.AddCommand(reservedIPGet, reservedIPList, reservedIPDelete, reservedIPAttach, reservedIPDetach, reservedIPConvert, reservedIPCreate, reservedIPUpdate)\n\n\t// List\n\treservedIPList.Flags().StringP(\"cursor\", \"c\", \"\", \"(optional) Cursor for paging.\")\n\treservedIPList.Flags().IntP(\"per-page\", \"p\", 100, \"(optional) Number of items requested per page. 
Default is 100 and Max is 500.\")\n\n\t// Attach\n\treservedIPAttach.Flags().StringP(\"instance-id\", \"i\", \"\", \"id of instance you want to attach\")\n\tif err := reservedIPAttach.MarkFlagRequired(\"instance-id\"); err != nil {\n\t\tfmt.Printf(\"error marking reserved-ip attach 'instance-id' flag required: %v\\n\", err)\n\t\tos.Exit(1)\n\t}\n\n\t// Convert\n\treservedIPConvert.Flags().StringP(\"ip\", \"i\", \"\", \"ip you wish to convert\")\n\tif err := reservedIPConvert.MarkFlagRequired(\"ip\"); err != nil {\n\t\tfmt.Printf(\"error marking reserved-ip convert 'ip' flag required: %v\\n\", err)\n\t\tos.Exit(1)\n\t}\n\treservedIPConvert.Flags().StringP(\"label\", \"l\", \"\", \"label\")\n\n\t// Create\n\treservedIPCreate.Flags().StringP(\"region\", \"r\", \"\", \"id of region\")\n\tif err := reservedIPCreate.MarkFlagRequired(\"region\"); err != nil {\n\t\tfmt.Printf(\"error marking reserved-ip create 'region' flag required: %v\\n\", err)\n\t\tos.Exit(1)\n\t}\n\treservedIPCreate.Flags().StringP(\"type\", \"t\", \"\", \"type of IP : v4 or v6\")\n\tif err := reservedIPCreate.MarkFlagRequired(\"type\"); err != nil {\n\t\tfmt.Printf(\"error marking reserved-ip create 'type' flag required: %v\\n\", err)\n\t\tos.Exit(1)\n\t}\n\treservedIPCreate.Flags().StringP(\"label\", \"l\", \"\", \"label\")\n\n\t// Update\n\treservedIPUpdate.Flags().StringP(\"label\", \"l\", \"\", \"label\")\n\tif err := reservedIPUpdate.MarkFlagRequired(\"label\"); err != nil {\n\t\tfmt.Printf(\"error marking reserved-ip update 'label' flag required: %v\\n\", err)\n\t\tos.Exit(1)\n\t}\n\n\treturn reservedIPCmd\n}", "title": "" }, { "docid": "bb4f1570e6641e12042bf66c163a39ef", "score": "0.50111", "text": "func (o *GetLolGeoinfoV1GetlocationParams) SetIPAddress(iPAddress string) {\n\to.IPAddress = iPAddress\n}", "title": "" }, { "docid": "aa6972acb78749194c4dc3b975f9082f", "score": "0.50096285", "text": "func (m *RiskDetection) SetIpAddress(value *string)() {\n m.ipAddress = value\n}", "title": "" }, { "docid": "54cea2fdf170c507443b7ae99f25fcb5", "score": "0.5007038", "text": "func (client *PublicIPAddressesClient) listVirtualMachineScaleSetVMPublicIPAddressesCreateRequest(ctx context.Context, resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string, networkInterfaceName string, ipConfigurationName string, options *PublicIPAddressesListVirtualMachineScaleSetVMPublicIPAddressesOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses\"\n\tif resourceGroupName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif virtualMachineScaleSetName == \"\" {\n\t\treturn nil, errors.New(\"parameter virtualMachineScaleSetName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{virtualMachineScaleSetName}\", url.PathEscape(virtualMachineScaleSetName))\n\tif virtualmachineIndex == \"\" {\n\t\treturn nil, errors.New(\"parameter virtualmachineIndex cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{virtualmachineIndex}\", url.PathEscape(virtualmachineIndex))\n\tif networkInterfaceName == \"\" {\n\t\treturn nil, errors.New(\"parameter networkInterfaceName cannot be 
empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{networkInterfaceName}\", url.PathEscape(networkInterfaceName))\n\tif ipConfigurationName == \"\" {\n\t\treturn nil, errors.New(\"parameter ipConfigurationName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{ipConfigurationName}\", url.PathEscape(ipConfigurationName))\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\treq, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.ep, urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2018-10-01\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header.Set(\"Accept\", \"application/json\")\n\treturn req, nil\n}", "title": "" }, { "docid": "0e893b38be7b0ad238a8a4c72d5f66b0", "score": "0.5003593", "text": "func NewIPCVal(\n\tlogger log.Logger,\n\tsocketAddr string,\n) *IPCVal {\n\tsc := &IPCVal{\n\t\taddr: socketAddr,\n\t\tconnTimeout: connTimeout,\n\t\tconnHeartbeat: connHeartbeat,\n\t}\n\n\tsc.BaseService = *cmn.NewBaseService(logger, \"IPCVal\", sc)\n\n\treturn sc\n}", "title": "" }, { "docid": "a2f94452fa1e4ba60422ccfc71b63ac4", "score": "0.4992667", "text": "func (t *OpenconfigInterfaces_Interfaces_Interface_Subinterfaces_Subinterface_Ipv4_Addresses) NewAddress(Ip string) (*OpenconfigInterfaces_Interfaces_Interface_Subinterfaces_Subinterface_Ipv4_Addresses_Address, error){\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Address == nil {\n\t\tt.Address = make(map[string]*OpenconfigInterfaces_Interfaces_Interface_Subinterfaces_Subinterface_Ipv4_Addresses_Address)\n\t}\n\n\tkey := Ip\n\n\t// Ensure that this key has not already been used in the\n\t// list. 
Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Address[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Address\", key)\n\t}\n\n\tt.Address[key] = &OpenconfigInterfaces_Interfaces_Interface_Subinterfaces_Subinterface_Ipv4_Addresses_Address{\n\t\tIp: &Ip,\n\t}\n\n\treturn t.Address[key], nil\n}", "title": "" }, { "docid": "f689d84f6722db0e7750dab717e97212", "score": "0.49795628", "text": "func (s *API) CreateIP(req *CreateIPRequest, opts ...scw.RequestOption) (*IP, error) {\n\tvar err error\n\n\tdefaultProjectID, exist := s.client.GetDefaultProjectID()\n\tif exist && req.OrganizationID == nil && req.ProjectID == nil {\n\t\treq.ProjectID = &defaultProjectID\n\t}\n\n\tdefaultOrganizationID, exist := s.client.GetDefaultOrganizationID()\n\tif exist && req.OrganizationID == nil && req.ProjectID == nil {\n\t\treq.OrganizationID = &defaultOrganizationID\n\t}\n\n\tif req.Region == \"\" {\n\t\tdefaultRegion, _ := s.client.GetDefaultRegion()\n\t\treq.Region = defaultRegion\n\t}\n\n\tif fmt.Sprint(req.Region) == \"\" {\n\t\treturn nil, errors.New(\"field Region cannot be empty in request\")\n\t}\n\n\tscwReq := &scw.ScalewayRequest{\n\t\tMethod: \"POST\",\n\t\tPath: \"/lb/v1/regions/\" + fmt.Sprint(req.Region) + \"/ips\",\n\t\tHeaders: http.Header{},\n\t}\n\n\terr = scwReq.SetBody(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar resp IP\n\n\terr = s.client.Do(scwReq, &resp, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resp, nil\n}", "title": "" }, { "docid": "467dd7fa3cc7b8b9a5a9480542d3e538", "score": "0.49543002", "text": "func (test *Test) CreateIP(projectName string, ip models.IP) error {\n\tfor _, lip := range tests.NormalIPs {\n\t\tif lip.Value == ip.Value {\n\t\t\treturn models.ErrAlreadyExist\n\t\t}\n\t}\n\n\ttests.NormalIPs = append(tests.NormalIPs, ip)\n\n\tevents.NewEventIP(ip)\n\treturn nil\n}", "title": "" }, { "docid": "a806eb82f8c346323429cd3919b6cf98", "score": "0.4947277", "text": "func (s ApiSession_Params) NewRemoteAddress() (ip.IpAddress, error) {\n\tss, err := ip.NewIpAddress(s.Struct.Segment())\n\tif err != nil {\n\t\treturn ip.IpAddress{}, err\n\t}\n\terr = s.Struct.SetPtr(0, ss.Struct.ToPtr())\n\treturn ss, err\n}", "title": "" }, { "docid": "8225511763fc7803081e2f5dc69a7315", "score": "0.49416167", "text": "func NewEip(ctx *pulumi.Context,\n\tname string, args *EipArgs, opts ...pulumi.ResourceOpt) (*Eip, error) {\n\tinputs := make(map[string]interface{})\n\tif args == nil {\n\t\tinputs[\"associateWithPrivateIp\"] = nil\n\t\tinputs[\"instance\"] = nil\n\t\tinputs[\"networkInterface\"] = nil\n\t\tinputs[\"publicIpv4Pool\"] = nil\n\t\tinputs[\"tags\"] = nil\n\t\tinputs[\"vpc\"] = nil\n\t} else {\n\t\tinputs[\"associateWithPrivateIp\"] = args.AssociateWithPrivateIp\n\t\tinputs[\"instance\"] = args.Instance\n\t\tinputs[\"networkInterface\"] = args.NetworkInterface\n\t\tinputs[\"publicIpv4Pool\"] = args.PublicIpv4Pool\n\t\tinputs[\"tags\"] = args.Tags\n\t\tinputs[\"vpc\"] = args.Vpc\n\t}\n\tinputs[\"allocationId\"] = nil\n\tinputs[\"associationId\"] = nil\n\tinputs[\"domain\"] = nil\n\tinputs[\"privateDns\"] = nil\n\tinputs[\"privateIp\"] = nil\n\tinputs[\"publicDns\"] = nil\n\tinputs[\"publicIp\"] = nil\n\ts, err := ctx.RegisterResource(\"aws:ec2/eip:Eip\", name, true, inputs, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &Eip{s: s}, nil\n}", "title": "" }, { "docid": "688ec3c50f8745e037ea11683f631107", "score": "0.49403977", "text": "func (o *Interface) SetIPAddress(v string) 
{\n\to.IPAddress = v\n}", "title": "" }, { "docid": "62d62a636bd31590395bb8b02ad5fc58", "score": "0.49237722", "text": "func (o *PutIPAMIpnatParams) WithIPAddressFrom(iPAddressFrom *string) *PutIPAMIpnatParams {\n\to.SetIPAddressFrom(iPAddressFrom)\n\treturn o\n}", "title": "" }, { "docid": "cb6818a9a5e38ea983fbad8504bef204", "score": "0.492368", "text": "func (o *MobileGatewayInterfaceSetting) SetIPAddress(v []string) {\n\to.IPAddress = v\n}", "title": "" }, { "docid": "eab5f3a1141ca64394e30bd829307fc9", "score": "0.48990437", "text": "func (m *GameServerMutation) SetIPAddress(s string) {\n\tm.ip_address = &s\n}", "title": "" }, { "docid": "3a26e6fd2f62dbeea90bf32ce8fb5338", "score": "0.48976257", "text": "func (m *SessionMutation) SetIPAddress(s string) {\n\tm.ip_address = &s\n}", "title": "" }, { "docid": "0878b51e3d1a7a6b81e8d5d3917e463d", "score": "0.4888612", "text": "func NewOption(optionNumber OptionCode, optionValue interface{}) *CoAPMessageOption {\n\treturn &CoAPMessageOption{\n\t\tCode: optionNumber,\n\t\tValue: optionValue,\n\t}\n}", "title": "" }, { "docid": "d2285fd84ee5fa2ec3873b437fe4ecae", "score": "0.48782718", "text": "func (o *VNCProxy) SetIPAddress(v string) {\n\to.IPAddress = v\n}", "title": "" }, { "docid": "052ae4b29eb50018caba5681f1b482b0", "score": "0.4875485", "text": "func NewAddress() (Address, KeyPair, error) {\n\tkeyPair, err := NewKeyPair()\n\tif err != nil {\n\t\treturn \"\", keyPair, err\n\t}\n\treturn keyPair.Address(), keyPair, nil\n}", "title": "" }, { "docid": "eeca0712d058ddbee2bca0d76b3b765d", "score": "0.48694703", "text": "func NewIPAccess(ipAccess *IPAccess) *IPAccess {\n\t//Setting the IP Latitude, Longitude and Radius fields\n\tipAccess.SetIPCoordinates()\n\n\t//Writing the fully populated IPAccess struct to the sqlDB\n\tCreateIPAccess(ipAccess)\n\n\treturn ipAccess\n}", "title": "" }, { "docid": "09d9d01d98aa099b99608f675d959409", "score": "0.48694083", "text": "func (a *AgentServer) SetIP(value string) {\n\tcssSelector := fmt.Sprintf(\"%v .grv-provision-req-server-interface\", a.cssSelector)\n\tutils.SetDropdownValue2(a.page, cssSelector, \"\", value)\n}", "title": "" }, { "docid": "94d8e4a727e0201bcdcfb59d3f6c1756", "score": "0.48684648", "text": "func (s *ZonedAPI) CreateIP(req *ZonedAPICreateIPRequest, opts ...scw.RequestOption) (*IP, error) {\n\tvar err error\n\n\tdefaultProjectID, exist := s.client.GetDefaultProjectID()\n\tif exist && req.OrganizationID == nil && req.ProjectID == nil {\n\t\treq.ProjectID = &defaultProjectID\n\t}\n\n\tdefaultOrganizationID, exist := s.client.GetDefaultOrganizationID()\n\tif exist && req.OrganizationID == nil && req.ProjectID == nil {\n\t\treq.OrganizationID = &defaultOrganizationID\n\t}\n\n\tif req.Zone == \"\" {\n\t\tdefaultZone, _ := s.client.GetDefaultZone()\n\t\treq.Zone = defaultZone\n\t}\n\n\tif fmt.Sprint(req.Zone) == \"\" {\n\t\treturn nil, errors.New(\"field Zone cannot be empty in request\")\n\t}\n\n\tscwReq := &scw.ScalewayRequest{\n\t\tMethod: \"POST\",\n\t\tPath: \"/lb/v1/zones/\" + fmt.Sprint(req.Zone) + \"/ips\",\n\t\tHeaders: http.Header{},\n\t}\n\n\terr = scwReq.SetBody(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar resp IP\n\n\terr = s.client.Do(scwReq, &resp, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resp, nil\n}", "title": "" }, { "docid": "71396579cb5ee1d581e6022f46e2ad19", "score": "0.48671648", "text": "func NewCIDRIPPermission(ipProtocol string, fromPort *int64, toPort *int64, cidr string, labels map[string]string) IPPermissionInfo 
{\n\tdescription := buildIPPermissionDescriptionForLabels(labels)\n\treturn IPPermissionInfo{\n\t\tPermission: ec2sdk.IpPermission{\n\t\t\tIpProtocol: awssdk.String(ipProtocol),\n\t\t\tFromPort: fromPort,\n\t\t\tToPort: toPort,\n\t\t\tIpRanges: []*ec2sdk.IpRange{\n\t\t\t\t{\n\t\t\t\t\tCidrIp: awssdk.String(cidr),\n\t\t\t\t\tDescription: awssdk.String(description),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tLabels: labels,\n\t}\n}", "title": "" }, { "docid": "4b9cab725177288ffe67d862f00571d4", "score": "0.48635942", "text": "func (t *Interface_Subinterface_Ipv6) NewAddress(Ip string) (*Interface_Subinterface_Ipv6_Address, error) {\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Address == nil {\n\t\tt.Address = make(map[string]*Interface_Subinterface_Ipv6_Address)\n\t}\n\n\tkey := Ip\n\n\t// Ensure that this key has not already been used in the\n\t// list. Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Address[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Address\", key)\n\t}\n\n\tt.Address[key] = &Interface_Subinterface_Ipv6_Address{\n\t\tIp: &Ip,\n\t}\n\n\treturn t.Address[key], nil\n}", "title": "" }, { "docid": "097c97d135c7efbf13ae4c6738306b12", "score": "0.48622978", "text": "func newIPRange(network net.IPNet) ipRange {\n\tstart := network.IP\n\tend := add(network.IP, size(network.Mask)-1)\n\n\treturn ipRange{start: start, end: end}\n}", "title": "" }, { "docid": "2a663c67891591b021a7eff9fbdd2ee7", "score": "0.48453784", "text": "func (t *OpenconfigInterfaces_Interfaces_Interface_Subinterfaces_Subinterface_Ipv6_Addresses) NewAddress(Ip string) (*OpenconfigInterfaces_Interfaces_Interface_Subinterfaces_Subinterface_Ipv6_Addresses_Address, error){\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Address == nil {\n\t\tt.Address = make(map[string]*OpenconfigInterfaces_Interfaces_Interface_Subinterfaces_Subinterface_Ipv6_Addresses_Address)\n\t}\n\n\tkey := Ip\n\n\t// Ensure that this key has not already been used in the\n\t// list. 
Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Address[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Address\", key)\n\t}\n\n\tt.Address[key] = &OpenconfigInterfaces_Interfaces_Interface_Subinterfaces_Subinterface_Ipv6_Addresses_Address{\n\t\tIp: &Ip,\n\t}\n\n\treturn t.Address[key], nil\n}", "title": "" }, { "docid": "9ad00a0cf590e70067ee9aaea88f5151", "score": "0.48451403", "text": "func (client *PublicIPAddressesClient) getVirtualMachineScaleSetPublicIPAddressCreateRequest(ctx context.Context, resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string, networkInterfaceName string, ipConfigurationName string, publicIPAddressName string, options *PublicIPAddressesGetVirtualMachineScaleSetPublicIPAddressOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses/{publicIpAddressName}\"\n\tif resourceGroupName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif virtualMachineScaleSetName == \"\" {\n\t\treturn nil, errors.New(\"parameter virtualMachineScaleSetName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{virtualMachineScaleSetName}\", url.PathEscape(virtualMachineScaleSetName))\n\tif virtualmachineIndex == \"\" {\n\t\treturn nil, errors.New(\"parameter virtualmachineIndex cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{virtualmachineIndex}\", url.PathEscape(virtualmachineIndex))\n\tif networkInterfaceName == \"\" {\n\t\treturn nil, errors.New(\"parameter networkInterfaceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{networkInterfaceName}\", url.PathEscape(networkInterfaceName))\n\tif ipConfigurationName == \"\" {\n\t\treturn nil, errors.New(\"parameter ipConfigurationName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{ipConfigurationName}\", url.PathEscape(ipConfigurationName))\n\tif publicIPAddressName == \"\" {\n\t\treturn nil, errors.New(\"parameter publicIPAddressName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{publicIpAddressName}\", url.PathEscape(publicIPAddressName))\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\treq, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.ep, urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2018-10-01\")\n\tif options != nil && options.Expand != nil {\n\t\treqQP.Set(\"$expand\", *options.Expand)\n\t}\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header.Set(\"Accept\", \"application/json\")\n\treturn req, nil\n}", "title": "" }, { "docid": "2d43e317c59a2cd818bc4351a4576480", "score": "0.4844142", "text": "func FromVppAddress(addr ip_types.Address) net.IP {\n\treturn FromVppIPAddressUnion(\n\t\taddr.Un,\n\t\taddr.Af == ip_types.ADDRESS_IP6,\n\t)\n}", "title": "" }, { "docid": "98caf66dfb8185732c1661b0e9bc75a7", "score": "0.4840437", "text": "func 
NewInstanceFromIp(value string) (*Instance, error) {\n\tfilters := []string{\n\t\t\"network-interface.addresses.private-ip-address\",\n\t\t\"network-interface.ipv6-addresses.ipv6-address\",\n\t\t\"network-interface.addresses.association.public-ip\",\n\t}\n\tfor _, filter := range filters {\n\t\tparams := &ec2.DescribeInstancesInput{\n\t\t\tFilters: []*ec2.Filter{\n\t\t\t\t{\n\t\t\t\t\tName: aws.String(filter),\n\t\t\t\t\tValues: []*string{\n\t\t\t\t\t\taws.String(value),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\t\tif instance, err := NewInstance(params); instance != nil {\n\t\t\treturn instance, err\n\t\t}\n\t}\n\treturn nil, errors.New(\"could not find instance by ip\")\n}", "title": "" }, { "docid": "aa08d31cf38e13e45413a7a130aac538", "score": "0.48402503", "text": "func NewIPAPI(sys System) *IPAPI {\n\ti := &IPAPI{SourceType: requests.API}\n\n\ti.BaseService = *NewBaseService(i, \"ipapi\", sys)\n\treturn i\n}", "title": "" }, { "docid": "90d94ff6c1a719c999bfeb9d6348c6d1", "score": "0.48353818", "text": "func NewAddress(address string) (*AddressParts, error) {\n\tvar err error\n\taddressParts := new(AddressParts)\n\terr = addressParts.LoadAddressString(address)\n\tif err != nil {\n\t\treturn addressParts, err\n\t}\n\taddressParts.ProcessAddress()\n\n\treturn addressParts, err\n}", "title": "" }, { "docid": "3e14fb39bcb3465352c77885ae004f03", "score": "0.4820169", "text": "func (client *PublicIPAddressesClient) getCloudServicePublicIPAddressCreateRequest(ctx context.Context, resourceGroupName string, cloudServiceName string, roleInstanceName string, networkInterfaceName string, ipConfigurationName string, publicIPAddressName string, options *PublicIPAddressesGetCloudServicePublicIPAddressOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/cloudServices/{cloudServiceName}/roleInstances/{roleInstanceName}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses/{publicIpAddressName}\"\n\tif resourceGroupName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif cloudServiceName == \"\" {\n\t\treturn nil, errors.New(\"parameter cloudServiceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{cloudServiceName}\", url.PathEscape(cloudServiceName))\n\tif roleInstanceName == \"\" {\n\t\treturn nil, errors.New(\"parameter roleInstanceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{roleInstanceName}\", url.PathEscape(roleInstanceName))\n\tif networkInterfaceName == \"\" {\n\t\treturn nil, errors.New(\"parameter networkInterfaceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{networkInterfaceName}\", url.PathEscape(networkInterfaceName))\n\tif ipConfigurationName == \"\" {\n\t\treturn nil, errors.New(\"parameter ipConfigurationName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{ipConfigurationName}\", url.PathEscape(ipConfigurationName))\n\tif publicIPAddressName == \"\" {\n\t\treturn nil, errors.New(\"parameter publicIPAddressName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{publicIpAddressName}\", url.PathEscape(publicIPAddressName))\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, 
\"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\treq, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.ep, urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2021-05-01\")\n\tif options != nil && options.Expand != nil {\n\t\treqQP.Set(\"$expand\", *options.Expand)\n\t}\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header.Set(\"Accept\", \"application/json\")\n\treturn req, nil\n}", "title": "" }, { "docid": "00e24fb7ac80f2c552083e8770d58735", "score": "0.48155144", "text": "func (o *DatabaseReplicationSetting) SetIPAddress(v string) {\n\to.IPAddress = v\n}", "title": "" }, { "docid": "16359a697151c73563abf31378aafb78", "score": "0.481069", "text": "func (o *MobileGatewayInterface) SetIPAddress(v string) {\n\to.IPAddress = v\n}", "title": "" }, { "docid": "c826598f5a6421333d7a5cc6c77b8066", "score": "0.48046374", "text": "func (o *VPCRouterPPTPServerSession) SetIPAddress(v string) {\n\to.IPAddress = v\n}", "title": "" }, { "docid": "be54c521b1c2b137162bf7f5f0c80c36", "score": "0.48032436", "text": "func (t *OpenconfigInterfaces_Interfaces_Interface_RoutedVlan_Ipv4_Addresses) NewAddress(Ip string) (*OpenconfigInterfaces_Interfaces_Interface_RoutedVlan_Ipv4_Addresses_Address, error){\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Address == nil {\n\t\tt.Address = make(map[string]*OpenconfigInterfaces_Interfaces_Interface_RoutedVlan_Ipv4_Addresses_Address)\n\t}\n\n\tkey := Ip\n\n\t// Ensure that this key has not already been used in the\n\t// list. Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Address[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Address\", key)\n\t}\n\n\tt.Address[key] = &OpenconfigInterfaces_Interfaces_Interface_RoutedVlan_Ipv4_Addresses_Address{\n\t\tIp: &Ip,\n\t}\n\n\treturn t.Address[key], nil\n}", "title": "" }, { "docid": "0abfab5e64fa65c967101bc492ef6a1e", "score": "0.4771362", "text": "func (e *EnvSet) IP(name string, defaultVal net.IP, description string) net.IP {\n\tv := e.NewVar(newIPValue(defaultVal), name, description)\n\treturn v.Value.Get().(net.IP)\n}", "title": "" }, { "docid": "c501942a5ca27fe8111ecdcb4a5be2ab", "score": "0.4770818", "text": "func (t *NetworkInstance_Mpls_SignalingProtocols_Ldp_InterfaceAttributes_Interface) NewAddressFamily(AfiName E_OpenconfigMplsLdp_MplsLdpAfi) (*NetworkInstance_Mpls_SignalingProtocols_Ldp_InterfaceAttributes_Interface_AddressFamily, error) {\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.AddressFamily == nil {\n\t\tt.AddressFamily = make(map[E_OpenconfigMplsLdp_MplsLdpAfi]*NetworkInstance_Mpls_SignalingProtocols_Ldp_InterfaceAttributes_Interface_AddressFamily)\n\t}\n\n\tkey := AfiName\n\n\t// Ensure that this key has not already been used in the\n\t// list. 
Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.AddressFamily[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list AddressFamily\", key)\n\t}\n\n\tt.AddressFamily[key] = &NetworkInstance_Mpls_SignalingProtocols_Ldp_InterfaceAttributes_Interface_AddressFamily{\n\t\tAfiName: AfiName,\n\t}\n\n\treturn t.AddressFamily[key], nil\n}", "title": "" }, { "docid": "e06b34efe5a862ba87da7777a8ef3d63", "score": "0.47691217", "text": "func (o *VPCRouterWireGuard) SetIPAddress(v string) {\n\to.IPAddress = v\n}", "title": "" }, { "docid": "6e3134e7fdee9b5e4d0674ca45bd7877", "score": "0.4766607", "text": "func newIPv4Address(ipCidr string, asn int) (IPAddress, error) {\n\tif isValidIPv4Cidr(ipCidr) == false {\n\t\treturn nil, ErrInvalidIPv4Cidr\n\t}\n\n\tipv4Address := IPv4Address{}\n\tip := strings.Split(ipCidr, \"/\")\n\tipInt, err := ipv4StrToInt(ip[0])\n\tif err != nil {\n\t\treturn ipv4Address, err\n\t}\n\n\tprefix, err := strconv.Atoi(ip[1])\n\tif err != nil {\n\t\treturn ipv4Address, err\n\t}\n\n\tipv4Address.cidrLen = prefix\n\tipv4Address.mask = uint32(^(uint32(0))) << uint32(32-prefix)\n\tipv4Address.ip = ipInt & ipv4Address.mask\n\tipStr, err := intToIPv4Str(ipv4Address.ip)\n\tif err != nil {\n\t\treturn ipv4Address, err\n\t}\n\tipv4Address.ipStr = ipStr\n\tipv4Address.asn = asn\n\n\treturn ipv4Address, nil\n}", "title": "" }, { "docid": "9348c9f28afd96278cc56d30b6076716", "score": "0.47533527", "text": "func (a *Client) CreatePrivateIP(params *CreatePrivateIPParams) (*CreatePrivateIPOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewCreatePrivateIPParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"CreatePrivateIp\",\n\t\tMethod: \"POST\",\n\t\tPathPattern: \"/privateIps\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\"},\n\t\tParams: params,\n\t\tReader: &CreatePrivateIPReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*CreatePrivateIPOK), nil\n\n}", "title": "" }, { "docid": "31d77071a38888213e6ef7e890a6dc33", "score": "0.47522828", "text": "func NewPublicIPAddress(ctx *pulumi.Context,\n\tname string, args *PublicIPAddressArgs, opts ...pulumi.ResourceOption) (*PublicIPAddress, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required arguments\")\n\t}\n\n\tif args.ResourceGroupName == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'ResourceGroupName'\")\n\t}\n\taliases := pulumi.Aliases([]pulumi.Alias{\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20190401:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20150501preview:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20150501preview:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20150615:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20150615:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20160330:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: 
pulumi.String(\"azure-nextgen:network/v20160330:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20160601:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20160601:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20160901:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20160901:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20161201:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20161201:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20170301:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20170301:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20170601:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20170601:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20170801:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20170801:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20170901:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20170901:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20171001:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20171001:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20171101:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20171101:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20180101:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20180101:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20180201:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20180201:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20180401:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20180401:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20180601:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20180601:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20180701:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20180701:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20180801:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20180801:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20181001:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20181001:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20181101:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20181101:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20181201:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20181201:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: 
pulumi.String(\"azure-native:network/v20190201:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20190201:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20190601:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20190601:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20190701:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20190701:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20190801:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20190801:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20190901:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20190901:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20191101:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20191101:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20191201:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20191201:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20200301:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20200301:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20200401:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20200401:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20200501:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20200501:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20200601:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20200601:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20200701:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20200701:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20200801:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20200801:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20201101:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20201101:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-native:network/v20210201:PublicIPAddress\"),\n\t\t},\n\t\t{\n\t\t\tType: pulumi.String(\"azure-nextgen:network/v20210201:PublicIPAddress\"),\n\t\t},\n\t})\n\topts = append(opts, aliases)\n\tvar resource PublicIPAddress\n\terr := ctx.RegisterResource(\"azure-native:network/v20190401:PublicIPAddress\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "title": "" }, { "docid": "89364d7e4efd848254c58b0c3aa2ef9d", "score": "0.4740215", "text": "func WithIPAM(subnet, gateway string) func(*types.NetworkCreate) {\n\treturn func(n *types.NetworkCreate) {\n\t\tif n.IPAM == nil {\n\t\t\tn.IPAM = &network.IPAM{}\n\t\t}\n\n\t\tn.IPAM.Config = append(n.IPAM.Config, network.IPAMConfig{\n\t\t\tSubnet: subnet,\n\t\t\tGateway: gateway,\n\t\t\tAuxAddress: map[string]string{},\n\t\t})\n\t}\n}", "title": "" }, 
{ "docid": "96dc17a9f4032f4ddd820a9e11003779", "score": "0.47357818", "text": "func IP(name string, defaultVal net.IP, description string) net.IP {\n\treturn DefaultEnv.IP(name, defaultVal, description)\n}", "title": "" }, { "docid": "16cbaa3a105065e70ac1a82d1b0299ef", "score": "0.47332415", "text": "func Address(a string) Option {\n\treturn func(o *Options) {\n\t\to.Address = a\n\t}\n}", "title": "" }, { "docid": "16cbaa3a105065e70ac1a82d1b0299ef", "score": "0.47332415", "text": "func Address(a string) Option {\n\treturn func(o *Options) {\n\t\to.Address = a\n\t}\n}", "title": "" }, { "docid": "16cbaa3a105065e70ac1a82d1b0299ef", "score": "0.47332415", "text": "func Address(a string) Option {\n\treturn func(o *Options) {\n\t\to.Address = a\n\t}\n}", "title": "" }, { "docid": "c47d868e5cb153ed48d449c0c7e2a002", "score": "0.47330117", "text": "func (o *VPCRouterWireGuardPeer) SetIPAddress(v string) {\n\to.IPAddress = v\n}", "title": "" }, { "docid": "70d75d81f6a954570b218fb677bc9f63", "score": "0.47176036", "text": "func New(ip string) Client {\n\treturn NewWithPort(ip, 9999)\n}", "title": "" }, { "docid": "2d68a07fcc96d7a8d4f415d2dd2d4566", "score": "0.47166577", "text": "func (o *DatabaseCreateRequest) SetIPAddresses(v []string) {\n\to.IPAddresses = v\n}", "title": "" }, { "docid": "906b2cea54cb30ef91ebf8d5ec16f2be", "score": "0.47161055", "text": "func (t *Interface_RoutedVlan_Ipv4) NewAddress(Ip string) (*Interface_RoutedVlan_Ipv4_Address, error) {\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Address == nil {\n\t\tt.Address = make(map[string]*Interface_RoutedVlan_Ipv4_Address)\n\t}\n\n\tkey := Ip\n\n\t// Ensure that this key has not already been used in the\n\t// list. Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Address[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Address\", key)\n\t}\n\n\tt.Address[key] = &Interface_RoutedVlan_Ipv4_Address{\n\t\tIp: &Ip,\n\t}\n\n\treturn t.Address[key], nil\n}", "title": "" }, { "docid": "93d955bd26b8c7976cb556bd3d446898", "score": "0.47129658", "text": "func (o MobileGatewayPrivateNetworkInterfaceOutput) IpAddress() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MobileGatewayPrivateNetworkInterface) string { return v.IpAddress }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "6d3a8dbf0702d985e59e40af622224a5", "score": "0.4712187", "text": "func NewAddress(ctx *pulumi.Context,\n\tname string, args *AddressArgs, opts ...pulumi.ResourceOption) (*Address, error) {\n\tif args == nil {\n\t\targs = &AddressArgs{}\n\t}\n\n\topts = internal.PkgResourceDefaultOpts(opts)\n\tvar resource Address\n\terr := ctx.RegisterResource(\"gcp:compute/address:Address\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "title": "" }, { "docid": "d5edf962a4b642108653f615ca369e97", "score": "0.47049102", "text": "func NewRemoteIP() *RemoteIP {\n\treturn &RemoteIP{}\n}", "title": "" }, { "docid": "bd658d7e5c5c41db5b98259931b22c22", "score": "0.47043687", "text": "func Address(c string) Option {\n\treturn func(o *Options) {\n\t\to.address = c\n\t}\n}", "title": "" }, { "docid": "25ac95e82e259527d23b6492ce50bce6", "score": "0.47012648", "text": "func (client *PublicIPAddressesClient) listCloudServiceRoleInstancePublicIPAddressesCreateRequest(ctx context.Context, resourceGroupName string, cloudServiceName string, roleInstanceName string, networkInterfaceName string, ipConfigurationName string, 
options *PublicIPAddressesListCloudServiceRoleInstancePublicIPAddressesOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/cloudServices/{cloudServiceName}/roleInstances/{roleInstanceName}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses\"\n\tif resourceGroupName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif cloudServiceName == \"\" {\n\t\treturn nil, errors.New(\"parameter cloudServiceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{cloudServiceName}\", url.PathEscape(cloudServiceName))\n\tif roleInstanceName == \"\" {\n\t\treturn nil, errors.New(\"parameter roleInstanceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{roleInstanceName}\", url.PathEscape(roleInstanceName))\n\tif networkInterfaceName == \"\" {\n\t\treturn nil, errors.New(\"parameter networkInterfaceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{networkInterfaceName}\", url.PathEscape(networkInterfaceName))\n\tif ipConfigurationName == \"\" {\n\t\treturn nil, errors.New(\"parameter ipConfigurationName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{ipConfigurationName}\", url.PathEscape(ipConfigurationName))\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\treq, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.ep, urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2021-05-01\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header.Set(\"Accept\", \"application/json\")\n\treturn req, nil\n}", "title": "" }, { "docid": "40815cf434112f2b3d6bc340361e4499", "score": "0.46961933", "text": "func (t *OpenconfigInterfaces_Interfaces_Interface_RoutedVlan_Ipv6_Addresses) NewAddress(Ip string) (*OpenconfigInterfaces_Interfaces_Interface_RoutedVlan_Ipv6_Addresses_Address, error){\n\n\t// Initialise the list within the receiver struct if it has not already been\n\t// created.\n\tif t.Address == nil {\n\t\tt.Address = make(map[string]*OpenconfigInterfaces_Interfaces_Interface_RoutedVlan_Ipv6_Addresses_Address)\n\t}\n\n\tkey := Ip\n\n\t// Ensure that this key has not already been used in the\n\t// list. 
Keyed YANG lists do not allow duplicate keys to\n\t// be created.\n\tif _, ok := t.Address[key]; ok {\n\t\treturn nil, fmt.Errorf(\"duplicate key %v for list Address\", key)\n\t}\n\n\tt.Address[key] = &OpenconfigInterfaces_Interfaces_Interface_RoutedVlan_Ipv6_Addresses_Address{\n\t\tIp: &Ip,\n\t}\n\n\treturn t.Address[key], nil\n}", "title": "" }, { "docid": "a2f3074a2e1697946a0840faa16ab39b", "score": "0.46944758", "text": "func (iuo *IPUpdateOne) SetIPAddress(s string) *IPUpdateOne {\n\tiuo.mutation.SetIPAddress(s)\n\treturn iuo\n}", "title": "" }, { "docid": "356eedae7f4f80ed41b35a08b168d517", "score": "0.4688449", "text": "func (o *VPCRouterInterfaceSetting) SetIPAddress(v []string) {\n\to.IPAddress = v\n}", "title": "" }, { "docid": "91d7b734b9473777bb4ed69f53f6c54c", "score": "0.46835268", "text": "func NewFloatingIp(ctx *pulumi.Context,\n\tname string, args *FloatingIpArgs, opts ...pulumi.ResourceOption) (*FloatingIp, error) {\n\tif args == nil || args.Region == nil {\n\t\treturn nil, errors.New(\"missing required argument 'Region'\")\n\t}\n\tif args == nil {\n\t\targs = &FloatingIpArgs{}\n\t}\n\tvar resource FloatingIp\n\terr := ctx.RegisterResource(\"digitalocean:index/floatingIp:FloatingIp\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "title": "" }, { "docid": "2de8101960591c674127bc7a2fc6a171", "score": "0.46764582", "text": "func (c *VIPClient) Create(params VIPParams) (*VIP, error) {\n\tvar result VIP\n\terr := c.Backend.CallIntoInterface(\"v1/VIP/create\", params, &result)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &result, nil\n}", "title": "" }, { "docid": "7e90e32a70c0eda7d99eb59fc111fe79", "score": "0.46746138", "text": "func (o *VPCRouterL2TPIPsecServerSession) SetIPAddress(v string) {\n\to.IPAddress = v\n}", "title": "" }, { "docid": "f3c48bbf38725a355e852296f31af36a", "score": "0.46743122", "text": "func (o *GSLBServer) SetIPAddress(v string) {\n\to.IPAddress = v\n}", "title": "" }, { "docid": "4c3504b8d070a4352c6f46e7cacd5d09", "score": "0.4671443", "text": "func (p *CNIPod) SetIPAddr(ip string) {\n\tp.cniIPAddr = ip\n}", "title": "" }, { "docid": "2c305cadaeee81aef5dec2ec70edf8ba", "score": "0.46671873", "text": "func (o *CreateInstanceParameters) SetIp(v string) {\n\to.Ip = &v\n}", "title": "" } ]
f85e82f599cd93b6e6539da14429b187
IsEqual will return true if `f == g`.
[ { "docid": "7e2321624487a3f140f2d7731598310c", "score": "0.6106936", "text": "func (f *Float) IsEqual(g interface{}) bool {\n\tgf := toFloat(g)\n\tif gf == nil {\n\t\treturn false\n\t}\n\treturn f.Cmp(&gf.Float) == 0\n}", "title": "" } ]
[ { "docid": "bc2b277d9179a7b8d16532f19b25a3e9", "score": "0.6318961", "text": "func (g *G2) IsEqual(p *G2) bool {\n\tvar lx, rx, ly, ry ff.Fp2\n\tlx.Mul(&g.x, &p.z) // lx = x1*z2\n\trx.Mul(&p.x, &g.z) // rx = x2*z1\n\tlx.Sub(&lx, &rx) // lx = lx-rx\n\tly.Mul(&g.y, &p.z) // ly = y1*z2\n\try.Mul(&p.y, &g.z) // ry = y2*z1\n\tly.Sub(&ly, &ry) // ly = ly-ry\n\treturn lx.IsZero() == 1 && ly.IsZero() == 1\n}", "title": "" }, { "docid": "ea006d66aefdbe6fdc458a78707c878d", "score": "0.61022204", "text": "func (f *NativeFunction) Equal(i interface{}) bool {\n\tif g, ok := i.(*NativeFunction); ok {\n\t\treturn f == g\n\t}\n\treturn false\n}", "title": "" }, { "docid": "162e7a8d56a60adb7e71c74e5aa450bb", "score": "0.60447395", "text": "func (a assert) Equalf(g, e any, format string, args ...any) bool {\n\tif !a.equal(g, e) {\n\t\ta.helper()\n\t\ta.failEqualf(g, e, format, args...)\n\t\treturn false\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "c2535e9395f614d054bdbe35a3176e34", "score": "0.6025907", "text": "func (g *Graph[E, V]) Equal(g2 *Graph[E, V]) bool {\n\n\tif len(g.valIns) != len(g2.valIns) {\n\t\treturn false\n\t}\n\nouter:\n\tfor _, valIn := range g.valIns {\n\n\t\tfor _, valIn2 := range g2.valIns {\n\n\t\t\tif valIn.equal(valIn2) {\n\t\t\t\tcontinue outer\n\t\t\t}\n\t\t}\n\n\t\treturn false\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "8309aed0ab5879d350abfcb016178baa", "score": "0.6014428", "text": "func equal(a, b interface{}) bool {\n\treturn a == b\n}", "title": "" }, { "docid": "8bb4660a5c607641178051ebcbc1232e", "score": "0.60037357", "text": "func (guest Guest) equals(g *Guest) bool {\n if guest.serial != g.serial {\n return false\n }\n if guest.username != g.username {\n return false\n }\n return true\n}", "title": "" }, { "docid": "d3355137f5bfdd333cdf9832e809e00a", "score": "0.59915996", "text": "func Equal(g1, g2 Graph) bool {\n\tn := g1.NumNodes()\n\tif n != g2.NumNodes() {\n\t\treturn false\n\t}\n\tvar temp []int\n\tfor i := 0; i < n; i++ {\n\t\te1 := g1.Out(i)\n\t\te2 := g2.Out(i)\n\t\tif len(e1) != len(e2) {\n\t\t\treturn false\n\t\t}\n\t\t// Quick check to see if they're identical without\n\t\t// sorting.\n\t\teq := true\n\t\tfor ei, x := range e1 {\n\t\t\tif e2[ei] != x {\n\t\t\t\teq = false\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif eq {\n\t\t\tcontinue\n\t\t}\n\t\t// Sort the adjacency list and check equality again.\n\t\ttemp = append(append(temp[:0], e1...), e2...)\n\t\te1, e2 = temp[:len(e1)], temp[len(e1):]\n\t\tsort.Ints(e1)\n\t\tsort.Ints(e2)\n\t\tfor ei, x := range e1 {\n\t\t\tif e2[ei] != x {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "fd3d3b8ac0b02a4a650c6aa5d4d8ac56", "score": "0.5926445", "text": "func (g GenesisState) Equal(g2 GenesisState) bool {\n\tb1 := ModuleCdc.MustMarshalBinaryBare(g)\n\tb2 := ModuleCdc.MustMarshalBinaryBare(g2)\n\treturn bytes.Equal(b1, b2)\n}", "title": "" }, { "docid": "a664257fd3dbbf211ffd6144590b94a0", "score": "0.5907992", "text": "func (a assert) NotEqualf(g, e any, format string, args ...any) bool {\n\tif a.equal(g, e) {\n\t\ta.helper()\n\t\ta.failNotEqualf(g, e, format, args...)\n\t\treturn false\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "7248b50fb5262bb30df398bb5ab755c9", "score": "0.5880707", "text": "func (z Fp) IsEqual(x *Fp) int { return ctUint64Eq(z.i[:], x.i[:]) }", "title": "" }, { "docid": "d98563c98a9aa6ce65517f9e7d729a9c", "score": "0.58680654", "text": "func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string, args 
...interface{}) bool {\n\tif h, ok := a.t.(tHelper); ok {\n\t\th.Helper()\n\t}\n\treturn Equalf(a.t, expected, actual, msg, args...)\n}", "title": "" }, { "docid": "d504df03ca3d9a24c889defa425f7853", "score": "0.5866641", "text": "func (a Gop_bigrat) Gop_EQ(b Gop_bigrat) bool {\n\treturn a.Cmp(b.Rat) == 0\n}", "title": "" }, { "docid": "15c659332ec0669e36f163c310c5e19e", "score": "0.5845074", "text": "func (c *Closure) Eq(rhs interface{}) bool {\n\treturn c == rhs\n}", "title": "" }, { "docid": "7666636b65a4e558c65aa2c5724ef05d", "score": "0.5822234", "text": "func Equals(a, b float64) bool {\n\treturn BothNaN(a, b) || SameInfinity(a, b) || ((a-b) < Epsilon && (b-a) < Epsilon)\n}", "title": "" }, { "docid": "8522b441e4d8ca729442f7a77a75c055", "score": "0.5818209", "text": "func (g *Global) Equal(other *Global) bool {\n\treturn g.glbptr == other.glbptr\n}", "title": "" }, { "docid": "cb9d5806a642561b1d293f5aad3cde4f", "score": "0.58131427", "text": "func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string, args ...interface{}) bool {\n\tif h, ok := a.t.(tHelper); ok {\n\t\th.Helper()\n\t}\n\treturn Samef(a.t, expected, actual, msg, args...)\n}", "title": "" }, { "docid": "398540d2422580562230c1d74280caed", "score": "0.5765306", "text": "func (s Surge) Equals(s2 Surge) bool {\n\treturn s.d.Equals(s2.d)\n}", "title": "" }, { "docid": "b99584b6d6d2d4d77f920b8e91f640f5", "score": "0.5751704", "text": "func (p1 *PriPoly) Equal(p2 *PriPoly) bool {\n\tk := len(p1.s)\n\tif p1.g != p2.g || k != len(p2.s) {\n\t\tpanic(\"Mismatched polynomials\")\n\t}\n\tfor i := 0; i < len(p1.s); i++ {\n\t\tif !p1.s[i].Equal(p2.s[i]) {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "ed1751694d72b3dc414bec0e3e21f972", "score": "0.57359374", "text": "func (f *Frac) Equal(b *Frac) bool {\n\treturn f.top*b.bot == f.bot*b.top\n}", "title": "" }, { "docid": "8d7d2f85f58eb2a1603e63656b3f2c15", "score": "0.5682258", "text": "func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg string, args ...interface{}) bool {\n\tif h, ok := a.t.(tHelper); ok {\n\t\th.Helper()\n\t}\n\treturn NotSamef(a.t, expected, actual, msg, args...)\n}", "title": "" }, { "docid": "8fce9ec9955fe845327541ffc92016a5", "score": "0.56798774", "text": "func (gs GenesisState) Equal(gs2 GenesisState) bool {\n\tb1 := ModuleCdc.MustMarshalBinaryBare(gs)\n\tb2 := ModuleCdc.MustMarshalBinaryBare(gs2)\n\treturn bytes.Equal(b1, b2)\n}", "title": "" }, { "docid": "95a3500e7c05cf819945a81a90c06920", "score": "0.5649966", "text": "func (d *GF255s) Eq(a *GF255s) uint64 {\n\treturn gf_eq((*[4]uint64)(d), (*[4]uint64)(a), mq255s)\n}", "title": "" }, { "docid": "2d5fd9fb1f51108df567732e16c2b051", "score": "0.5647513", "text": "func (a *Assertions) NotEqualf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool {\n\tif h, ok := a.t.(tHelper); ok {\n\t\th.Helper()\n\t}\n\treturn NotEqualf(a.t, expected, actual, msg, args...)\n}", "title": "" }, { "docid": "ce8b7d4f979faca28e2443c515f570ca", "score": "0.5647439", "text": "func (a Gop_bigint) Gop_EQ(b Gop_bigint) bool {\n\treturn a.Cmp(b.Int) == 0\n}", "title": "" }, { "docid": "8c290503de619a7090a3728456d95284", "score": "0.56064266", "text": "func (m *matrixGT) Equal(b *matrixGT) bool {\n\tif !m.conformal(b) {\n\t\treturn false\n\t}\n\tfor i := 0; i < m.rows; i++ {\n\t\tfor j := 0; j < m.cols; j++ {\n\t\t\tif !m.entries[i*m.cols+j].IsEqual(&b.entries[i*b.cols+j]) {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\treturn 
true\n}", "title": "" }, { "docid": "36f1550b7312c096c45acd9dcbba0ddd", "score": "0.56000996", "text": "func Equality(a int, b int) int {\n\n\tif a == 6 {\n\t\treturn 1\n\t}\n\n\tif a != 6 {\n\t\treturn 1\n\t}\n\n\treturn b\n}", "title": "" }, { "docid": "7dc2310d6f353825201ad55aea973b44", "score": "0.5568668", "text": "func (fitness *Fitness) Equal(other *Fitness) bool {\n\tfor i, fit := range fitness.wvalues {\n\t\tif other.wvalues[i] != fit {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "8e3bf16e0875efa557f1f0ba04711c85", "score": "0.5543913", "text": "func testFunctionEquality(t *testing.T, f1, f2 interface{}) {\n\tassert.True(t, (f1 == nil) == (f2 == nil))\n\tif f1 == nil {\n\t\treturn\n\t}\n\tfuncName1 := runtime.FuncForPC(reflect.ValueOf(f1).Pointer()).Name()\n\tfuncName2 := runtime.FuncForPC(reflect.ValueOf(f2).Pointer()).Name()\n\tassert.Equal(t, funcName1, funcName2)\n}", "title": "" }, { "docid": "e0f62e62117491c2b418d777ddd3c072", "score": "0.55340534", "text": "func TestSignatureIsEqual(t *testing.T) {\n\tsig1 := &Signature{\n\t\tr: *new(ModNScalar).SetHex(\"82235e21a2300022738dabb8e1bbd9d19cfb1e7ab8c30a23b0afbb8d178abcf3\"),\n\t\ts: *new(ModNScalar).SetHex(\"24bf68e256c534ddfaf966bf908deb944305596f7bdcc38d69acad7f9c868724\"),\n\t}\n\tsig1Copy := &Signature{\n\t\tr: *new(ModNScalar).SetHex(\"82235e21a2300022738dabb8e1bbd9d19cfb1e7ab8c30a23b0afbb8d178abcf3\"),\n\t\ts: *new(ModNScalar).SetHex(\"24bf68e256c534ddfaf966bf908deb944305596f7bdcc38d69acad7f9c868724\"),\n\t}\n\tsig2 := &Signature{\n\t\tr: *new(ModNScalar).SetHex(\"4e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd41\"),\n\t\ts: *new(ModNScalar).SetHex(\"181522ec8eca07de4860a4acdd12909d831cc56cbbac4622082221a8768d1d09\"),\n\t}\n\n\tif !sig1.IsEqual(sig1) {\n\t\tt.Fatalf(\"bad self signature equality check: %v == %v\", sig1, sig1Copy)\n\t}\n\tif !sig1.IsEqual(sig1Copy) {\n\t\tt.Fatalf(\"bad signature equality check: %v == %v\", sig1, sig1Copy)\n\t}\n\n\tif sig1.IsEqual(sig2) {\n\t\tt.Fatalf(\"bad signature equality check: %v != %v\", sig1, sig2)\n\t}\n}", "title": "" }, { "docid": "7f162af53f7657c8a8a4b5fb4a2468f3", "score": "0.5530095", "text": "func Equal(f func(a, b interface{}) bool, s *Seq, t *Seq) bool {\n\treturn true\n}", "title": "" }, { "docid": "18a42cb6ac5a8d7beccf3958726edff7", "score": "0.5526587", "text": "func (bf *BuiltinFn) Eq(a interface{}) bool {\n\treturn bf == a\n}", "title": "" }, { "docid": "a0b7106cb1067fbf5ead5359f3a11e5f", "score": "0.5524667", "text": "func isEqualFloat64(a, b float64) bool { return math.Float64bits(a) == math.Float64bits(b) }", "title": "" }, { "docid": "d8fcbf3edd8672e1093cc78955ea98be", "score": "0.5521011", "text": "func (p *Matcher) Same() bool {\n\treturn p.a == p.b\n}", "title": "" }, { "docid": "5351cf69c0ed7e6c5a134445cce4d38a", "score": "0.551677", "text": "func (fc *FrontendConfig) Equal(fc2 *FrontendConfig) bool {\n\treturn *fc == *fc2\n}", "title": "" }, { "docid": "fad289309eaa7b23fa8a5ded5be78ee2", "score": "0.5503866", "text": "func equal(a, b interface{}) bool {\n\tvar anil, bnil bool\n\tva, vb := reflect.ValueOf(a), reflect.ValueOf(b)\n\tswitch va.Kind() {\n\tcase reflect.Invalid:\n\t\tanil = true\n\tcase reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:\n\t\tanil = va.IsNil()\n\t}\n\tswitch vb.Kind() {\n\tcase reflect.Invalid:\n\t\tbnil = true\n\tcase reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:\n\t\tbnil = vb.IsNil()\n\t}\n\tif anil 
== true && bnil == true {\n\t\treturn true\n\t}\n\tswitch va.Kind() {\n\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:\n\t\treturn equalNumeric(va, vb)\n\tcase reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:\n\t\treturn equalNumeric(va, vb)\n\tcase reflect.Float32, reflect.Float64:\n\t\treturn equalNumeric(va, vb)\n\tdefault:\n\t\treturn a == b\n\t}\n}", "title": "" }, { "docid": "8e5c85e9e80b0b21aea18a297009d6bc", "score": "0.55002844", "text": "func IsGeometryEqual(g1, g2 Geometry) bool {\n\n\tswitch geo1 := g1.(type) {\n\tcase Point:\n\t\tgeo2, ok := g2.(Point)\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\t\treturn IsPointEqual(geo1, geo2)\n\tcase Point3:\n\t\tgeo2, ok := g2.(Point3)\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\t\treturn IsPoint3Equal(geo1, geo2)\n\tcase MultiPoint:\n\t\tgeo2, ok := g2.(MultiPoint)\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\t\treturn IsMultiPointEqual(geo1, geo2)\n\tcase LineString:\n\t\tgeo2, ok := g2.(LineString)\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\t\treturn IsLineStringEqual(geo1, geo2)\n\tcase MultiLine:\n\t\tgeo2, ok := g2.(MultiLine)\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\t\treturn IsMultiLineEqual(geo1, geo2)\n\tcase Polygon:\n\t\tgeo2, ok := g2.(Polygon)\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\t\treturn IsPolygonEqual(geo1, geo2)\n\tcase MultiPolygon:\n\t\tgeo2, ok := g2.(MultiPolygon)\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\t\treturn IsMultiPolygonEqual(geo1, geo2)\n\tcase Collection:\n\t\tgeo2, ok := g2.(Collection)\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\t\treturn IsCollectionEqual(geo1, geo2)\n\t}\n\t// If we don't know the type, we will assume they don't match.\n\treturn false\n}", "title": "" }, { "docid": "969ccd71243ae2df8fa0c105cd6221d6", "score": "0.5499945", "text": "func (gs *GraphState) IsEqual(other *GraphState) bool {\n\tif gs == other {\n\t\treturn true\n\t}\n\tif gs.layer != other.layer ||\n\t\tgs.mainOrder != other.mainOrder ||\n\t\tgs.mainHeight != other.mainHeight {\n\t\treturn false\n\t}\n\tif gs.tips.Contain(other.tips) ||\n\t\tother.tips.Contain(gs.tips) {\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "b296687bf75316d9a3b0231e636ecec1", "score": "0.5462662", "text": "func Equals(a, b float64) bool {\n\treturn math.Abs(a - b) < epsilon\n}", "title": "" }, { "docid": "1e969e3e294252fb6be5048f4f8aae24", "score": "0.5461673", "text": "func funcsEqual(f1, f2 interface{}) bool {\n\tval1 := reflect.ValueOf(f1)\n\tval2 := reflect.ValueOf(f2)\n\treturn val1.Pointer() == val2.Pointer()\n}", "title": "" }, { "docid": "b12e0b045a5351f9183a5a85eaf324f6", "score": "0.5456597", "text": "func (gs GenesisState) Equal(data2 GenesisState) bool {\n\tb1 := ModuleCdc.MustMarshalBinaryBare(gs)\n\tb2 := ModuleCdc.MustMarshalBinaryBare(data2)\n\treturn bytes.Equal(b1, b2)\n}", "title": "" }, { "docid": "add2a97fcc4775a7aa178656b67f7b55", "score": "0.5452411", "text": "func (b *BuiltinFn) Eq(rhs interface{}) bool {\n\treturn b == rhs\n}", "title": "" }, { "docid": "d5c3e0a03b15e2141a3cfc0f1a4a48e3", "score": "0.5451775", "text": "func (gb *GameBoard) Equal(gb2 *GameBoard) bool {\n\tif gb.xSize != gb2.xSize || gb.ySize != gb2.ySize {\n\t\treturn false\n\t}\n\n\tif len(gb.cells) != 
len(gb2.cells) {\n\t\treturn false\n\t}\n\n\tfor k := range gb.cells {\n\t\tif gb.cells[k] != gb2.cells[k] {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "ba71ac7bf5f043ec89f8fc8542ac944f", "score": "0.54216915", "text": "func Eqf(a, b float64, eps ...float64) bool {\n\te := 0.0001\n\tif len(eps) > 0 {\n\t\te = eps[0]\n\t}\n\t// min is the smallest normal value possible\n\tconst min = float64(2.2250738585072014E-308) // 1 / 2**(1022)\n\n\tabsA := math.Abs(a)\n\tabsB := math.Abs(b)\n\tdiff := math.Abs(a - b)\n\n\tif a == b {\n\t\treturn true\n\t} else if a == b || b == 0 || diff < min {\n\t\t// a or b is zero or both are extremely close to it relative error is less meaningful here\n\t\treturn diff < (e * min)\n\t} else {\n\t\t// use relative error\n\t\treturn diff/(absA+absB) < e\n\t}\n}", "title": "" }, { "docid": "c2fb8b1440df9219ffacd301f50a7060", "score": "0.54112285", "text": "func Equal(x, y interface{}) bool {\n\tseen := make(map[comparison]bool)\n\treturn equal2(reflect.ValueOf(x), reflect.ValueOf(y), seen)\n}", "title": "" }, { "docid": "28cd14d42b2be2ee5ba1d08a18d4681d", "score": "0.5401087", "text": "func Equal(arg1 interface{}, arg2 interface{}) (res bool, ok bool) {\n\tif arg1 == arg2 {\n\t\treturn true, true\n\t}\n\t// Comparing things to nil should work\n\tif arg1 == nil || arg2 == nil {\n\t\t// The first if statement should have been equal if they are both not nil\n\t\treturn false, true\n\t}\n\n\ts1, ok1 := arg1.(string)\n\ts2, ok2 := arg2.(string)\n\tif ok1 && ok2 {\n\t\tif s1 == s2 {\n\t\t\treturn true, true\n\t\t}\n\t}\n\n\t// Neither is a string. Let's see if one of them is a number.\n\t// That way we know we can compare as numbers\n\tf1, ok := Float(arg1)\n\tif !ok {\n\t\t// using this slows doewn the entire function. It is the same issue\n\t\t// that required forking parseFloat to be faster.\n\t\t//if reflect.DeepEqual(arg1, arg2) {\n\t\t//\treturn true, true\n\t\t//}\n\t\treturn false, true\n\t}\n\n\t// OK, so at least the first one was a number. 
Let's try the second one\n\tf2, ok := Float(arg2)\n\tif !ok {\n\t\treturn false, true\n\t}\n\n\tif math.IsNaN(f1) && math.IsNaN(f2) {\n\t\treturn true, true\n\t}\n\n\treturn f1 == f2, true\n\n}", "title": "" }, { "docid": "eafa7a79300e7635cb0cc32e78ebf79a", "score": "0.54001737", "text": "func (o *Function) Equal(right Object) bool {\n\tv, ok := right.(*Function)\n\tif !ok {\n\t\treturn false\n\t}\n\treturn v == o\n}", "title": "" }, { "docid": "62ee928f7e0c4122d56e8729b2af44a8", "score": "0.5397372", "text": "func (counter *GCounter) IsEqualTo(c GCounter) bool {\n\treturn reflect.DeepEqual(counter.data, c.data)\n}", "title": "" }, { "docid": "29e2dbd8226386fc026ff0683f7895ef", "score": "0.5394351", "text": "func Eqf(a, b float64, eps ...float64) bool {\n\te := 1e-3\n\tif len(eps) > 0 {\n\t\te = eps[0]\n\t}\n\t// min is the smallest normal value possible\n\tconst min = float64(2.2250738585072014E-308) // 1 / 2**(1022)\n\n\tabsA := math.Abs(a)\n\tabsB := math.Abs(b)\n\tdiff := math.Abs(a - b)\n\n\tif a == b {\n\t\treturn true\n\t} else if a == b || b == 0 || diff < min {\n\t\t// a or b is zero or both are extremely close to it relative error is less meaningful here\n\t\treturn diff < (e * min)\n\t} else {\n\t\t// use relative error\n\t\treturn diff/(absA+absB) < e\n\t}\n}", "title": "" }, { "docid": "a71a67a791ef259b95270b873edd4e12", "score": "0.5389246", "text": "func equalValue(fd protoreflect.FieldDescriptor, x, y protoreflect.Value) bool {\n\tswitch fd.Kind() {\n\tcase protoreflect.BoolKind:\n\t\treturn x.Bool() == y.Bool()\n\tcase protoreflect.EnumKind:\n\t\treturn x.Enum() == y.Enum()\n\tcase protoreflect.Int32Kind, protoreflect.Sint32Kind,\n\t\tprotoreflect.Int64Kind, protoreflect.Sint64Kind,\n\t\tprotoreflect.Sfixed32Kind, protoreflect.Sfixed64Kind:\n\t\treturn x.Int() == y.Int()\n\tcase protoreflect.Uint32Kind, protoreflect.Uint64Kind,\n\t\tprotoreflect.Fixed32Kind, protoreflect.Fixed64Kind:\n\t\treturn x.Uint() == y.Uint()\n\tcase protoreflect.FloatKind, protoreflect.DoubleKind:\n\t\tfx := x.Float()\n\t\tfy := y.Float()\n\t\tif math.IsNaN(fx) || math.IsNaN(fy) {\n\t\t\treturn math.IsNaN(fx) && math.IsNaN(fy)\n\t\t}\n\t\treturn fx == fy\n\tcase protoreflect.StringKind:\n\t\treturn x.String() == y.String()\n\tcase protoreflect.BytesKind:\n\t\treturn bytes.Equal(x.Bytes(), y.Bytes())\n\tcase protoreflect.MessageKind, protoreflect.GroupKind:\n\t\treturn equalMessage(x.Message(), y.Message())\n\tdefault:\n\t\treturn x.Interface() == y.Interface()\n\t}\n}", "title": "" }, { "docid": "26f7c7d2551c556af524f185a0399709", "score": "0.5386387", "text": "func (p *Float) Equals(q *Float) bool {\n\tif p.Degree != q.Degree {\n\t\treturn false\n\t}\n\tpdegs := p.Degrees()\n\tqdegs := q.Degrees()\n\tif pdegs.Len() != qdegs.Len() {\n\t\treturn false\n\t}\n\tfor i, deg := range pdegs {\n\t\tif deg != qdegs[i] {\n\t\t\treturn false\n\t\t}\n\t\ta, _ := p.Coeff(deg)\n\t\tb, _ := q.Coeff(deg)\n\t\tif a.Cmp(b) != 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "737391a66d76fdfe3ae3b8c1503a434e", "score": "0.5378176", "text": "func (c CountryCode) Equal(o CountryCode) bool {\n\treturn c == o\n}", "title": "" }, { "docid": "c50b14c24aee938de69d7ff9925f916d", "score": "0.53779286", "text": "func isEquilateral(a, b, c float64) bool {\n\treturn a == b && b == c\n}", "title": "" }, { "docid": "df7f5a4346420173c1567a1a58ce036b", "score": "0.53749883", "text": "func (c *Closure) Equals(c1 *Closure) bool {\n\tif c.Code != c1.Code || len(c.Upvalues) != len(c1.Upvalues) 
{\n\t\treturn false\n\t}\n\tfor i, upv := range c.Upvalues {\n\t\tif c1.Upvalues[i] != upv {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "32da9b297980811c0af728ca5231e427", "score": "0.536307", "text": "func (i Int128) Geq(j Int128) bool {\n\tpanic(\"unimplemented\")\n}", "title": "" }, { "docid": "5d9cf24a490a3bd068b10eb35469f8bb", "score": "0.5346609", "text": "func (p1 *PubPoly) Equal(p2 *PubPoly) bool {\n\tk := len(p1.p)\n\tif p1.g.String() != p2.g.String() || k != len(p2.p) {\n\t\tpanic(\"Mismatched polynomial commitments\")\n\t}\n\tfor i := 0; i < len(p1.p); i++ {\n\t\tif !p1.p[i].Equal(p2.p[i]) {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "7369eb882f366239c5f16e3c6c6dbaf2", "score": "0.5344731", "text": "func FloatEquals(a, b float64) bool {\n\treturn (a-b) < EPSILON && (b-a) < EPSILON\n}", "title": "" }, { "docid": "c288d53a963a3b3d5ee1f211fb0fdf54", "score": "0.5341035", "text": "func DebugEqualf(expected, actual interface{}, format string, v ...interface{}) {}", "title": "" }, { "docid": "bfadb9d2d4e169c045e85140c7dd92f8", "score": "0.5325735", "text": "func (d *Digest) Equal(other *Digest) bool {\n\tif d == nil || other == nil {\n\t\treturn d == nil && other == nil\n\t}\n\treturn *d == *other\n}", "title": "" }, { "docid": "fa7e58127ff004f80607a85ba5f33ffc", "score": "0.53127587", "text": "func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool {\n\tif h, ok := a.t.(tHelper); ok {\n\t\th.Helper()\n\t}\n\treturn EqualValuesf(a.t, expected, actual, msg, args...)\n}", "title": "" }, { "docid": "97075a6eacac3a50eab0ab37c6760d68", "score": "0.53124636", "text": "func (f *Float) Equals(o Object) bool {\n\t// type cast left object if the two operands are not the same type\n\tif f.Type() != o.Type() {\n\t\to = o.Cast(f.Type())\n\t}\n\n\treturn f.HashKey() == o.(Hashable).HashKey()\n}", "title": "" }, { "docid": "ba82dcaa2acb4ccab535b418d8810e1e", "score": "0.53117365", "text": "func (data GenesisState) Equal(data2 GenesisState) bool {\n\tb1 := ModuleCdc.MustMarshalBinaryBare(&data)\n\tb2 := ModuleCdc.MustMarshalBinaryBare(&data2)\n\treturn bytes.Equal(b1, b2)\n}", "title": "" }, { "docid": "8ec55c518d6e28a28326cc3eee566c1c", "score": "0.5309875", "text": "func (p PubComp) Equals(other interface{}) bool {\n\treturn p == other\n}", "title": "" }, { "docid": "5e4445ba3129d7eb866250614284c8c8", "score": "0.53023696", "text": "func eq(s1, s2 string) bool {\n\treturn s1 == s2\n}", "title": "" }, { "docid": "37e85620131810e10afdc5dcb064d452", "score": "0.52975786", "text": "func (this Float64) Equals(other Float64) bool {\n\treturn (this.IsNull && other.IsNull) ||\n\t\t(this.IsNull == other.IsNull && this.Value == other.Value)\n}", "title": "" }, { "docid": "0941acde5a04c68b5507382cd54b5ddc", "score": "0.52936167", "text": "func (a *Assertions) NotEqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool {\n\tif h, ok := a.t.(tHelper); ok {\n\t\th.Helper()\n\t}\n\treturn NotEqualValuesf(a.t, expected, actual, msg, args...)\n}", "title": "" }, { "docid": "111264112db3315aa604f42b85ac199a", "score": "0.5291153", "text": "func (a GenesisState) Equal(b GenesisState) bool {\n\taJSON, err := json.Marshal(a)\n\tif err != nil {\n\t\treturn false\n\t}\n\tbJSON, err := json.Marshal(b)\n\tif err != nil {\n\t\treturn false\n\t}\n\taJSONSorted, err := sdk.SortJSON(aJSON)\n\tif err != nil {\n\t\treturn false\n\t}\n\tbJSONSorted, err := 
sdk.SortJSON(bJSON)\n\tif err != nil {\n\t\treturn false\n\t}\n\treturn string(aJSONSorted) == string(bJSONSorted)\n}", "title": "" }, { "docid": "7aa5e888c0ac29ecf187e67cae06ab75", "score": "0.5281565", "text": "func (a *Assertions) LessOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool {\n\tif h, ok := a.t.(tHelper); ok {\n\t\th.Helper()\n\t}\n\treturn LessOrEqualf(a.t, e1, e2, msg, args...)\n}", "title": "" }, { "docid": "44b967fea19f00ac208c754e3869b376", "score": "0.5278681", "text": "func isFloatEqual(a, b, tolerance float64) (equal bool) {\n\tconst pos, neg = 1, -1\n\tswitch {\n\tcase math.IsNaN(a), math.IsInf(a, pos), math.IsInf(a, neg), math.IsNaN(b), math.IsInf(b, pos), math.IsInf(b, neg):\n\t\tequal = false\n\tcase a != 0 && b != 0:\n\t\tequal = math.Abs((a-b)/a) <= tolerance\n\tdefault:\n\t\tequal = math.Abs(a-b) <= tolerance\n\t}\n\treturn\n}", "title": "" }, { "docid": "b7192275f4c6d001d1d88f24ff60e6c9", "score": "0.52585137", "text": "func (p PostGIS) TolerantEquals(t *testing.T, g1, g2 geom.Geometry) bool {\n\t// The snap to grid can sometimes mess up the equality check if the\n\t// geometry is split different in the two forms. Try without snap to grid\n\t// first.\n\tvar eq bool\n\tif err := p.db.QueryRow(`\n\t\tSELECT ST_Equals(\n\t\t\tST_GeomFromWKB($1),\n\t\t\tST_GeomFromWKB($2)\n\t\t)`, g1, g2,\n\t).Scan(&eq); err != nil {\n\t\tt.Fatalf(\"pg err: %v\", err)\n\t}\n\tif eq {\n\t\treturn true\n\t}\n\n\tif err := p.db.QueryRow(`\n\t\tSELECT ST_Equals(\n\t\t\tST_SnapToGrid(ST_GeomFromWKB($1), 0, 0, 0.00001, 0.00001),\n\t\t\tST_SnapToGrid(ST_GeomFromWKB($2), 0, 0, 0.00001, 0.00001)\n\t\t)`, g1, g2,\n\t).Scan(&eq); err != nil {\n\t\tt.Fatalf(\"pg err: %v\", err)\n\t}\n\treturn eq\n}", "title": "" }, { "docid": "b89ca8294f6766a89e636009f8fd5df7", "score": "0.52557284", "text": "func Eq(arg1 interface{}, arg2 interface{}) (res bool, ok bool) {\n\treturn Equal(arg1, arg2)\n}", "title": "" }, { "docid": "b3693bd40db6140621308f1714b52f73", "score": "0.52531433", "text": "func (a assert) Equal(g, e any) bool {\n\tif !a.equal(g, e) {\n\t\ta.helper()\n\t\ta.failEqualf(g, e, \"\")\n\t\treturn false\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "31520a57ba05bd22f4c71e713b3a01b3", "score": "0.52408385", "text": "func Equals(frame wdte.Frame, args ...wdte.Func) wdte.Func {\n\tif len(args) <= 1 {\n\t\treturn wdteutil.SaveArgsReverse(wdte.GoFunc(Equals), args...)\n\t}\n\n\ta1 := args[0]\n\tif _, ok := a1.(error); ok {\n\t\treturn a1\n\t}\n\n\ta2 := args[1]\n\tif _, ok := a2.(error); ok {\n\t\treturn a2\n\t}\n\n\tif cmp, ok := a1.(wdte.Comparer); ok {\n\t\tc, _ := cmp.Compare(a2)\n\t\treturn wdte.Bool(c == 0)\n\t}\n\n\tif cmp, ok := a2.(wdte.Comparer); ok {\n\t\tc, _ := cmp.Compare(a1)\n\t\treturn wdte.Bool(c == 0)\n\t}\n\n\treturn wdte.Bool(a1 == a2)\n}", "title": "" }, { "docid": "70b3df4fa5d74822aa9ccc157d33180d", "score": "0.52378154", "text": "func equal(lhsV, rhsV reflect.Value) bool {\n\tlhsNotValid, rhsVNotValid := !lhsV.IsValid(), !rhsV.IsValid()\n\tif lhsNotValid && rhsVNotValid {\n\t\treturn true\n\t}\n\tif (!lhsNotValid && rhsVNotValid) || (lhsNotValid && !rhsVNotValid) {\n\t\treturn false\n\t}\n\n\tlhsIsNil, rhsIsNil := isNil(lhsV), isNil(rhsV)\n\tif lhsIsNil && rhsIsNil {\n\t\treturn true\n\t}\n\tif (!lhsIsNil && rhsIsNil) || (lhsIsNil && !rhsIsNil) {\n\t\treturn false\n\t}\n\tif lhsV.Kind() == reflect.Interface || lhsV.Kind() == reflect.Ptr {\n\t\tlhsV = lhsV.Elem()\n\t}\n\tif rhsV.Kind() == reflect.Interface || rhsV.Kind() == reflect.Ptr 
{\n\t\trhsV = rhsV.Elem()\n\t}\n\n\t// Compare a string and a number.\n\t// This will attempt to convert the string to a number,\n\t// while leaving the other side alone. Code further\n\t// down takes care of converting ints and floats as needed.\n\tif isNum(lhsV) && rhsV.Kind() == reflect.String {\n\t\trhsF, err := tryToFloat64(rhsV)\n\t\tif err != nil {\n\t\t\t// Couldn't convert RHS to a float, they can't be compared.\n\t\t\treturn false\n\t\t}\n\t\trhsV = reflect.ValueOf(rhsF)\n\t} else if lhsV.Kind() == reflect.String && isNum(rhsV) {\n\t\t// If the LHS is a string formatted as an int, try that before trying float\n\t\tlhsI, err := tryToInt64(lhsV)\n\t\tif err != nil {\n\t\t\t// if LHS is a float, e.g. \"1.2\", we need to set lhsV to a float64\n\t\t\tlhsF, err := tryToFloat64(lhsV)\n\t\t\tif err != nil {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tlhsV = reflect.ValueOf(lhsF)\n\t\t} else {\n\t\t\tlhsV = reflect.ValueOf(lhsI)\n\t\t}\n\t}\n\n\tif isNum(lhsV) && isNum(rhsV) {\n\t\tif rhsV.Type().ConvertibleTo(lhsV.Type()) {\n\t\t\trhsV = rhsV.Convert(lhsV.Type())\n\t\t}\n\t}\n\n\t// Try to compare bools to strings and numbers\n\tif lhsV.Kind() == reflect.Bool || rhsV.Kind() == reflect.Bool {\n\t\tlhsB, err := tryToBool(lhsV, false)\n\t\tif err != nil {\n\t\t\treturn false\n\t\t}\n\t\trhsB, err := tryToBool(rhsV, false)\n\t\tif err != nil {\n\t\t\treturn false\n\t\t}\n\t\treturn lhsB == rhsB\n\t}\n\n\tif lhsV.CanInterface() && rhsV.CanInterface() {\n\t\treturn reflect.DeepEqual(lhsV.Interface(), rhsV.Interface())\n\t}\n\treturn reflect.DeepEqual(lhsV, rhsV)\n}", "title": "" }, { "docid": "7a12459e081ab975dbc6adfde30b8875", "score": "0.5234541", "text": "func (fn *PureFunction) Equals(f astmodel.Function, _ astmodel.EqualityOverrides) bool {\n\ttypedF, ok := f.(*PureFunction)\n\tif !ok {\n\t\treturn false\n\t}\n\n\t// TODO: We're not actually checking function structure here\n\t// - ensure overrides is used if/when we do so\n\treturn fn.name == typedF.name\n}", "title": "" }, { "docid": "00492c59bf5dea33a80473762748edb5", "score": "0.523136", "text": "func (o *OriginalVersionFunction) Equals(f astmodel.Function, _ astmodel.EqualityOverrides) bool {\n\t_, ok := f.(*OriginalVersionFunction)\n\t// Equality is just based on Type for now\n\treturn ok\n}", "title": "" }, { "docid": "f6665efb8a43635d9ffa60f6428eeb3f", "score": "0.52309465", "text": "func (s *State) Equal(i1, i2 int) bool {\n\treturn int(C.lua_equal(s.l, C.int(i1), C.int(i2))) == 1\n}", "title": "" }, { "docid": "d46fac8ba4b3f5dbe8eb105584465bbb", "score": "0.52238417", "text": "func (a AggregatorSpec_Aggregation) Equals(b AggregatorSpec_Aggregation) bool {\n\tif a.Func != b.Func || a.Distinct != b.Distinct {\n\t\treturn false\n\t}\n\tif a.FilterColIdx == nil {\n\t\tif b.FilterColIdx != nil {\n\t\t\treturn false\n\t\t}\n\t} else {\n\t\tif b.FilterColIdx == nil || *a.FilterColIdx != *b.FilterColIdx {\n\t\t\treturn false\n\t\t}\n\t}\n\tif len(a.ColIdx) != len(b.ColIdx) {\n\t\treturn false\n\t}\n\tfor i, c := range a.ColIdx {\n\t\tif c != b.ColIdx[i] {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "1b230134de6704530ed0edae40a62817", "score": "0.522018", "text": "func (x Gvk) Equals(o Gvk) bool {\n\treturn x.Group == o.Group && x.Version == o.Version && x.Kind == o.Kind\n}", "title": "" }, { "docid": "2624cfa2f6d3f46996c5d31890f51ead", "score": "0.5216749", "text": "func assertEqual(t TestingT, eq ByteArrayComparisonAssertionFunc, filepath string, actual []byte, msgAndArgs ...interface{}) bool {\n\tif h, ok 
:= t.(tHelper); ok {\n\t\th.Helper()\n\t}\n\tif *Update {\n\t\twriteGoldenFile(t, filepath, actual)\n\t\treturn true\n\t}\n\texpected := readGoldenFile(t, filepath)\n\treturn eq(t, expected, actual, msgAndArgs...)\n}", "title": "" }, { "docid": "103a86317796d4267f9f73c3287b73f9", "score": "0.52144444", "text": "func EqualsRefOfFuncExpr(a, b *FuncExpr) bool {\n\tif a == b {\n\t\treturn true\n\t}\n\tif a == nil || b == nil {\n\t\treturn false\n\t}\n\treturn a.Distinct == b.Distinct &&\n\t\tEqualsTableIdent(a.Qualifier, b.Qualifier) &&\n\t\tEqualsColIdent(a.Name, b.Name) &&\n\t\tEqualsSelectExprs(a.Exprs, b.Exprs)\n}", "title": "" }, { "docid": "4fe14f91d8014363ec6077d364d16776", "score": "0.52125376", "text": "func eq(x, y interface{}) bool {\n\tnormalize := func(v interface{}) interface{} {\n\t\tvv := reflect.ValueOf(v)\n\t\tswitch vv.Kind() {\n\t\tcase reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:\n\t\t\treturn vv.Int()\n\t\tcase reflect.Float32, reflect.Float64:\n\t\t\treturn vv.Float()\n\t\tcase reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:\n\t\t\treturn vv.Uint()\n\t\tdefault:\n\t\t\treturn v\n\t\t}\n\t}\n\tx = normalize(x)\n\ty = normalize(y)\n\treturn reflect.DeepEqual(x, y)\n}", "title": "" }, { "docid": "21d42f84b97240440c4c99383fea639c", "score": "0.52104", "text": "func (c Color) Equals(other Color) bool {\n\treturn c.tuple.Equals(other.tuple)\n}", "title": "" }, { "docid": "50de1c7c3449532855675d20643c1caf", "score": "0.5204104", "text": "func (k *PKFunc) IsEqual(other Expr) bool {\n\t_, ok := other.(*PKFunc)\n\treturn ok\n}", "title": "" }, { "docid": "543f4027f34b574521fdf98a3ce80140", "score": "0.5203366", "text": "func (ss Float64s) Equals(rhs Float64s) bool {\n\tif len(ss) != len(rhs) {\n\t\treturn false\n\t}\n\n\tfor i := range ss {\n\t\tif !(ss[i] == rhs[i]) {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "e30be0517cf5be97c3a52c43a7c347f1", "score": "0.5202225", "text": "func (x *TimeTime) Geq(y *TimeTime) bool {\n\treturn x.rat.Cmp(y.rat) >= 0\n}", "title": "" }, { "docid": "09bfaede4f539b23d379a291562b5ed0", "score": "0.51993364", "text": "func (gs groups) Equal(other groups) bool {\n\tif len(gs) != len(other) {\n\t\treturn false\n\t}\n\tfor _, g := range other {\n\t\tif !gs.HasGroup(g) {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "title": "" }, { "docid": "b537d40f11cd87c9de492b8824c75e34", "score": "0.5196839", "text": "func notEqual(f1, f2 float64) bool {\n\tif math.Abs(f1-f2) > 0.000001 {\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "1eea7c96ab94a7de9512af19f3257a1f", "score": "0.5194024", "text": "func Equal(got, want interface{}) bool {\n\treturn reflect.DeepEqual(got, want)\n}", "title": "" }, { "docid": "f9f76abd844cf5fb34011c8a07e7b94a", "score": "0.5186143", "text": "func Eq(t testing.TB, expect interface{}, got interface{}) {\n\teq(t, 1, expect, got)\n}", "title": "" }, { "docid": "880eb2d255ce5c0600289a2182504597", "score": "0.5173677", "text": "func (fs *FileSystem) Equal(a *FileSystem) bool {\n\tlocalMatch := fs.file == a.file && fs.size == a.size\n\tsuperblockMatch := fs.superblock.equal(a.superblock)\n\treturn localMatch && superblockMatch\n}", "title": "" }, { "docid": "d0d8f7af3a27dd7aa084c3be09bbcc66", "score": "0.51707566", "text": "func (o Float32) Equals(other Float32, determinant func(this, other float32) bool) bool {\n\treturn (!o.Present && !other.Present) || (o.Present && other.Present && determinant(o.Value, 
other.Value))\n}", "title": "" }, { "docid": "542dbaf28a2833f79d9e07f8fbb8e407", "score": "0.5170151", "text": "func ColorEq(c1, c2 color.Color) bool {\n\tr1, g1, b1, a1 := c1.RGBA()\n\tr2, g2, b2, a2 := c2.RGBA()\n\treturn r1 == r2 && g1 == g2 && b1 == b2 && a1 == a2\n}", "title": "" }, { "docid": "a792715037133d972d11b706bbe8d592", "score": "0.51639634", "text": "func (f *Float) IsLessOrEqual(g interface{}) bool {\n\tgf := toFloat(g)\n\tif gf == nil {\n\t\treturn false\n\t}\n\treturn f.Cmp(&gf.Float) <= 0\n}", "title": "" }, { "docid": "4b51a337e87202d8dbae5afe3166adc8", "score": "0.51475155", "text": "func (s *Struct) Equal(rhs interface{}) bool {\n\treturn s == rhs || eqMapLike(s, rhs)\n}", "title": "" }, { "docid": "e90e9c26ae77085f6ca05bbb32c8caf0", "score": "0.5143022", "text": "func (x *GRN) Equals(b *GRN) bool {\n\tif b == nil {\n\t\treturn false\n\t}\n\treturn x == b || (x.TenantId == b.TenantId &&\n\t\tx.Kind == b.Kind &&\n\t\tx.UID == b.UID)\n}", "title": "" }, { "docid": "0b64c803535ae8fd3390795991e14506", "score": "0.51418406", "text": "func (s *State) Equal(other *State) bool {\n\t// For the moment this is sufficient, but we may need to do something\n\t// more elaborate in future if we have any portions of state that require\n\t// more sophisticated comparisons.\n\treturn reflect.DeepEqual(s, other)\n}", "title": "" }, { "docid": "03a897185a00b9536fc9d95a33490994", "score": "0.51410663", "text": "func (a *Assertions) GreaterOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool {\n\tif h, ok := a.t.(tHelper); ok {\n\t\th.Helper()\n\t}\n\treturn GreaterOrEqualf(a.t, e1, e2, msg, args...)\n}", "title": "" }, { "docid": "5c521e234e352c4f2caae67f590022af", "score": "0.5139562", "text": "func Eq(a, b ww.Any) (bool, error) {\n\t// Nil is only equal to itself\n\tif IsNil(a) && IsNil(b) {\n\t\treturn true, nil\n\t}\n\n\t// Check for usable interfaces on object A\n\tswitch val := a.(type) {\n\tcase Comparable:\n\t\ti, err := val.Comp(b)\n\t\treturn i == 0, err\n\n\tcase EqualityProvider:\n\t\treturn val.Eq(b)\n\n\t}\n\n\t// Check for usable interfaces on object B\n\tswitch val := b.(type) {\n\tcase Comparable:\n\t\ti, err := val.Comp(b)\n\t\treturn i == 0, err\n\n\tcase EqualityProvider:\n\t\treturn val.Eq(b)\n\n\t}\n\n\t// Identical types with the same canonical representation are equal.\n\tif a.Value().Which() == b.Value().Which() {\n\t\tca, err := Canonical(a)\n\t\tif err != nil {\n\t\t\treturn false, err\n\t\t}\n\n\t\tcb, err := Canonical(b)\n\t\tif err != nil {\n\t\t\treturn false, err\n\t\t}\n\n\t\treturn bytes.Equal(ca, cb), nil\n\t}\n\n\t// Disparate types are unequal by default.\n\treturn false, nil\n}", "title": "" }, { "docid": "5e905e6d1af548d8694d86278071a397", "score": "0.5132524", "text": "func (f *FidoAddr) Equal(fn *FidoAddr) bool {\n\tif f.zone == fn.zone && f.net == fn.net && f.node == fn.node && f.point == fn.point {\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" } ]
9e8528ef4756be7b05a1adbab498f0ca
EncoderPool mocks base method.
[ { "docid": "a2115f4ca1e1a619e37c78592705e8c2", "score": "0.7755282", "text": "func (m *MockOptions) EncoderPool() encoding.EncoderPool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"EncoderPool\")\n\tret0, _ := ret[0].(encoding.EncoderPool)\n\treturn ret0\n}", "title": "" } ]
[ { "docid": "86f89e353132389dfb4fc495b6b9a236", "score": "0.65997416", "text": "func (m *MockOptions) TagEncoderPoolSize() pool.Size {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"TagEncoderPoolSize\")\n\tret0, _ := ret[0].(pool.Size)\n\treturn ret0\n}", "title": "" }, { "docid": "5674333fc5ef025bcaffd367eb764300", "score": "0.6586802", "text": "func (m *MockOptions) SetEncoderPool(value encoding.EncoderPool) Options {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetEncoderPool\", value)\n\tret0, _ := ret[0].(Options)\n\treturn ret0\n}", "title": "" }, { "docid": "b952a73aee888695f07e41ac11c8f4fb", "score": "0.65492624", "text": "func (m *MockAdminOptions) TagEncoderPoolSize() pool.Size {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"TagEncoderPoolSize\")\n\tret0, _ := ret[0].(pool.Size)\n\treturn ret0\n}", "title": "" }, { "docid": "252c0e3b1ddb0ce3c92d265342bedbec", "score": "0.6282103", "text": "func (ip *MockIteratorPool) TagEncoder() serialize.TagEncoderPool {\n\tmu.Lock()\n\tip.EncodePoolUsed = true\n\tencoderPool := serialize.NewTagEncoderPool(serialize.NewTagEncoderOptions(), poolOpts)\n\tencoderPool.Init()\n\tmu.Unlock()\n\treturn encoderPool\n}", "title": "" }, { "docid": "d09b427d53f9727faf23187f8180be00", "score": "0.6256968", "text": "func (m *MockhostQueue) ConnectionPool() connectionPool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ConnectionPool\")\n\tret0, _ := ret[0].(connectionPool)\n\treturn ret0\n}", "title": "" }, { "docid": "89010333a0e0f951bb461dba62169905", "score": "0.5920521", "text": "func (m *MockOptions) SetTagEncoderPoolSize(value pool.Size) Options {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetTagEncoderPoolSize\", value)\n\tret0, _ := ret[0].(Options)\n\treturn ret0\n}", "title": "" }, { "docid": "53eec49d1ef2127d094674b98fc39254", "score": "0.5890853", "text": "func (m *MockAdminOptions) SetTagEncoderPoolSize(value pool.Size) Options {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetTagEncoderPoolSize\", value)\n\tret0, _ := ret[0].(Options)\n\treturn ret0\n}", "title": "" }, { "docid": "916a4ecbfd01199d5732970375fbf78c", "score": "0.5830742", "text": "func (m *MockAdminOptions) TagDecoderPoolSize() pool.Size {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"TagDecoderPoolSize\")\n\tret0, _ := ret[0].(pool.Size)\n\treturn ret0\n}", "title": "" }, { "docid": "11d79cb9b2b905f4284f6766eac2766d", "score": "0.58109516", "text": "func (m *MockOptions) TagDecoderPoolSize() pool.Size {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"TagDecoderPoolSize\")\n\tret0, _ := ret[0].(pool.Size)\n\treturn ret0\n}", "title": "" }, { "docid": "74e52507579f9dade0f74af5ad94efe3", "score": "0.5803965", "text": "func (m *MockOptions) BytesPool() pool.CheckedBytesPool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"BytesPool\")\n\tret0, _ := ret[0].(pool.CheckedBytesPool)\n\treturn ret0\n}", "title": "" }, { "docid": "6909283ce20ea912b24e4082d77b51da", "score": "0.57472014", "text": "func testEncoder(t *testing.T, v interface{}, err error, expected []byte) {\n\t// buffer is where we write the CBOR encoded values\n\tvar buffer = bytes.Buffer{}\n\t// create a new encoder writing to buffer, and encode v with it\n\tvar e = NewEncoder(&buffer).Encode(v)\n\n\tif e != err {\n\t\tt.Fatalf(\"err: %#v != %#v with %#v\", e, err, v)\n\t}\n\n\tif !bytes.Equal(buffer.Bytes(), expected) {\n\t\tt.Fatalf(\n\t\t\t\"(%#v) %#v != %#v\", v, buffer.Bytes(), expected,\n\t\t)\n\t}\n}", "title": "" }, { "docid": "07dbbce4f0230283c9cc70b22c390e39", "score": "0.5721393", "text": 
"func (m *MockconnectionPool) Open() {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Open\")\n}", "title": "" }, { "docid": "9064a6df17f86ae15bef60197c3b45e1", "score": "0.5683553", "text": "func CacheableObjectTest(t *testing.T, e runtime.Encoder) {\n\tgvk1 := schema.GroupVersionKind{Group: \"group\", Version: \"version1\", Kind: \"MockCacheableObject\"}\n\n\ttestCases := []struct {\n\t\tdesc string\n\t\trunEncode bool\n\t\treturnSelf bool\n\t\texpectedResult string\n\t\texpectedError error\n\t}{\n\t\t{\n\t\t\tdesc: \"delegate\",\n\t\t\trunEncode: true,\n\t\t},\n\t\t{\n\t\t\tdesc: \"delegate return self\",\n\t\t\trunEncode: true,\n\t\t\treturnSelf: true,\n\t\t},\n\t\t{\n\t\t\tdesc: \"cached success\",\n\t\t\trunEncode: false,\n\t\t\texpectedResult: \"result\",\n\t\t\texpectedError: nil,\n\t\t},\n\t\t{\n\t\t\tdesc: \"cached failure\",\n\t\t\trunEncode: false,\n\t\t\texpectedResult: \"\",\n\t\t\texpectedError: fmt.Errorf(\"encoding error\"),\n\t\t},\n\t}\n\n\tfor _, test := range testCases {\n\t\tt.Run(test.desc, func(t *testing.T) {\n\t\t\tobj := &MockCacheableObject{\n\t\t\t\tgvk: gvk1,\n\t\t\t\tt: t,\n\t\t\t\trunEncode: test.runEncode,\n\t\t\t\treturnSelf: test.returnSelf,\n\t\t\t\texpectedResult: test.expectedResult,\n\t\t\t\texpectedError: test.expectedError,\n\t\t\t}\n\t\t\tbuffer := bytes.NewBuffer(nil)\n\t\t\tw := &testBuffer{\n\t\t\t\twriter: buffer,\n\t\t\t\tt: t,\n\t\t\t\tobject: obj,\n\t\t\t}\n\n\t\t\tif err := e.Encode(obj, w); err != test.expectedError {\n\t\t\t\tt.Errorf(\"unexpected error: %v, expected: %v\", err, test.expectedError)\n\t\t\t}\n\t\t\tif !test.runEncode {\n\t\t\t\tif result := buffer.String(); result != test.expectedResult {\n\t\t\t\t\tt.Errorf(\"unexpected result: %s, expected: %s\", result, test.expectedResult)\n\t\t\t\t}\n\t\t\t}\n\t\t\tintercepted := obj.interceptedCalls()\n\t\t\tif len(intercepted) != 1 {\n\t\t\t\tt.Fatalf(\"unexpected number of intercepted calls: %v\", intercepted)\n\t\t\t}\n\t\t\tif intercepted[0] != e.Identifier() {\n\t\t\t\tt.Errorf(\"unexpected intercepted call: %v, expected: %v\", intercepted, e.Identifier())\n\t\t\t}\n\t\t})\n\t}\n}", "title": "" }, { "docid": "e2fc33ff2bf35d238b86895a48e55c7e", "score": "0.56721824", "text": "func (m *MockITx) GetObjPool(className string, funcName iface.IObjPoolFunc, params ...interface{}) iface.IObject {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{className, funcName}\n\tfor _, a := range params {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"GetObjPool\", varargs...)\n\tret0, _ := ret[0].(iface.IObject)\n\treturn ret0\n}", "title": "" }, { "docid": "f388ecb54a5ca6bcde937e46b6689a25", "score": "0.56396663", "text": "func (m *MockSessionManager) Encoder() SessionEncoder {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Encoder\")\n\tret0, _ := ret[0].(SessionEncoder)\n\treturn ret0\n}", "title": "" }, { "docid": "012606f3516a233ee017f7a5b106e40e", "score": "0.562071", "text": "func (m *MockIRow) GetObjPool(className string, funcName iface.IObjPoolFunc, params ...interface{}) iface.IObject {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{className, funcName}\n\tfor _, a := range params {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"GetObjPool\", varargs...)\n\tret0, _ := ret[0].(iface.IObject)\n\treturn ret0\n}", "title": "" }, { "docid": "ebfa240ce973570041e8bd1015bd9493", "score": "0.55448675", "text": "func (m *MockcodecSerializable) Prepare() error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Prepare\")\n\tret0, _ := ret[0].(error)\n\treturn 
ret0\n}", "title": "" }, { "docid": "ca3f1ac6c54c0d16ee9d1fc9c7e1d792", "score": "0.5462333", "text": "func (m *MockOptions) WriteOpPoolSize() pool.Size {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteOpPoolSize\")\n\tret0, _ := ret[0].(pool.Size)\n\treturn ret0\n}", "title": "" }, { "docid": "cecb949138bd6a6d015e6dee8007455a", "score": "0.5461952", "text": "func (m *MockOptions) WriteTaggedOpPoolSize() pool.Size {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteTaggedOpPoolSize\")\n\tret0, _ := ret[0].(pool.Size)\n\treturn ret0\n}", "title": "" }, { "docid": "da13d0760c6060d18042d23bd1030369", "score": "0.5450286", "text": "func TestPool(t *testing.T) {\n\n\t// All sub-tests to run. All of these tests will be run with a postgres\n\t// database and a bolt database.\n\ttests := map[string]func(*testing.T){\n\t\t\"testCSRFSecret\": testCSRFSecret,\n\t\t\"testLastPaymentInfo\": testLastPaymentInfo,\n\t\t\"testLastPaymentCreatedOn\": testLastPaymentCreatedOn,\n\t\t\"testPoolMode\": testPoolMode,\n\t\t\"testAcceptedWork\": testAcceptedWork,\n\t\t\"testAccount\": testAccount,\n\t\t\"testJob\": testJob,\n\t\t\"testDeleteJobsBeforeHeight\": testDeleteJobsBeforeHeight,\n\t\t\"testShares\": testShares,\n\t\t\"testPPSEligibleShares\": testPPSEligibleShares,\n\t\t\"testPPLNSEligibleShares\": testPPLNSEligibleShares,\n\t\t\"testPruneShares\": testPruneShares,\n\t\t\"testPayment\": testPayment,\n\t\t\"testArchivePayment\": testArchivePayment,\n\t\t\"testPaymentAccessors\": testPaymentAccessors,\n\t\t\"testEndpoint\": testEndpoint,\n\t\t\"testClientHashCalc\": testClientHashCalc,\n\t\t\"testClientRolledWork\": testClientTimeRolledWork,\n\t\t\"testClientMessageHandling\": testClientMessageHandling,\n\t\t\"testClientUpgrades\": testClientUpgrades,\n\t\t\"testHashData\": testHashData,\n\t\t\"testPaymentMgrPPS\": testPaymentMgrPPS,\n\t\t\"testPaymentMgrPPLNS\": testPaymentMgrPPLNS,\n\t\t\"testPaymentMgrMaturity\": testPaymentMgrMaturity,\n\t\t\"testPaymentMgrPayment\": testPaymentMgrPayment,\n\t\t\"testPaymentMgrDust\": testPaymentMgrDust,\n\t\t\"testPaymentSignals\": testPaymentMgrSignals,\n\t\t\"testChainState\": testChainState,\n\t\t\"testHub\": testHub,\n\t}\n\n\t// Run all tests with bolt DB.\n\tfor testName, test := range tests {\n\t\tboltDB, err := setupBoltDB()\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"setupBoltDB error: %v\", err)\n\t\t}\n\n\t\tdb = boltDB\n\n\t\tt.Run(testName+\"_Bolt\", test)\n\n\t\terr = boltDB.purge()\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"bolt teardown error: %v\", err)\n\t\t}\n\n\t\tboltDB.Close()\n\t}\n\n\t// Run all tests with postgres DB.\n\tfor testName, test := range tests {\n\t\tpostgresDB, err := setupPostgresDB()\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"setupPostgresDB error: %v\", err)\n\t\t}\n\n\t\tdb = postgresDB\n\n\t\tt.Run(testName+\"_Postgres\", test)\n\n\t\terr = postgresDB.purge()\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"postgres teardown error: %v\", err)\n\t\t}\n\n\t\tpostgresDB.Close()\n\t}\n}", "title": "" }, { "docid": "7a0d4dc8fdf7bd473e8684cce548b037", "score": "0.54411256", "text": "func (m *MockOptions) MultiReaderIteratorPool() encoding.MultiReaderIteratorPool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"MultiReaderIteratorPool\")\n\tret0, _ := ret[0].(encoding.MultiReaderIteratorPool)\n\treturn ret0\n}", "title": "" }, { "docid": "c79766505322ab1ecc34f878e9ea338b", "score": "0.54396254", "text": "func (m *MockPool) Acquire() (gobolt.Connection, error) {\n\tret := m.ctrl.Call(m, \"Acquire\")\n\tret0, _ := ret[0].(gobolt.Connection)\n\tret1, 
_ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "979019ecec436c2cfa73e0fb6cbf9070", "score": "0.5433136", "text": "func (m *MockMachineAllocator) WithResourcePool(arg0 string) maasclient.MachineAllocator {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WithResourcePool\", arg0)\n\tret0, _ := ret[0].(maasclient.MachineAllocator)\n\treturn ret0\n}", "title": "" }, { "docid": "6b036605e30e985ba995fcac26b21324", "score": "0.5429427", "text": "func (m *MockOptions) SetTagDecoderPoolSize(value pool.Size) Options {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetTagDecoderPoolSize\", value)\n\tret0, _ := ret[0].(Options)\n\treturn ret0\n}", "title": "" }, { "docid": "a697b2afd5df0aa40aeef17cbccef51c", "score": "0.5419813", "text": "func (m *MockAdminOptions) SetTagDecoderPoolSize(value pool.Size) Options {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetTagDecoderPoolSize\", value)\n\tret0, _ := ret[0].(Options)\n\treturn ret0\n}", "title": "" }, { "docid": "f69cbcf57534500f65aca7e3bf451fbd", "score": "0.5401593", "text": "func (m *MockOptions) ContextPool() context0.Pool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ContextPool\")\n\tret0, _ := ret[0].(context0.Pool)\n\treturn ret0\n}", "title": "" }, { "docid": "7e549580f5853826b8f39576de24d282", "score": "0.53754216", "text": "func (m *MockAdminOptions) ContextPool() context0.Pool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ContextPool\")\n\tret0, _ := ret[0].(context0.Pool)\n\treturn ret0\n}", "title": "" }, { "docid": "ace45a6478c4214f0f12a5c8b5c929c3", "score": "0.5372557", "text": "func (m *MockAdminOptions) WriteTaggedOpPoolSize() pool.Size {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteTaggedOpPoolSize\")\n\tret0, _ := ret[0].(pool.Size)\n\treturn ret0\n}", "title": "" }, { "docid": "f3c666c3c50b9eb575d635f77d9ff295", "score": "0.5361917", "text": "func init() {\n\tbufPool = NewBufferPool(64)\n}", "title": "" }, { "docid": "463bac43af1ea275fa6fd74e4e4678ce", "score": "0.53316057", "text": "func (m *MockPool) ConstructExternal() *storage.PoolExternal {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ConstructExternal\")\n\tret0, _ := ret[0].(*storage.PoolExternal)\n\treturn ret0\n}", "title": "" }, { "docid": "f17b358e13ad4652a3956e335b6696c6", "score": "0.53229046", "text": "func (m *MockIStmt) GetObjPool(className string, funcName iface.IObjPoolFunc, params ...interface{}) iface.IObject {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{className, funcName}\n\tfor _, a := range params {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"GetObjPool\", varargs...)\n\tret0, _ := ret[0].(iface.IObject)\n\treturn ret0\n}", "title": "" }, { "docid": "e9cda37de051ae9ba9cd1a2edd1356cd", "score": "0.5314771", "text": "func (m *MockAdminOptions) WriteOpPoolSize() pool.Size {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteOpPoolSize\")\n\tret0, _ := ret[0].(pool.Size)\n\treturn ret0\n}", "title": "" }, { "docid": "1a69ffa7416ae058386f74fb833adf8c", "score": "0.5299359", "text": "func (m *MockIPAllocator) ReleasePool(arg0 context.Context, arg1 string) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReleasePool\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "17903d9310c34932e78e8155e784e006", "score": "0.5295901", "text": "func MakeMockIteratorPool() *MockIteratorPool {\n\treturn &MockIteratorPool{}\n}", "title": "" }, { "docid": "dcdcfae61e53502ac21f3b034df0a00d", "score": "0.5285002", "text": "func (_m *MockProtocol) 
Encode(_param0 wire.Value, _param1 io.Writer) error {\n\tret := _m.ctrl.Call(_m, \"Encode\", _param0, _param1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "035a63f7b62250b892744c4e4e7699d1", "score": "0.5280279", "text": "func (m *MockOptions) ContextPool() context.Pool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ContextPool\")\n\tret0, _ := ret[0].(context.Pool)\n\treturn ret0\n}", "title": "" }, { "docid": "0304c46857b2039dbf54cfc8ea80393e", "score": "0.52793574", "text": "func (m *MockOptions) ReaderIteratorPool() encoding.ReaderIteratorPool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReaderIteratorPool\")\n\tret0, _ := ret[0].(encoding.ReaderIteratorPool)\n\treturn ret0\n}", "title": "" }, { "docid": "248999183429bfaa89c6b8006c997c3c", "score": "0.52731675", "text": "func init() {\n\tnewMetricClient = mockNewMetricClient\n\tnewExpBundler = mockNewExpBundler\n}", "title": "" }, { "docid": "36a8ccff95e8c72e7ef4b9c2d54e0079", "score": "0.5208661", "text": "func (m *MockSession) IteratorPools() (encoding.IteratorPools, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"IteratorPools\")\n\tret0, _ := ret[0].(encoding.IteratorPools)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "618fd8c7df48eb4e4163e0a44364d320", "score": "0.52032703", "text": "func (m *MockProtocol) Encode(arg0 wire.Value, arg1 io.Writer) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Encode\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "f17b34c8eee73d4370a79993ae13606c", "score": "0.5202802", "text": "func (mr *MockOptionsMockRecorder) EncoderPool() *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"EncoderPool\", reflect.TypeOf((*MockOptions)(nil).EncoderPool))\n}", "title": "" }, { "docid": "c49e1d92bbe9b2aa218c089d835e5381", "score": "0.5192007", "text": "func TestSyncPoolV2(t *testing.T) {\n\tp := sync.Pool{\n\t\tNew: func() interface{} {\n\t\t\treturn 0\n\t\t},\n\t}\n\tp.Put(1)\n\tp.Put(2)\n\tp.Put(3)\n\tp.Put(4)\n\n\truntime.GC()\n\tfor i := 0; i < 5; i++ {\n\t\tfmt.Println(p.Get())\n\t}\n}", "title": "" }, { "docid": "c6e6b7314cdb87311fe2ee53e8eb5afa", "score": "0.51918083", "text": "func (m *MockOptions) IdentifierPool() ident.Pool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"IdentifierPool\")\n\tret0, _ := ret[0].(ident.Pool)\n\treturn ret0\n}", "title": "" }, { "docid": "3654885a77964149d2cd2e8bafd60ffb", "score": "0.5157852", "text": "func TestEncoding(t *testing.T) {\n\n\tos := []Optimus{\n\t\tNew(309779747, 49560203, 57733611),\n\t\tNew(684934207, 1505143743, 846034763),\n\t\tNew(743534599, 1356791223, 1336232185),\n\t\tNew(54661037, 1342843941, 576322863),\n\t\tNew(198194831, 229517423, 459462336),\n\t\tNewCalculated(198194831, 459462336),\n\t}\n\n\tfor i := 0; i < 5; i++ { // How many times we want to run GenerateSeed()\n\t\to := os[i]\n\n\t\tc := 10\n\t\th := 100 // How many random numbers to select in between 0-c and (MAX_INT-c) - MAX-INT\n\n\t\tvar y []uint64 // Stores all the values we want to run encoding tests on\n\n\t\tfor t := 0; t < c; t++ {\n\t\t\ty = append(y, uint64(t))\n\t\t}\n\n\t\t//Generate Random numbers\n\t\tfor t := 0; t < h; t++ {\n\t\t\tupper := *big.NewInt(int64(MAX_INT - 2*uint64(c)))\n\t\t\trand, _ := rand.Int(rand.Reader, &upper)\n\t\t\trandomNumber := rand.Uint64() + uint64(c)\n\n\t\t\ty = append(y, randomNumber)\n\t\t}\n\n\t\tfor t := MAX_INT; t >= MAX_INT-uint64(c); t-- {\n\t\t\ty = append(y, 
t)\n\t\t}\n\n\t\tfor _, value := range y {\n\t\t\torig := value\n\t\t\thashed := o.Encode(value)\n\t\t\tunhashed := o.Decode(hashed)\n\n\t\t\tif orig != unhashed {\n\t\t\t\tt.Errorf(\"%d: %d -> %d - FAILED\", orig, hashed, unhashed)\n\t\t\t} else {\n\t\t\t\tt.Logf(\"%d: %d -> %d - PASSED\", orig, hashed, unhashed)\n\t\t\t}\n\t\t}\n\n\t}\n}", "title": "" }, { "docid": "6b68e1d25a791cc88ad4733d7ab4f93f", "score": "0.51549953", "text": "func (m *MockOptions) AsyncWriteWorkerPool() sync.PooledWorkerPool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AsyncWriteWorkerPool\")\n\tret0, _ := ret[0].(sync.PooledWorkerPool)\n\treturn ret0\n}", "title": "" }, { "docid": "fab4f66cfc26d1f425b0eab6d1f2d047", "score": "0.5143166", "text": "func (m *MockclientSession) IteratorPools() (encoding.IteratorPools, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"IteratorPools\")\n\tret0, _ := ret[0].(encoding.IteratorPools)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "cb2dc964809263d0e1ea8b1129c24ff5", "score": "0.5126908", "text": "func (m *MockOptions) HostQueueOpsArrayPoolSize() pool.Size {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"HostQueueOpsArrayPoolSize\")\n\tret0, _ := ret[0].(pool.Size)\n\treturn ret0\n}", "title": "" }, { "docid": "930a2769311031f2562ca605785196ee", "score": "0.51127005", "text": "func (m *MockKeyValue) Pool() (*redis.Pool, bool) {\n\tr0, r1 := m.PoolFunc.nextHook()()\n\tm.PoolFunc.appendCall(KeyValuePoolFuncCall{r0, r1})\n\treturn r0, r1\n}", "title": "" }, { "docid": "f5f78a6dde9612cccf294292bcf65027", "score": "0.5106901", "text": "func (m *MockAdminSession) IteratorPools() (encoding.IteratorPools, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"IteratorPools\")\n\tret0, _ := ret[0].(encoding.IteratorPools)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "244eedf18a42f062102b9ff03c6cb573", "score": "0.5094423", "text": "func (m *MockAdminOptions) IdentifierPool() ident.Pool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"IdentifierPool\")\n\tret0, _ := ret[0].(ident.Pool)\n\treturn ret0\n}", "title": "" }, { "docid": "15bdc28f9b422a90e3f6f2794a6e618f", "score": "0.50929123", "text": "func TestNewPool(t *testing.T) {\n\tif env == \"test\" {\n\t}\n}", "title": "" }, { "docid": "1d87c1a93a2d007717ca71c4e82b5664", "score": "0.5084382", "text": "func (m *MockITx) GetObjPoolCtx(ctr iface.IContext, className string, funcName iface.IObjPoolFunc, params ...interface{}) iface.IObject {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{ctr, className, funcName}\n\tfor _, a := range params {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"GetObjPoolCtx\", varargs...)\n\tret0, _ := ret[0].(iface.IObject)\n\treturn ret0\n}", "title": "" }, { "docid": "4c13b2a280de056cc7a515cd04eb69e6", "score": "0.5083976", "text": "func (m *MockAdminOptions) HostQueueOpsArrayPoolSize() pool.Size {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"HostQueueOpsArrayPoolSize\")\n\tret0, _ := ret[0].(pool.Size)\n\treturn ret0\n}", "title": "" }, { "docid": "d56d4993b0d0c32fca0184977308ca6d", "score": "0.5076265", "text": "func initTest(queue int, numCPUs int) (pool *Pool) {\n\t\n\t//runtime.GOMAXPROCS(numCPUs)\n\tprintln(\"Using MAXPROC \", numCPUs)\n\tvar p *Pool = newPool(queue, numCPUs)\n\tp.run()\n\t\n\treturn p\n}", "title": "" }, { "docid": "d2c4d8461e8e94297a9fc6211c412bda", "score": "0.50643337", "text": "func (m *MockOptions) SetBytesPool(value pool.CheckedBytesPool) Options 
{\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetBytesPool\", value)\n\tret0, _ := ret[0].(Options)\n\treturn ret0\n}", "title": "" }, { "docid": "a222dc88c8d7e9329c778bf2d5871256", "score": "0.5063692", "text": "func NewMockPool() *MockPool {\n\treturn &MockPool{\n\t\tBorrowFunc: &PoolBorrowFunc{\n\t\t\tdefaultHook: func() (iface.Conn, bool) {\n\t\t\t\treturn nil, false\n\t\t\t},\n\t\t},\n\t\tBorrowTimeoutFunc: &PoolBorrowTimeoutFunc{\n\t\t\tdefaultHook: func(time.Duration) (iface.Conn, bool) {\n\t\t\t\treturn nil, false\n\t\t\t},\n\t\t},\n\t\tCloseFunc: &PoolCloseFunc{\n\t\t\tdefaultHook: func() {\n\t\t\t\treturn\n\t\t\t},\n\t\t},\n\t\tReleaseFunc: &PoolReleaseFunc{\n\t\t\tdefaultHook: func(iface.Conn) {\n\t\t\t\treturn\n\t\t\t},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "96365d0f1404fa513b4bfaa09b6c60cd", "score": "0.5056133", "text": "func (m *MockOptions) CheckedBytesPool() pool.CheckedBytesPool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"CheckedBytesPool\")\n\tret0, _ := ret[0].(pool.CheckedBytesPool)\n\treturn ret0\n}", "title": "" }, { "docid": "c8ecfcde97d6da44408bd1eabe5619e8", "score": "0.5046784", "text": "func newEncoder() *encoder {\n\te := new(encoder)\n\te.buf = make([]byte, 4096)\n\te.bwriter.Reset(e.buf, true)\n\treturn e\n}", "title": "" }, { "docid": "91495826852fb4dd09eec7056bd7dca2", "score": "0.50447434", "text": "func Test_Pool_01(t *testing.T) {\n\n\tpool := NewVerifierPool()\n\n\tfoo, err := GenerateCredentials(AlgorithmEd25519)\n\trequire.Nil(t, err)\n\terr = pool.Add(\"foo\", foo)\n\trequire.Nil(t, err)\n\n\tdata := []byte(\"When in the course of human events\")\n\n\tsignature, err := foo.Sign(data)\n\trequire.Nil(t, err)\n\trequire.NotEmpty(t, signature)\n\n\t// Test that the data has been signed by some known source.\n\terr = pool.VerifyAny(data, signature)\n\trequire.Nil(t, err)\n\n\terr = pool.VerifySpecific(\"foo\", data, signature)\n\trequire.Nil(t, err)\n\n}", "title": "" }, { "docid": "21e777cc7ccc4fb7fe81b07974f318c4", "score": "0.50383306", "text": "func Test_Pool_03(t *testing.T) {\n\n\tpool := NewVerifierPool()\n\n\tfoo, err := GenerateCredentials(AlgorithmEd25519)\n\trequire.Nil(t, err)\n\terr = pool.Add(\"foo\", foo)\n\trequire.Nil(t, err)\n\n\tbar, err := GenerateCredentials(AlgorithmECDSA_P256)\n\trequire.Nil(t, err)\n\terr = pool.Add(\"bar\", bar)\n\trequire.Nil(t, err)\n\n\tcrabapple, err := GenerateCredentials(AlgorithmECDSA_P256)\n\trequire.Nil(t, err)\n\n\tdata := []byte(\"When in the course of human events\")\n\n\tsignature, err := crabapple.Sign(data)\n\trequire.Nil(t, err)\n\trequire.NotEmpty(t, signature)\n\n\terr = pool.VerifyAny(data, signature)\n\trequire.NotNil(t, err)\n\n\terr = pool.VerifySpecific(\"crabapple\", data, signature)\n\trequire.NotNil(t, err)\n\trequire.Equal(t, ErrNoVerifier, err)\n\n\terr = pool.VerifySpecific(\"foo\", data, signature)\n\trequire.NotNil(t, err)\n\trequire.Equal(t, ErrUnverifiedSignature, err)\n\n}", "title": "" }, { "docid": "fcfe8cf123ae3af3ac9951d3b4276f35", "score": "0.50319713", "text": "func (m *MockRecordLogEncoder) Encode(arg0 interface{}) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Encode\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "848090d929e1fe3cfd03a4f1f5894cc7", "score": "0.50262064", "text": "func BenchmarkPool(b *testing.B) {\n\tpool := sync.Pool{\n\t\tNew: func() interface{} {\n\t\t\treturn new(bytes.Buffer)\n\t\t},\n\t}\n\n\tfor i := 0; i < b.N; i++ {\n\t\tbuf := 
pool.Get().(*bytes.Buffer)\n\t\tbuf.WriteString(\"Hello\")\n\t\t_ = buf\n\t\tbuf.Reset()\n\t\tpool.Put(buf)\n\t}\n}", "title": "" }, { "docid": "d98770c61a0831cf9a168da91d096905", "score": "0.5021624", "text": "func (mr *MockOptionsMockRecorder) SetEncoderPool(value interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"SetEncoderPool\", reflect.TypeOf((*MockOptions)(nil).SetEncoderPool), value)\n}", "title": "" }, { "docid": "9f27e8b90debad8e1a2ee9531ffcbad7", "score": "0.5020699", "text": "func TestPool_New(t *testing.T) {\n\tcfg := PoolCfg{6, 10, func(cfg ConnCfg) (net.Conn, error) {\n\t\tlog.Printf(\"cfg=%+v\\n\", cfg)\n\t\tc, err := net.Dial(cfg.Protocol, cfg.HostPort)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"failed to initialize connection for : %v due to: %v\\n\", c, err)\n\t\t}\n\t\treturn c, nil\n\t}, []ConnCfg{\n\t\t{HostPort: \"127.0.0.1:12345\", Protocol: \"tcp\"},\n\t\t{HostPort: \"127.0.0.1:12346\", Protocol: \"tcp\"},\n\t\t{HostPort: \"127.0.0.1:12347\", Protocol: \"tcp\"},\n\t\t{HostPort: \"127.0.0.1:12348\", Protocol: \"tcp\"},\n\t\t{HostPort: \"127.0.0.1:12349\", Protocol: \"tcp\"},\n\t\t{HostPort: \"127.0.0.1:12350\", Protocol: \"tcp\"},\n\t}, true}\n\t_, p = New(cfg)\n}", "title": "" }, { "docid": "72998d9245b4323543f872653fc5b845", "score": "0.501672", "text": "func (m *MogClientImpl) initPool() {\n\n\tm.Pool = make(MogPoolType, 0)\n}", "title": "" }, { "docid": "457c4f1029588bb2139846ee7a80aebe", "score": "0.49988076", "text": "func (m *MockMachinePoolConverter) ToExternal(arg0 *api.MachinePool) interface{} {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ToExternal\", arg0)\n\tret0, _ := ret[0].(interface{})\n\treturn ret0\n}", "title": "" }, { "docid": "c259c7294c25cd17ca8ce0f057c4f55e", "score": "0.49924716", "text": "func (m *MockIRow) GetObjPoolCtx(ctr iface.IContext, className string, funcName iface.IObjPoolFunc, params ...interface{}) iface.IObject {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{ctr, className, funcName}\n\tfor _, a := range params {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"GetObjPoolCtx\", varargs...)\n\tret0, _ := ret[0].(iface.IObject)\n\treturn ret0\n}", "title": "" }, { "docid": "46731169e4fd5a15efc973410ab98baf", "score": "0.49885857", "text": "func Test_Pool_02(t *testing.T) {\n\n\tpool := NewVerifierPool()\n\n\tfoo, err := GenerateCredentials(AlgorithmEd25519)\n\trequire.Nil(t, err)\n\terr = pool.Add(\"foo\", foo)\n\trequire.Nil(t, err)\n\n\tbar, err := GenerateCredentials(AlgorithmECDSA_P256)\n\trequire.Nil(t, err)\n\terr = pool.Add(\"bar\", bar)\n\trequire.Nil(t, err)\n\n\tdata := []byte(\"When in the course of human events\")\n\n\tsignature, err := foo.Sign(data)\n\trequire.Nil(t, err)\n\trequire.NotEmpty(t, signature)\n\n\terr = pool.VerifyAny(data, signature)\n\trequire.Nil(t, err)\n\n\terr = pool.VerifySpecific(\"foo\", data, signature)\n\trequire.Nil(t, err)\n\n}", "title": "" }, { "docid": "7aaac8922cdd7a2b1ce860d15e24211a", "score": "0.49863386", "text": "func TestGeneratePoolBlockResults(t *testing.T) {\n\ttf.UnitTest(t)\n\n\tCreatePoSTFunc := func() {}\n\n\tctx := context.Background()\n\tmockSigner, blockSignerAddr := setupSigner()\n\tnewCid := types.NewCidForTestGetter()\n\tst, pool, addrs, cst, bs := sharedSetup(t, mockSigner)\n\n\tgetStateTree := func(c context.Context, ts types.TipSet) (state.Tree, error) {\n\t\treturn st, nil\n\t}\n\tgetAncestors := func(ctx context.Context, ts types.TipSet, newBlockHeight *types.BlockHeight) 
([]types.TipSet, error) {\n\t\treturn nil, nil\n\t}\n\n\tmessages := chain.NewMessageStore(cst)\n\n\tworker := mining.NewDefaultWorkerWithDeps(mining.WorkerParameters{\n\t\tAPI: th.NewDefaultTestWorkerPorcelainAPI(),\n\n\t\tMinerAddr: addrs[4],\n\t\tMinerOwnerAddr: addrs[3],\n\t\tMinerWorker: blockSignerAddr,\n\t\tWorkerSigner: mockSigner,\n\n\t\tGetStateTree: getStateTree,\n\t\tGetWeight: getWeightTest,\n\t\tGetAncestors: getAncestors,\n\n\t\tMessageSource: pool,\n\t\tProcessor: consensus.NewDefaultProcessor(),\n\t\tPowerTable: &th.TestView{},\n\t\tBlockstore: bs,\n\t\tMessageStore: messages,\n\t}, CreatePoSTFunc)\n\n\t// addr3 doesn't correspond to an extant account, so this will trigger errAccountNotFound -- a temporary failure.\n\tmsg1 := types.NewMessage(addrs[2], addrs[0], 0, types.ZeroAttoFIL, \"\", nil)\n\tsmsg1, err := types.NewSignedMessage(*msg1, &mockSigner, types.NewGasPrice(1), types.NewGasUnits(0))\n\trequire.NoError(t, err)\n\n\t// This is actually okay and should result in a receipt\n\tmsg2 := types.NewMessage(addrs[0], addrs[1], 0, types.ZeroAttoFIL, \"\", nil)\n\tsmsg2, err := types.NewSignedMessage(*msg2, &mockSigner, types.NewGasPrice(1), types.NewGasUnits(0))\n\trequire.NoError(t, err)\n\n\t// add the following and then increment the actor nonce at addrs[1], nonceTooLow, a permanent error.\n\tmsg3 := types.NewMessage(addrs[1], addrs[0], 0, types.ZeroAttoFIL, \"\", nil)\n\tsmsg3, err := types.NewSignedMessage(*msg3, &mockSigner, types.NewGasPrice(1), types.NewGasUnits(0))\n\trequire.NoError(t, err)\n\n\tmsg4 := types.NewMessage(addrs[1], addrs[2], 1, types.ZeroAttoFIL, \"\", nil)\n\tsmsg4, err := types.NewSignedMessage(*msg4, &mockSigner, types.NewGasPrice(1), types.NewGasUnits(0))\n\trequire.NoError(t, err)\n\n\t_, err = pool.Add(ctx, smsg1, 0)\n\tassert.NoError(t, err)\n\t_, err = pool.Add(ctx, smsg2, 0)\n\tassert.NoError(t, err)\n\t_, err = pool.Add(ctx, smsg3, 0)\n\tassert.NoError(t, err)\n\t_, err = pool.Add(ctx, smsg4, 0)\n\tassert.NoError(t, err)\n\n\tassert.Len(t, pool.Pending(), 4)\n\n\t// Set actor nonce past nonce of message in pool.\n\t// Have to do this here to get a permanent error in the pool.\n\tact, err := st.GetActor(ctx, addrs[1])\n\trequire.NoError(t, err)\n\n\tact.Nonce = types.Uint64(2)\n\terr = st.SetActor(ctx, addrs[1], act)\n\trequire.NoError(t, err)\n\n\tstateRoot, err := st.Flush(ctx)\n\trequire.NoError(t, err)\n\n\tbaseBlock := types.Block{\n\t\tParents: types.NewTipSetKey(newCid()),\n\t\tHeight: types.Uint64(100),\n\t\tStateRoot: stateRoot,\n\t\tProof: types.PoStProof{},\n\t}\n\tblk, err := worker.Generate(ctx, th.RequireNewTipSet(t, &baseBlock), nil, types.PoStProof{}, 0)\n\tassert.NoError(t, err)\n\n\t// This is the temporary failure + the good message,\n\t// which will be removed by the node if this block is accepted.\n\tassert.Len(t, pool.Pending(), 2)\n\tassert.Contains(t, pool.Pending(), smsg1)\n\tassert.Contains(t, pool.Pending(), smsg2)\n\n\t// message and receipts can be loaded from message store and have\n\t// length 1.\n\tmsgs, err := messages.LoadMessages(ctx, blk.Messages)\n\trequire.NoError(t, err)\n\tassert.Len(t, msgs, 1) // This is the good message\n\trcpts, err := messages.LoadReceipts(ctx, blk.MessageReceipts)\n\trequire.NoError(t, err)\n\tassert.Len(t, rcpts, 1)\n}", "title": "" }, { "docid": "6bd230d22276bf4ed162fa2267ae75c0", "score": "0.49852902", "text": "func (m *MockEnvelopeAgnosticProtocol) Encode(arg0 wire.Value, arg1 io.Writer) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Encode\", arg0, 
arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "82b3b7f3f265e4f6e2192c1c9f55f6e5", "score": "0.4981621", "text": "func redisPoolForTest(t *testing.T, prefix string) *redis.Pool {\n\tt.Helper()\n\n\tpool := &redis.Pool{\n\t\tMaxIdle: 3,\n\t\tIdleTimeout: 240 * time.Second,\n\t\tDial: func() (redis.Conn, error) {\n\t\t\treturn redis.Dial(\"tcp\", \"127.0.0.1:6379\")\n\t\t},\n\t\tTestOnBorrow: func(c redis.Conn, t time.Time) error {\n\t\t\t_, err := c.Do(\"PING\")\n\t\t\treturn err\n\t\t},\n\t}\n\n\tc := pool.Get()\n\tt.Cleanup(func() {\n\t\tc.Close()\n\t})\n\n\tif err := deleteAllKeysWithPrefix(c, prefix); err != nil {\n\t\tt.Logf(\"Could not clear test prefix name=%q prefix=%q error=%v\", t.Name(), prefix, err)\n\t}\n\n\treturn pool\n}", "title": "" }, { "docid": "7846f2685b042108f434f17ecf113843", "score": "0.49813592", "text": "func (m *MockcodecSerializable) Init(adapter Adapter) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Init\", adapter)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "476e6d0ad60ee3d8952e92856599ebd6", "score": "0.49799293", "text": "func (m *MockOptions) SetMultiReaderIteratorPool(value encoding.MultiReaderIteratorPool) Options {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetMultiReaderIteratorPool\", value)\n\tret0, _ := ret[0].(Options)\n\treturn ret0\n}", "title": "" }, { "docid": "3a004ce1cef4dcdbaeb7c3886c297563", "score": "0.49713185", "text": "func mockSetup() {\n\ttestMux = http.NewServeMux()\n\ttestServer = httptest.NewServer(testMux)\n\n\ttestGithubClient = github.NewClient(nil)\n\tu, _ := url.Parse(testServer.URL)\n\ttestGithubClient.BaseURL = u\n\ttestGithubClient.UploadURL = u\n\n\ttestKeyCollector = &KeyCollector{\n\t\tgithubClient: testGithubClient,\n\t\thttpClient: &http.Client{},\n\t\tgithubKeysURL: fmt.Sprintf(\"%s/%%s.keys\", u),\n\t}\n}", "title": "" }, { "docid": "181f24fbc1baac767acdc470224822ce", "score": "0.49685198", "text": "func (m *MockAdminOptions) TagEncoderOptions() serialize.TagEncoderOptions {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"TagEncoderOptions\")\n\tret0, _ := ret[0].(serialize.TagEncoderOptions)\n\treturn ret0\n}", "title": "" }, { "docid": "c315164488688c245bb8ccd9be0b22bf", "score": "0.49651557", "text": "func NewMockPoolFrom(i iface.Pool) *MockPool {\n\treturn &MockPool{\n\t\tBorrowFunc: &PoolBorrowFunc{\n\t\t\tdefaultHook: i.Borrow,\n\t\t},\n\t\tBorrowTimeoutFunc: &PoolBorrowTimeoutFunc{\n\t\t\tdefaultHook: i.BorrowTimeout,\n\t\t},\n\t\tCloseFunc: &PoolCloseFunc{\n\t\t\tdefaultHook: i.Close,\n\t\t},\n\t\tReleaseFunc: &PoolReleaseFunc{\n\t\t\tdefaultHook: i.Release,\n\t\t},\n\t}\n}", "title": "" }, { "docid": "535f8468d7ddca25cbc2f0558e0dbba0", "score": "0.49602103", "text": "func TestEncodeBase64(t *testing.T) {\n\tenc := base64er{}\n\tvar c compressor\n\n\tc = noCompression{}\n\tfor _, p := range pairs {\n\t\tpl, err := c.compress(enc.binarise(p.val))\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Compress error %v\", err)\n\t\t}\n\n\t\tgot, err := enc.encode(pl)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Encoder error %v\", err)\n\t\t}\n\n\t\tif !bytes.Equal(got, p.b64) {\n\t\t\tt.Errorf(\"Wrongly uncompressed base64 encoding: got: %v exp: %v\",\n\t\t\t\tstring(got), string(p.b64))\n\t\t}\n\t}\n\n\tc = zlibCompression{level: DefaultCompression}\n\tfor _, p := range pairs {\n\t\tpl, err := c.compress(enc.binarise(p.val))\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Compress error %v\", err)\n\t\t}\n\n\t\tgot, err := enc.encode(pl)\n\t\tif err != nil 
{\n\t\t\tt.Errorf(\"Encoder error %v\", err)\n\t\t}\n\n\t\tif !bytes.Equal(got, p.b64c) {\n\t\t\tt.Errorf(\"Wrongly uncompressed base64 encoding: got: %v exp: %v\",\n\t\t\t\tstring(got), string(p.b64))\n\t\t}\n\t}\n}", "title": "" }, { "docid": "4985cabaf6723e11f17be94ce86a7ac2", "score": "0.4950939", "text": "func (m *MockAdminOptions) AsyncWriteWorkerPool() sync.PooledWorkerPool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AsyncWriteWorkerPool\")\n\tret0, _ := ret[0].(sync.PooledWorkerPool)\n\treturn ret0\n}", "title": "" }, { "docid": "38d606f6dc53bcd43ce3f00d87148464", "score": "0.49476597", "text": "func (m *MockOptions) SeriesIteratorPoolSize() pool.Size {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SeriesIteratorPoolSize\")\n\tret0, _ := ret[0].(pool.Size)\n\treturn ret0\n}", "title": "" }, { "docid": "78b2bc4253a478c5a1139dbb30ee959f", "score": "0.49439412", "text": "func TestNewPeerPool(t *testing.T) {\n\n\tpool, err := NewPeerPool([]string{\"127.0.0.1:8005\", \"192.168.1.1:8005\"})\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tt.Logf(\"pool:%#v\", pool)\n\ttime.Sleep(time.Second)\n}", "title": "" }, { "docid": "05790ec9d26def30c7d4aee0ddf0b19e", "score": "0.49435097", "text": "func (m *MockOptions) SetHostQueueOpsArrayPoolSize(value pool.Size) Options {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHostQueueOpsArrayPoolSize\", value)\n\tret0, _ := ret[0].(Options)\n\treturn ret0\n}", "title": "" }, { "docid": "d81c2a3c85af5684d3ac8a9a475e3d90", "score": "0.49425727", "text": "func (m *MockconnectionPool) Close() {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Close\")\n}", "title": "" }, { "docid": "b9db4cf30f541e520515bdf2eee067da", "score": "0.49386007", "text": "func (m *MockOptions) SegmentReaderPool() xio.SegmentReaderPool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SegmentReaderPool\")\n\tret0, _ := ret[0].(xio.SegmentReaderPool)\n\treturn ret0\n}", "title": "" }, { "docid": "b049f3a3130373891f44e1dc7609ec84", "score": "0.49378845", "text": "func TestNewPool(t *testing.T) {\r\n\tpool := NewPool(1000, 10000)\r\n\tdefer pool.Release()\r\n\r\n\titerations := 1000000\r\n\tvar counter uint64\r\n\r\n\twg := sync.WaitGroup{}\r\n\twg.Add(iterations)\r\n\tfor i := 0; i < iterations; i++ {\r\n\t\targ := uint64(1)\r\n\t\tjob := func() {\r\n\t\t\tdefer wg.Done()\r\n\t\t\tatomic.AddUint64(&counter, arg)\r\n\t\t}\r\n\r\n\t\tpool.JobQueue <- job\r\n\t}\r\n\twg.Wait()\r\n\r\n\tcounterFinal := atomic.LoadUint64(&counter)\r\n\tif uint64(iterations) != counterFinal {\r\n\t\tt.Errorf(\"iterations %v is not equal counterFinal %v\", iterations, counterFinal)\r\n\t}\r\n}", "title": "" }, { "docid": "0d63f1863937f22d45be8cf004603aec", "score": "0.4933986", "text": "func TestEncoder(t *testing.T) {\n\tfor _, td := range coderTestdata {\n\t\tfor _, formatName := range []string{\"Compact\", \"Escaped\", \"Indented\"} {\n\t\t\tfor _, typeName := range []string{\"Token\", \"Value\", \"TokenDelims\"} {\n\t\t\t\tt.Run(path.Join(td.name.name, typeName, formatName), func(t *testing.T) {\n\t\t\t\t\ttestEncoder(t, td.name.where, formatName, typeName, td)\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "11123ec9acbbb09950d1232c4f36ca2a", "score": "0.49316895", "text": "func (m *MockDatabaseBlockPool) Init(alloc DatabaseBlockAllocate) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Init\", alloc)\n}", "title": "" }, { "docid": "765a52b08345cb87a63a1930acb1f2f0", "score": "0.49303174", "text": "func (m *MockBackend) AddStoragePool(arg0 storage.Pool) 
{\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"AddStoragePool\", arg0)\n}", "title": "" }, { "docid": "b9269a673d94e15b22acbb4e56935b9c", "score": "0.49301353", "text": "func TestQueueing(t *testing.T) {\n\tnumWorkers, numJobs := 1, 10\n\n\tpool, poolErr := CreatePool(numWorkers, func(data interface{}) interface{} {\n\t\ttime.Sleep(50 * time.Millisecond)\n\t\treturn data\n\t}).Open()\n\n\tif poolErr != nil {\n\t\tt.Errorf(\"Error starting pool: \", poolErr)\n\t\treturn\n\t}\n\n\toutChan := make(chan int)\n\tinChan := make(chan int)\n\n\tfor i := 0; i < numJobs; i++ {\n\t\tgo func() {\n\t\t\tval := <-inChan\n\t\t\tresult, _ := pool.SendWork(val)\n\t\t\toutChan <- result.(int)\n\t\t}()\n\t}\n\n\tfor i := 0; i < numJobs; i++ {\n\t\tinChan <- i\n\t\ttime.Sleep(5 * time.Millisecond)\n\t}\n\n\tfor i := 0; i < numJobs; i++ {\n\t\tval := <-outChan\n\t\tif val != i {\n\t\t\tt.Errorf(\"Wrong value, expected %v, got %v\", i, val)\n\t\t}\n\t}\n\n\tpool.Close()\n}", "title": "" }, { "docid": "0c11f6eefb15dfd3221d56c05829f511", "score": "0.49295127", "text": "func (_m *MockRedisClient) ObjectEncoding(key string) *redis.StringCmd {\n\tret := _m.Called(key)\n\n\tvar r0 *redis.StringCmd\n\tif rf, ok := ret.Get(0).(func(string) *redis.StringCmd); ok {\n\t\tr0 = rf(key)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*redis.StringCmd)\n\t\t}\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "77acfc988a9a7a6e0a2ef4cd2071fcc1", "score": "0.4929045", "text": "func (m *MockOptions) TagEncoderOptions() serialize.TagEncoderOptions {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"TagEncoderOptions\")\n\tret0, _ := ret[0].(serialize.TagEncoderOptions)\n\treturn ret0\n}", "title": "" }, { "docid": "61047cdb44fe29646d45207677521a89", "score": "0.49273074", "text": "func (m *MockManager) RegisterCodec(arg0 uint16, arg1 Codec) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"RegisterCodec\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "title": "" }, { "docid": "a26507ad8e8454441d56f90964cb6492", "score": "0.49185067", "text": "func (m *MockFetchBlockMetadataResultsPool) Put(res FetchBlockMetadataResults) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Put\", res)\n}", "title": "" }, { "docid": "e792970d53e7cd3f80ae2a358f925652", "score": "0.4918321", "text": "func (m *MockAdminOptions) CheckedBytesPool() pool.CheckedBytesPool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"CheckedBytesPool\")\n\tret0, _ := ret[0].(pool.CheckedBytesPool)\n\treturn ret0\n}", "title": "" }, { "docid": "d33b2464c8d1a9cfdeafc2d0d7cfeb10", "score": "0.4917115", "text": "func (m *MockOptions) SetWriteTaggedOpPoolSize(value pool.Size) Options {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetWriteTaggedOpPoolSize\", value)\n\tret0, _ := ret[0].(Options)\n\treturn ret0\n}", "title": "" }, { "docid": "84a68cb1e68d1c06b140779f377e1e15", "score": "0.49088445", "text": "func TestPeerPool_PushSyncMessage(t *testing.T) {\n\n\tpool, err := NewPeerPool([]string{\"127.0.0.1:8005\", \"192.168.1.1:8005\"})\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tt.Logf(\"pool:%#v\", pool)\n\ttime.Sleep(time.Second)\n\n\tpool.PushSyncMessage(&SyncMessage{\n\t\tType: SyncRegType,\n\t\tContent: &pb.RegisterRequest{\n\t\t\tAction: pb.ActionType_Replication,\n\t\t\tInstance: &pb.ServiceInstance{\n\t\t\t\tSegment: \"dev\",\n\t\t\t\tServiceName: \"com.busgo.trade.proto.TradeService\",\n\t\t\t\tIp: \"192.168.1.1\",\n\t\t\t\tPort: 8001,\n\t\t\t\tMetadata: make(map[string]string),\n\t\t\t\tRegTimestamp: 
time.Now().UnixNano(),\n\t\t\t\tUpTimestamp: time.Now().UnixNano(),\n\t\t\t\tRenewTimestamp: time.Now().UnixNano(),\n\t\t\t\tDirtyTimestamp: time.Now().UnixNano(),\n\t\t\t\tLatestTimestamp: time.Now().UnixNano(),\n\t\t\t},\n\t\t},\n\t})\n\n\tt.Logf(\"push the sync reg message success....\")\n\n\ttime.Sleep(time.Second)\n}", "title": "" }, { "docid": "318f97f4393dd39f772a91b3bf8d6578", "score": "0.49005038", "text": "func TestSynchronizeEncoding(t *testing.T) {\n\t// tbi\n}", "title": "" }, { "docid": "2af694cc12dfb5fc6a4d1f470794a6e2", "score": "0.48954526", "text": "func TestEncoder(t *testing.T) {\n\ttests := []struct {\n\t\tf encodeFunc // function to use to encode\n\t\tin interface{} // input value\n\t\twantBytes []byte // expected bytes\n\t\twantN int // expected number of bytes written\n\t\terr error // expected error\n\t}{\n\t\t// Bool\n\t\t{fEncodeBool, false, []byte{0x00, 0x00, 0x00, 0x00}, 4, nil},\n\t\t{fEncodeBool, true, []byte{0x00, 0x00, 0x00, 0x01}, 4, nil},\n\t\t// Expected Failure -- Short write\n\t\t{fEncodeBool, true, []byte{0x00, 0x00, 0x00}, 3, &MarshalError{ErrorCode: ErrIO}},\n\n\t\t// Double\n\t\t{fEncodeDouble, float64(0), []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, 8, nil},\n\t\t{fEncodeDouble, float64(3.141592653589793), []byte{0x40, 0x09, 0x21, 0xfb, 0x54, 0x44, 0x2d, 0x18}, 8, nil},\n\t\t{fEncodeDouble, float64(math.Inf(-1)), []byte{0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, 8, nil},\n\t\t{fEncodeDouble, float64(math.Inf(0)), []byte{0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, 8, nil},\n\t\t// Expected Failure -- Short write\n\t\t{fEncodeDouble, float64(3.141592653589793), []byte{0x40, 0x09, 0x21, 0xfb, 0x54, 0x44, 0x2d}, 7, &MarshalError{ErrorCode: ErrIO}},\n\n\t\t// Enum\n\t\t{fEncodeEnum, int32(0), []byte{0x00, 0x00, 0x00, 0x00}, 4, nil},\n\t\t{fEncodeEnum, int32(1), []byte{0x00, 0x00, 0x00, 0x01}, 4, nil},\n\t\t// Expected Failures -- Invalid enum values\n\t\t{fEncodeEnum, int32(2), []byte{}, 0, &MarshalError{ErrorCode: ErrBadEnumValue}},\n\t\t{fEncodeEnum, int32(1234), []byte{}, 0, &MarshalError{ErrorCode: ErrBadEnumValue}},\n\n\t\t// FixedOpaque\n\t\t{fEncodeFixedOpaque, []byte{0x01}, []byte{0x01, 0x00, 0x00, 0x00}, 4, nil},\n\t\t{fEncodeFixedOpaque, []byte{0x01, 0x02}, []byte{0x01, 0x02, 0x00, 0x00}, 4, nil},\n\t\t{fEncodeFixedOpaque, []byte{0x01, 0x02, 0x03}, []byte{0x01, 0x02, 0x03, 0x00}, 4, nil},\n\t\t{fEncodeFixedOpaque, []byte{0x01, 0x02, 0x03, 0x04}, []byte{0x01, 0x02, 0x03, 0x04}, 4, nil},\n\t\t{fEncodeFixedOpaque, []byte{0x01, 0x02, 0x03, 0x04, 0x05}, []byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x00, 0x00, 0x00}, 8, nil},\n\t\t// Expected Failure -- Short write\n\t\t{fEncodeFixedOpaque, []byte{0x01}, []byte{0x01, 0x00, 0x00}, 3, &MarshalError{ErrorCode: ErrIO}},\n\n\t\t// Float\n\t\t{fEncodeFloat, float32(0), []byte{0x00, 0x00, 0x00, 0x00}, 4, nil},\n\t\t{fEncodeFloat, float32(3.14), []byte{0x40, 0x48, 0xF5, 0xC3}, 4, nil},\n\t\t{fEncodeFloat, float32(1234567.0), []byte{0x49, 0x96, 0xB4, 0x38}, 4, nil},\n\t\t{fEncodeFloat, float32(math.Inf(-1)), []byte{0xFF, 0x80, 0x00, 0x00}, 4, nil},\n\t\t{fEncodeFloat, float32(math.Inf(0)), []byte{0x7F, 0x80, 0x00, 0x00}, 4, nil},\n\t\t// Expected Failure -- Short write\n\t\t{fEncodeFloat, float32(3.14), []byte{0x40, 0x48, 0xF5}, 3, &MarshalError{ErrorCode: ErrIO}},\n\n\t\t// Hyper\n\t\t{fEncodeHyper, int64(0), []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, 8, nil},\n\t\t{fEncodeHyper, int64(1 << 34), []byte{0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00}, 8, nil},\n\t\t{fEncodeHyper, 
int64(1 << 42), []byte{0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00}, 8, nil},\n\t\t{fEncodeHyper, int64(9223372036854775807), []byte{0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}, 8, nil},\n\t\t{fEncodeHyper, int64(-1), []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}, 8, nil},\n\t\t{fEncodeHyper, int64(-9223372036854775808), []byte{0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, 8, nil},\n\t\t// Expected Failure -- Short write\n\t\t{fEncodeHyper, int64(-9223372036854775808), []byte{0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, 7, &MarshalError{ErrorCode: ErrIO}},\n\n\t\t// Int\n\t\t{fEncodeInt, int32(0), []byte{0x00, 0x00, 0x00, 0x00}, 4, nil},\n\t\t{fEncodeInt, int32(262144), []byte{0x00, 0x04, 0x00, 0x00}, 4, nil},\n\t\t{fEncodeInt, int32(2147483647), []byte{0x7F, 0xFF, 0xFF, 0xFF}, 4, nil},\n\t\t{fEncodeInt, int32(-1), []byte{0xFF, 0xFF, 0xFF, 0xFF}, 4, nil},\n\t\t{fEncodeInt, int32(-2147483648), []byte{0x80, 0x00, 0x00, 0x00}, 4, nil},\n\t\t// Expected Failure -- Short write\n\t\t{fEncodeInt, int32(2147483647), []byte{0x7F, 0xFF, 0xFF}, 3, &MarshalError{ErrorCode: ErrIO}},\n\n\t\t// Opaque\n\t\t{fEncodeOpaque, []byte{0x01}, []byte{0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00}, 8, nil},\n\t\t{fEncodeOpaque, []byte{0x01, 0x02, 0x03}, []byte{0x00, 0x00, 0x00, 0x03, 0x01, 0x02, 0x03, 0x00}, 8, nil},\n\t\t// Expected Failures -- Short write in length and payload\n\t\t{fEncodeOpaque, []byte{0x01}, []byte{0x00, 0x00, 0x00}, 3, &MarshalError{ErrorCode: ErrIO}},\n\t\t{fEncodeOpaque, []byte{0x01}, []byte{0x00, 0x00, 0x00, 0x01, 0x01}, 5, &MarshalError{ErrorCode: ErrIO}},\n\n\t\t// String\n\t\t{fEncodeString, \"\", []byte{0x00, 0x00, 0x00, 0x00}, 4, nil},\n\t\t{fEncodeString, \"xdr\", []byte{0x00, 0x00, 0x00, 0x03, 0x78, 0x64, 0x72, 0x00}, 8, nil},\n\t\t{fEncodeString, \"τ=2π\", []byte{0x00, 0x00, 0x00, 0x06, 0xCF, 0x84, 0x3D, 0x32, 0xCF, 0x80, 0x00, 0x00}, 12, nil},\n\t\t// Expected Failures -- Short write in length and payload\n\t\t{fEncodeString, \"xdr\", []byte{0x00, 0x00, 0x00}, 3, &MarshalError{ErrorCode: ErrIO}},\n\t\t{fEncodeString, \"xdr\", []byte{0x00, 0x00, 0x00, 0x03, 0x78}, 5, &MarshalError{ErrorCode: ErrIO}},\n\n\t\t// Uhyper\n\t\t{fEncodeUhyper, uint64(0), []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, 8, nil},\n\t\t{fEncodeUhyper, uint64(1 << 34), []byte{0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00}, 8, nil},\n\t\t{fEncodeUhyper, uint64(1 << 42), []byte{0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00}, 8, nil},\n\t\t{fEncodeUhyper, uint64(18446744073709551615), []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}, 8, nil},\n\t\t{fEncodeUhyper, uint64(9223372036854775808), []byte{0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, 8, nil},\n\t\t// Expected Failure -- Short write\n\t\t{fEncodeUhyper, uint64(9223372036854775808), []byte{0x80}, 1, &MarshalError{ErrorCode: ErrIO}},\n\n\t\t// Uint\n\t\t{fEncodeUint, uint32(0), []byte{0x00, 0x00, 0x00, 0x00}, 4, nil},\n\t\t{fEncodeUint, uint32(262144), []byte{0x00, 0x04, 0x00, 0x00}, 4, nil},\n\t\t{fEncodeUint, uint32(4294967295), []byte{0xFF, 0xFF, 0xFF, 0xFF}, 4, nil},\n\t\t// Expected Failure -- Short write\n\t\t{fEncodeUint, uint32(262144), []byte{0x00, 0x04, 0x00}, 3, &MarshalError{ErrorCode: ErrIO}},\n\t}\n\n\tvalidEnums := make(map[int32]bool)\n\tvalidEnums[0] = true\n\tvalidEnums[1] = true\n\n\tvar err error\n\tvar n int\n\n\tfor i, test := range tests {\n\t\terr = nil\n\t\tdata := newFixedWriter(test.wantN)\n\t\tenc := NewEncoder(data)\n\t\tswitch test.f {\n\t\tcase fEncodeBool:\n\t\t\tin := 
test.in.(bool)\n\t\t\tn, err = enc.EncodeBool(in)\n\t\tcase fEncodeDouble:\n\t\t\tin := test.in.(float64)\n\t\t\tn, err = enc.EncodeDouble(in)\n\t\tcase fEncodeEnum:\n\t\t\tin := test.in.(int32)\n\t\t\tn, err = enc.EncodeEnum(in, validEnums)\n\t\tcase fEncodeFixedOpaque:\n\t\t\tin := test.in.([]byte)\n\t\t\tn, err = enc.EncodeFixedOpaque(in)\n\t\tcase fEncodeFloat:\n\t\t\tin := test.in.(float32)\n\t\t\tn, err = enc.EncodeFloat(in)\n\t\tcase fEncodeHyper:\n\t\t\tin := test.in.(int64)\n\t\t\tn, err = enc.EncodeHyper(in)\n\t\tcase fEncodeInt:\n\t\t\tin := test.in.(int32)\n\t\t\tn, err = enc.EncodeInt(in)\n\t\tcase fEncodeOpaque:\n\t\t\tin := test.in.([]byte)\n\t\t\tn, err = enc.EncodeOpaque(in)\n\t\tcase fEncodeString:\n\t\t\tin := test.in.(string)\n\t\t\tn, err = enc.EncodeString(in)\n\t\tcase fEncodeUhyper:\n\t\t\tin := test.in.(uint64)\n\t\t\tn, err = enc.EncodeUhyper(in)\n\t\tcase fEncodeUint:\n\t\t\tin := test.in.(uint32)\n\t\t\tn, err = enc.EncodeUint(in)\n\t\tdefault:\n\t\t\tt.Errorf(\"%v #%d unrecognized function\", test.f, i)\n\t\t\tcontinue\n\t\t}\n\n\t\t// First ensure the number of bytes written is the expected\n\t\t// value and the error is the expected one.\n\t\ttestName := fmt.Sprintf(\"%v #%d\", test.f, i)\n\t\ttestExpectedMRet(t, testName, n, test.wantN, err, test.err)\n\n\t\t// Finally, ensure the written bytes are what is expected.\n\t\trv := data.Bytes()\n\t\tif len(rv) != len(test.wantBytes) {\n\t\t\tt.Errorf(\"%s: unexpected len - got: %v want: %v\\n\",\n\t\t\t\ttestName, len(rv), len(test.wantBytes))\n\t\t\tcontinue\n\t\t}\n\t\tif !reflect.DeepEqual(rv, test.wantBytes) {\n\t\t\tt.Errorf(\"%s: unexpected result - got: %v want: %v\\n\",\n\t\t\t\ttestName, rv, test.wantBytes)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "title": "" }, { "docid": "ae359b50955fe6971b6533b52c371d9a", "score": "0.48946336", "text": "func (m *MockAdminOptions) SetHostQueueOpsArrayPoolSize(value pool.Size) Options {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SetHostQueueOpsArrayPoolSize\", value)\n\tret0, _ := ret[0].(Options)\n\treturn ret0\n}", "title": "" }, { "docid": "9e27addf30a5936a5c171b01e520954c", "score": "0.48930457", "text": "func (p *UUIDPool) fillPool() {\n\tfor i := 0; i < len(p.pool)/2; i++ {\n\t\tp.pool <- uuid.New()\n\t}\n}", "title": "" }, { "docid": "475ef75f4b84fddfd4866cee41318fba", "score": "0.4887391", "text": "func (m *MockMachinePoolConverter) ToInternal(arg0 interface{}, arg1 *api.MachinePool) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"ToInternal\", arg0, arg1)\n}", "title": "" } ]
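The record above pairs the query "EncoderPool mocks base method." with a gomock-generated MockOptions.EncoderPool stub and its recorder. A short sketch of how such a stub is typically driven from a test follows; the NewMockOptions constructor and the gomock import path are the usual mockgen conventions and are assumed here rather than shown in the passages, and returning nil simply keeps the example free of a concrete pool value.

package example_test

import (
	"testing"

	"github.com/golang/mock/gomock" // assumed import path; newer codegen may use go.uber.org/mock/gomock
)

func TestOptionsExposeEncoderPool(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	// NewMockOptions is emitted by mockgen alongside MockOptions (assumed, not shown above).
	opts := NewMockOptions(ctrl)

	// Record the expectation; this mirrors the RecordCallWithMethodType recorder seen in the passages.
	opts.EXPECT().EncoderPool().Return(nil)

	if got := opts.EncoderPool(); got != nil {
		t.Fatalf("expected the stubbed nil encoder pool, got %v", got)
	}
}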
17abb6a031d59e04db8ad6e4ef41195d
ValidateHostname ensures that a cluster name is also a valid host name according to RFC 1123.
[ { "docid": "6ac1254a6e134190db065781a86630a1", "score": "0.86385965", "text": "func validateHostname(name string) error {\n\tif len(name) == 0 {\n\t\treturn xerrors.Errorf(\"no name provided %w\", ErrorClusterInvalidName)\n\t}\n\n\tif name[0] == '-' || name[len(name)-1] == '-' {\n\t\treturn xerrors.Errorf(\"hostname [%s] must not start or end with - (dash): %w\", name, ErrorClusterInvalidName)\n\t}\n\n\tfor _, c := range name {\n\t\tswitch {\n\t\tcase '0' <= c && c <= '9':\n\t\tcase 'a' <= c && c <= 'z':\n\t\tcase 'A' <= c && c <= 'Z':\n\t\tcase c == '-':\n\t\t\tbreak\n\t\tdefault:\n\t\t\treturn xerrors.Errorf(\"hostname [%s] contains characters other than 'Aa-Zz', '0-9' or '-': %w\", ErrorClusterInvalidName)\n\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" } ]
[ { "docid": "7a9278bdf855f9f591509cd049d1d044", "score": "0.74752694", "text": "func IsValidHostname(host string) bool {\n\n\treturn host != \"\" && !strings.HasPrefix(host, \".\") && !strings.HasSuffix(host, \":\")\n}", "title": "" }, { "docid": "8273f1d30387138a3ee28358e8d48c8a", "score": "0.69806457", "text": "func NaivelyValidateHostname(hostname string) bool {\n\treturn hostnameRegexp.MatchString(hostname)\n}", "title": "" }, { "docid": "fc3057d894bd504bad494f702d568b97", "score": "0.69745153", "text": "func validHostname(host string) bool {\n\thost = strings.TrimSuffix(host, \".\")\n\n\tif len(host) == 0 {\n\t\treturn false\n\t}\n\n\tfor i, part := range strings.Split(host, \".\") {\n\t\tif part == \"\" {\n\t\t\t// Empty label.\n\t\t\treturn false\n\t\t}\n\t\tif i == 0 && part == \"*\" {\n\t\t\t// Only allow full left-most wildcards, as those are the only ones\n\t\t\t// we match, and matching literal '*' characters is probably never\n\t\t\t// the expected behavior.\n\t\t\tcontinue\n\t\t}\n\t\tfor j, c := range part {\n\t\t\tif 'a' <= c && c <= 'z' {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif '0' <= c && c <= '9' {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif 'A' <= c && c <= 'Z' {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif c == '-' && j != 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif c == '_' || c == ':' {\n\t\t\t\t// Not valid characters in hostnames, but commonly\n\t\t\t\t// found in deployments outside the WebPKI.\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "fc3057d894bd504bad494f702d568b97", "score": "0.69745153", "text": "func validHostname(host string) bool {\n\thost = strings.TrimSuffix(host, \".\")\n\n\tif len(host) == 0 {\n\t\treturn false\n\t}\n\n\tfor i, part := range strings.Split(host, \".\") {\n\t\tif part == \"\" {\n\t\t\t// Empty label.\n\t\t\treturn false\n\t\t}\n\t\tif i == 0 && part == \"*\" {\n\t\t\t// Only allow full left-most wildcards, as those are the only ones\n\t\t\t// we match, and matching literal '*' characters is probably never\n\t\t\t// the expected behavior.\n\t\t\tcontinue\n\t\t}\n\t\tfor j, c := range part {\n\t\t\tif 'a' <= c && c <= 'z' {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif '0' <= c && c <= '9' {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif 'A' <= c && c <= 'Z' {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif c == '-' && j != 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif c == '_' || c == ':' {\n\t\t\t\t// Not valid characters in hostnames, but commonly\n\t\t\t\t// found in deployments outside the WebPKI.\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "e5977d7cf97f19b6dd30d36b40ab2ff4", "score": "0.6965516", "text": "func isValidHostname(s string) bool {\n\t// The root domain name is valid. See golang.org/issue/45715.\n\tif s == \".\" {\n\t\treturn true\n\t}\n\n\t// See RFC 1035, RFC 3696.\n\t// Presentation format has dots before every label except the first, and the\n\t// terminal empty label is optional here because we assume fully-qualified\n\t// (absolute) input. We must therefore reserve space for the first and last\n\t// labels' length octets in wire format, where they are necessary and the\n\t// maximum total length is 255.\n\t// So our _effective_ maximum is 253, but 254 is not rejected if the last\n\t// character is a dot.\n\tl := len(s)\n\tif l == 0 || l > 254 || l == 254 && s[l-1] != '.' 
{\n\t\treturn false\n\t}\n\n\tlast := byte('.')\n\tnonNumeric := false // true once we've seen a letter or hyphen\n\tpartlen := 0\n\tfor i := 0; i < len(s); i++ {\n\t\tc := s[i]\n\t\tswitch {\n\t\tdefault:\n\t\t\treturn false\n\t\tcase 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_':\n\t\t\tnonNumeric = true\n\t\t\tpartlen++\n\t\tcase '0' <= c && c <= '9':\n\t\t\t// fine\n\t\t\tpartlen++\n\t\tcase c == '-':\n\t\t\t// Byte before dash cannot be dot.\n\t\t\tif last == '.' {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tpartlen++\n\t\t\tnonNumeric = true\n\t\tcase c == '.':\n\t\t\t// Byte before dot cannot be dot, dash.\n\t\t\tif last == '.' || last == '-' {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tif partlen > 63 || partlen == 0 {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tpartlen = 0\n\t\t}\n\t\tlast = c\n\t}\n\tif last == '-' || partlen > 63 {\n\t\treturn false\n\t}\n\n\treturn nonNumeric\n}", "title": "" }, { "docid": "45b2441335c8f332896bfcb006cb66fb", "score": "0.69538265", "text": "func validateClusterName(v string) error {\n\tif err := validateNonEmpty(v); err != nil {\n\t\treturn err\n\t}\n\n\tif length := utf8.RuneCountInString(v); length < 1 || length > 253 {\n\t\treturn errors.New(\"must be between 1 and 253 characters\")\n\t}\n\n\tif strings.ToLower(v) != v {\n\t\treturn errors.New(\"must be lower case\")\n\t}\n\n\tif !isMatch(\"^[a-z0-9-.]*$\", v) {\n\t\treturn errors.New(\"only lower case alphanumeric [a-z0-9], dashes and dots are allowed\")\n\t}\n\n\tisAlphaNum := regexp.MustCompile(\"^[a-z0-9]$\").MatchString\n\n\t// If we got this far, we know the string is ASCII and has at least one character\n\tif !isAlphaNum(v[:1]) || !isAlphaNum(v[len(v)-1:]) {\n\t\treturn errors.New(\"must start and end with a lower case alphanumeric character [a-z0-9]\")\n\t}\n\n\tfor _, segment := range strings.Split(v, \".\") {\n\t\t// Each segment can have up to 63 characters\n\t\tif utf8.RuneCountInString(segment) > 63 {\n\t\t\treturn errors.New(\"no segment between dots can be more than 63 characters\")\n\t\t}\n\t\tif !isAlphaNum(segment[:1]) || !isAlphaNum(segment[len(segment)-1:]) {\n\t\t\treturn errors.New(\"segments between dots must start and end with a lower case alphanumeric character [a-z0-9]\")\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "95afa63370f4bf7e8f2429041775ab19", "score": "0.67483175", "text": "func SanitizeHostname(hostname string) string {\n\thostname = TrimCommonSuffixes(hostname)\n\treturn SanitizeLabel(hostname)\n}", "title": "" }, { "docid": "f082cfc3b5de2c43f8b1518e286e35ff", "score": "0.6739102", "text": "func (v *StringIsResolvableHostname) Validate(e *validator.Errors) {\n\tif isResolvableHostname(v.Field) {\n\t\treturn\n\t}\n\n\te.Add(v.Name, StringIsResolvableHostnameError(v))\n}", "title": "" }, { "docid": "6351abd8d0a1a5ccde77422fde993904", "score": "0.66603136", "text": "func (h *HostNameValidator) Validate(v interface{}) (munged interface{}, err error) {\n\tif val, ok := v.(string); ok {\n\t\tif err := RFC5891DNSIgnoreCase(val); err != nil {\n\t\t\treturn munged, jsonmap.NewValidationError(\"invalid hostname: %s\", err.Error())\n\t\t}\n\t\ts, err := idna.ToASCII(val)\n\t\tif err != nil {\n\t\t\treturn munged, jsonmap.NewValidationError(\"punycode error: %s\", err.Error())\n\t\t} else if len(s) > 255 {\n\t\t\treturn munged, jsonmap.NewValidationError(\"punycode translation exceeds maximum length\")\n\t\t}\n\t\treturn val, nil\n\t}\n\treturn munged, jsonmap.NewValidationError(\"invalid type\")\n}", "title": "" }, { "docid": 
"52d0b9ef2644ef613b94b618f8921b93", "score": "0.6548652", "text": "func (c *Certificate) VerifyHostname(h string) error {\n\t// IP addresses may be written in [ ].\n\tcandidateIP := h\n\tif len(h) >= 3 && h[0] == '[' && h[len(h)-1] == ']' {\n\t\tcandidateIP = h[1 : len(h)-1]\n\t}\n\tif ip := net.ParseIP(candidateIP); ip != nil {\n\t\t// We only match IP addresses against IP SANs.\n\t\t// See RFC 6125, Appendix B.2.\n\t\tfor _, candidate := range c.IPAddresses {\n\t\t\tif ip.Equal(candidate) {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\treturn HostnameError{c, candidateIP}\n\t}\n\n\tlowered := toLowerCaseASCII(h)\n\n\tif c.commonNameAsHostname() {\n\t\tif matchHostnames(toLowerCaseASCII(c.Subject.CommonName), lowered) {\n\t\t\treturn nil\n\t\t}\n\t} else {\n\t\tfor _, match := range c.DNSNames {\n\t\t\tif matchHostnames(toLowerCaseASCII(match), lowered) {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}\n\n\treturn HostnameError{c, h}\n}", "title": "" }, { "docid": "1d0b4bc6dcc541fe5dc8ed89ca04cf7a", "score": "0.6501832", "text": "func (v HostnameValidator) Validate() error {\n\treturn nil\n}", "title": "" }, { "docid": "fac302126fc685ffccde1473d6fa4ee3", "score": "0.6182571", "text": "func Hostname(expected string) echo.Checker {\n\treturn Each(func(r echoClient.Response) error {\n\t\tif r.Hostname != expected {\n\t\t\treturn fmt.Errorf(\"expected hostname %s, received %s\", expected, r.Hostname)\n\t\t}\n\t\treturn nil\n\t})\n}", "title": "" }, { "docid": "0226411a07db08b6a8f3f1f1f3362422", "score": "0.6168319", "text": "func (d *common) validateName(name string) error {\n\tif name == \"\" {\n\t\treturn fmt.Errorf(\"Name is required\")\n\t}\n\n\t// Don't allow ACL names to start with special port selector character to avoid allow LXD to define\n\t// special port selectors without risking conflict with user defined ACL names.\n\tif strings.HasPrefix(name, \"#\") {\n\t\treturn fmt.Errorf(\"Name cannot start with reserved character `#`\")\n\t}\n\n\t// Ensures we can differentiate an ACL name from an IP in rules that reference this ACL.\n\terr := shared.ValidHostname(name)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "e9774e8c8c8fdaaed15e19567c0595f9", "score": "0.6161597", "text": "func ValidNameserver(nameserver string) (string, error) {\n\n\tif nameserver == \"\" {\n\t\treturn \"\", fmt.Errorf(\"RFC2136 nameserver missing\")\n\t}\n\n\t// SplitHostPort Behavior\n\t// namserver host port err\n\t// 8.8.8.8 \"\" \"\" missing port in address\n\t// 8.8.8.8: \"8.8.8.8\" \"\" <nil>\n\t// 8.8.8.8.8:53 \"8.8.8.8\" 53 <nil>\n\t// nameserver.com \"\" \"\" missing port in address\n\t// nameserver.com: \"nameserver.com\" \"\" <nil>\n\t// nameserver.com:53 \"nameserver.com\" 53 <nil>\n\t// :53 \"\" 53 <nil>\n\thost, port, err := net.SplitHostPort(strings.TrimSpace(nameserver))\n\n\tif err != nil {\n\t\tif strings.Contains(err.Error(), \"missing port\") {\n\t\t\thost = nameserver\n\t\t}\n\t}\n\n\tif port == \"\" {\n\t\tport = defaultPort\n\t}\n\n\tif host != \"\" {\n\t\tif ipaddr := net.ParseIP(host); ipaddr == nil {\n\t\t\treturn \"\", fmt.Errorf(\"RFC2136 nameserver must be a valid IP Address, not %v\", host)\n\t\t}\n\t} else {\n\t\treturn \"\", fmt.Errorf(\"RFC2136 nameserver has no IP Address defined, %v\", nameserver)\n\t}\n\n\tnameserver = host + \":\" + port\n\n\treturn nameserver, nil\n}", "title": "" }, { "docid": "aa1ded16a0b4dd5ed5ef59924047e799", "score": "0.6132035", "text": "func (o *Node) SetHostname(v string) {\n\to.Hostname = v\n}", "title": "" }, { "docid": 
"d066111545218b471c109db889ab3732", "score": "0.6086352", "text": "func (h *Host) Validate() error {\n\terr := validateHostname(h.Name)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif h.IngressAddr == h.SubstanceAddr {\n\t\treturn fmt.Errorf(\"duplicate ingress port and egress port (host=%s, addr=%s)\", h.Name, h.IngressAddr.String())\n\t}\n\n\tif strings.Contains(h.EgressHost, \":\") {\n\t\treturn fmt.Errorf(\"egrsshost can not contain port number: host=%s, egress=%s\", h.Name, h.EgressHost)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "9d549a5b92c8b543a97164bdf2e34b95", "score": "0.60859424", "text": "func sanitizeHostname(hostname string) string {\n\treturn strings.Replace(hostname, \":\", \"-\", -1)\n}", "title": "" }, { "docid": "4d3ddb8dff97baccf82c87fc9fc615ec", "score": "0.60781175", "text": "func (o *HypercacheInstanceReadSummary) SetHostname(v string) {\n\to.Hostname = v\n}", "title": "" }, { "docid": "7cc265d99e820232002670945498ca44", "score": "0.5956135", "text": "func kubeClusterNamePartValidator(part string) error {\n\t// validate the suffix part adding a simple stub prefix \"a\" and\n\t// validating it as a full kube cluster name.\n\treturn types.ValidateKubeClusterName(\"a\" + part)\n}", "title": "" }, { "docid": "e5a0416552310012187f6d918df71a98", "score": "0.59370446", "text": "func (d CertDetector) verifyHostname(cert *x509.Certificate, domain string) string {\n\tif cert.VerifyHostname(domain) == nil {\n\t\treturn \"matched\"\n\t}\n\treturn \"mismatched\"\n}", "title": "" }, { "docid": "48afb2b7f404f43f869f8be63d51d1d9", "score": "0.5928795", "text": "func (m *AgentMutation) SetHostname(s string) {\n\tm.hostname = &s\n}", "title": "" }, { "docid": "cab165826c7a6beb80abb9c6c296cf36", "score": "0.5920144", "text": "func (c *Consumer) ResolveHostname(instance common.DebuggerInstance) string {\n\tlogPrefix := \"Consumer::ResolveHostname\"\n\n\tcurrHost := net.JoinHostPort(util.Localhost(), c.nsServerPort)\n\tinfo, err := util.FetchNewClusterInfoCache(currHost)\n\tif err != nil {\n\t\tlogging.Errorf(\"%s [%s:%s:%d] Unable to fetch cluster info cache, err : %v\",\n\t\t\tlogPrefix, c.workerName, c.tcpPort, c.Pid(), err)\n\t\treturn util.Localhost()\n\t}\n\n\texternalIP, err := info.GetExternalIPOfThisNode(instance.NodesExternalIP)\n\tif err != nil {\n\t\tlogging.Errorf(\"%s [%s:%s:%d] Unable to resolve host name, err : %v\",\n\t\t\tlogPrefix, c.workerName, c.tcpPort, c.Pid(), err)\n\t\treturn util.Localhost()\n\t}\n\treturn externalIP\n}", "title": "" }, { "docid": "b11f74a29cfbba2d12fe2ae4a13b0b6f", "score": "0.59192383", "text": "func (v *Validation) IsHostnamePort(val string) bool {\n\thost, port, err := net.SplitHostPort(val)\n\tif err != nil {\n\t\treturn false\n\t}\n\t// Port must be a iny <= 65535.\n\tif portNum, err := strconv.ParseInt(port, 10, 32); err != nil || portNum > 65535 || portNum < 1 {\n\t\treturn false\n\t}\n\t// If host is specified, it should match a DNS name\n\tif host != \"\" {\n\t\treturn constant.HostnameRegexRFC1123.MatchString(host)\n\t}\n\treturn true\n}", "title": "" }, { "docid": "042b189e26e208ecdb18b7e6f3689e37", "score": "0.5881655", "text": "func (c *Certificate) commonNameAsHostname() bool {\n\treturn !ignoreCN && !c.hasSANExtension() && validHostname(c.Subject.CommonName)\n}", "title": "" }, { "docid": "87ddaceeeea5c092111818834f1984b6", "score": "0.58652264", "text": "func NewHostname(parent *Hostnames) *Hostname {\n\thostname := &Hostname{parent: parent, CnameType: CnameTypeEdgeHostname}\n\thostname.Init()\n\n\treturn hostname\n}", 
"title": "" }, { "docid": "033e6dd21ac09e6db01e4994faca2989", "score": "0.5824656", "text": "func (m *PageMutation) SetHostname(s string) {\n\tm.hostname = &s\n}", "title": "" }, { "docid": "0f8d83d43b5fc8646b15201fc7a74c70", "score": "0.5818775", "text": "func isResolvableHostname(s string) bool {\n\t// resolvable host is OK\n\taddrs, err := net.LookupHost(s)\n\tif err != nil || len(addrs) == 0 {\n\t\treturn false\n\t}\n\n\t// IP addr is BAD\n\tif ip := net.ParseIP(s); ip != nil {\n\t\treturn false\n\t}\n\n\treturn true\n}", "title": "" }, { "docid": "b54f3e83fa1beb45a1158227f3b5ce02", "score": "0.58180195", "text": "func (m *ReferrerMutation) SetHostname(s string) {\n\tm.hostname = &s\n}", "title": "" }, { "docid": "2cd92bd93b5ddeb02467485e7551d11e", "score": "0.57996184", "text": "func (m *Provider) IsValidClusterName(clusterName string) (bool, error) {\n\treturn true, nil\n}", "title": "" }, { "docid": "df730a3e02cc9987fe41a1c72c396b47", "score": "0.57966256", "text": "func (s *RegisterInstanceInput) SetHostname(v string) *RegisterInstanceInput {\n\ts.Hostname = &v\n\treturn s\n}", "title": "" }, { "docid": "cf0a28994ab64dc0a0f7266a265970c8", "score": "0.5780167", "text": "func (s *CreateInstanceInput) SetHostname(v string) *CreateInstanceInput {\n\ts.Hostname = &v\n\treturn s\n}", "title": "" }, { "docid": "77ab0063bc2ba7465a7bbdb09baed903", "score": "0.5776438", "text": "func (h *Host) parseHostname(addr string) (string, error) {\n\ta, err := mail.ParseAddress(addr)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn strings.Split(a.Address, \"@\")[1], nil\n}", "title": "" }, { "docid": "4e96389cf0cb9151a4f3f96af287b4ee", "score": "0.57705235", "text": "func inputHostname() (hostname string) {\n\tfor {\n\t\tfmt.Print(\"\\nhostname: \")\n\t\tfmt.Scanln(&hostname)\n\t\tif len(hostname) == 0 {\n\t\t\tfmt.Println(\"Error. 
Hostname cannot be empty.\")\n\t\t} else {\n\t\t\treturn\n\t\t}\n\t}\n}", "title": "" }, { "docid": "95f6d34f6fe04353c73b10ad3a845bf6", "score": "0.57643574", "text": "func (s *UpdateInstanceInput) SetHostname(v string) *UpdateInstanceInput {\n\ts.Hostname = &v\n\treturn s\n}", "title": "" }, { "docid": "7865c899a0971abf07907088a5ca0b41", "score": "0.5740865", "text": "func (v *validator) ValidateHost() error {\n\treturn v.validateHostErr\n}", "title": "" }, { "docid": "a8d17621800989c0f34de375082c3a1d", "score": "0.5729294", "text": "func (s *Instance) SetHostname(v string) *Instance {\n\ts.Hostname = &v\n\treturn s\n}", "title": "" }, { "docid": "49c5cfa0be10bdd0eb130bd3df7a109c", "score": "0.57276577", "text": "func (c *Container) ComputeHostname() (string, error) {\n\thostnameStyle := strings.ToLower(c.TitusInfo.GetPassthroughAttributes()[hostnameStyleParam])\n\tswitch hostnameStyle {\n\tcase \"\":\n\t\treturn strings.ToLower(c.TaskID), nil\n\tcase \"ec2\":\n\t\thostname := fmt.Sprintf(\"ip-%s\", strings.Replace(c.Allocation.IPV4Address.Address.Address, \".\", \"-\", 3))\n\t\treturn hostname, nil\n\tdefault:\n\t\treturn \"\", &InvalidConfigurationError{Reason: fmt.Errorf(\"Unknown hostname style: %s\", hostnameStyle)}\n\t}\n}", "title": "" }, { "docid": "4fb616b512009376ba673d7a72a810f3", "score": "0.5698362", "text": "func (tcps *TCPServer) setHostname(host string) bool {\n\t_, port, _ := net.SplitHostPort(string(tcps.myAddr))\n\tnewAddr := Address(net.JoinHostPort(host, port))\n\t// try to ping ourselves\n\tif !Ping(newAddr) {\n\t\treturn false\n\t}\n\ttcps.myAddr = newAddr\n\treturn true\n}", "title": "" }, { "docid": "5f1d6e89c924e2e0db5fca35775ea19c", "score": "0.56927603", "text": "func IsValidDomainName(s string) bool {\n\t// See RFC 1035, RFC 3696.\n\tif len(s) == 0 {\n\t\treturn false\n\t}\n\tif len(s) > 255 {\n\t\treturn false\n\t}\n\n\tlast := byte('.')\n\tok := false // Ok once we've seen a letter.\n\tpartlen := 0\n\tfor i := 0; i < len(s); i++ {\n\t\tc := s[i]\n\t\tswitch {\n\t\tdefault:\n\t\t\treturn false\n\t\tcase 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_':\n\t\t\tok = true\n\t\t\tpartlen++\n\t\tcase '0' <= c && c <= '9':\n\t\t\t// fine\n\t\t\tpartlen++\n\t\tcase c == '-':\n\t\t\t// Byte before dash cannot be dot.\n\t\t\tif last == '.' {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tpartlen++\n\t\tcase c == '.':\n\t\t\t// Byte before dot cannot be dot, dash.\n\t\t\tif last == '.' 
|| last == '-' {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tif partlen > 63 || partlen == 0 {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tpartlen = 0\n\t\t}\n\t\tlast = c\n\t}\n\tif last == '-' || partlen > 63 {\n\t\treturn false\n\t}\n\n\treturn ok\n}", "title": "" }, { "docid": "edbbf3da692061a550efe78530d7b3d6", "score": "0.55900824", "text": "func (s *GetHostnameSuggestionOutput) SetHostname(v string) *GetHostnameSuggestionOutput {\n\ts.Hostname = &v\n\treturn s\n}", "title": "" }, { "docid": "88805519a4c6b8e7df0644781c007b68", "score": "0.5589609", "text": "func (hostnames *Hostnames) NewHostname() *Hostname {\n\thostname := NewHostname(hostnames)\n\thostnames.Hostnames.Items = append(hostnames.Hostnames.Items, hostname)\n\treturn hostname\n}", "title": "" }, { "docid": "18e0ab81247ad9028b4c6c0161e12a28", "score": "0.5589408", "text": "func validateName(name string) error {\n\trobotNameReg := `^[a-z0-9]+(?:[._-][a-z0-9]+)*$`\n\tlegal := regexp.MustCompile(robotNameReg).MatchString(name)\n\tif !legal {\n\t\treturn errors.BadRequestError(nil).WithMessage(\"robot name is not in lower case or contains illegal characters\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "ad13b0439204cae64984b1482f9f5c58", "score": "0.5587468", "text": "func domainNameValid(name string) error {\n\tif containErr := containsInvalidParts(name); containErr != nil {\n\t\treturn containErr\n\t}\n\tif utf8.RuneCountInString(name) > 50 {\n\t\treturn errors.New(\"Domain name must be at most 50.\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "7ceb3f0c6dd100cf586aea5d95a73dae", "score": "0.5585596", "text": "func NewHostname(path string, PubKey *rsa.PublicKey) shared.EncAsym {\n\thostname := genHostname(PubKey)\n\n\tencodeToFile(hostname, path)\n\n\treturn hostname\n}", "title": "" }, { "docid": "0d9201b8504e03a11c68e2a47163b8a5", "score": "0.5582035", "text": "func (nsHost) Hostname(value string) fld.Field {\n\treturn ecsString(\"host.hostname\", value)\n}", "title": "" }, { "docid": "a9acc6792f8456bc477f6c4e3b71431d", "score": "0.55804706", "text": "func (n *common) ValidateName(name string) error {\n\terr := validate.IsURLSegmentSafe(name)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif strings.Contains(name, \":\") {\n\t\treturn fmt.Errorf(\"Cannot contain %q\", \":\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "11e9da58d816b773ba1256487feda4df", "score": "0.55684733", "text": "func (s *PluginSummary) SetHostname(v string) *PluginSummary {\n\ts.Hostname = &v\n\treturn s\n}", "title": "" }, { "docid": "a43806fd8f96dabe28e4e6fc625486f9", "score": "0.555346", "text": "func validateCluster(clusterName string, ecsClient ecsclient.ECSClient) error {\n\tisClusterActive, err := ecsClient.IsActiveCluster(clusterName)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif !isClusterActive {\n\t\treturn fmt.Errorf(\"Cluster '%s' is not active. 
Ensure that it exists\", clusterName)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c7322ddc873cd16477fdbd538c8e0619", "score": "0.55294377", "text": "func validateSidecarOrGatewayHostnamePart(hostname string, isGateway bool) (errs error) {\n\t// short name hosts are not allowed\n\tif hostname != \"*\" && !strings.Contains(hostname, \".\") {\n\t\terrs = appendErrors(errs, fmt.Errorf(\"short names (non FQDN) are not allowed\"))\n\t}\n\n\tif err := ValidateWildcardDomain(hostname); err != nil {\n\t\tif !isGateway {\n\t\t\terrs = appendErrors(errs, err)\n\t\t}\n\n\t\t// Gateway allows IP as the host string, as well\n\t\tipAddr := net.ParseIP(hostname)\n\t\tif ipAddr == nil {\n\t\t\terrs = appendErrors(errs, err)\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "4ed5e6863b46dc91ebeb772171637e23", "score": "0.5525292", "text": "func ParseAndValidateServerName(serverName ServerName) (host string, port int, valid bool) {\n\t// Don't go any further if the server name is an empty string.\n\tif len(serverName) == 0 {\n\t\treturn\n\t}\n\n\thost, port = splitServerName(serverName)\n\n\t// the host part must be one of:\n\t// - a valid (ascii) dns name\n\t// - an IPv4 address\n\t// - an IPv6 address\n\n\tif host[0] == '[' {\n\t\t// must be a valid IPv6 address\n\t\tif host[len(host)-1] != ']' {\n\t\t\treturn\n\t\t}\n\t\tip := host[1 : len(host)-1]\n\t\tif net.ParseIP(ip) == nil {\n\t\t\treturn\n\t\t}\n\t\tvalid = true\n\t\treturn\n\t}\n\n\t// try parsing as an IPv4 address\n\tip := net.ParseIP(host)\n\tif ip != nil && ip.To4() != nil {\n\t\tvalid = true\n\t\treturn\n\t}\n\n\t// must be a valid DNS Name\n\tfor _, r := range host {\n\t\tif !isDNSNameChar(r) {\n\t\t\treturn\n\t\t}\n\t}\n\n\tvalid = true\n\treturn\n}", "title": "" }, { "docid": "6f0598ca7bc3cf91fe273f9843bfa78b", "score": "0.55109555", "text": "func (p *Parser) parseHostname() (string, error){\n\tvar hostname string\n\tlog := string(p.buff)\n\tre := regexp.MustCompile(`([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}_\\w+)`)\n\thostname = re.FindString(log)\n\tif hostname == \"\" {\n\t\treturn hostname, ErrHostnameNotFound\n\t}\n\treturn hostname,nil\n}", "title": "" }, { "docid": "22c5295c65d3b187d966b0b716da91cb", "score": "0.5499831", "text": "func ValidateDomain(name string) error {\n\t// The federation domain name need not strictly be domain names, we\n\t// accept valid dns names with subdomain components.\n\tif errs := validation.IsDNS1123Subdomain(name); len(errs) != 0 {\n\t\treturn fmt.Errorf(\"%q not a valid domain name: %q\", name, errs)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e5ac21c3ed132c71a26c934cbb925aa9", "score": "0.54939777", "text": "func (w *Writer) SetHostname(hostname string) {\n\tw.hostname = hostname\n}", "title": "" }, { "docid": "647c266c1bcea0925fa914405969829a", "score": "0.5459992", "text": "func hostCompatibleBucketName(u *url.URL, bucket string) bool {\n\t// Bucket might be DNS compatible but dots in the hostname will fail\n\t// certificate validation, so do not use host-style.\n\tif u.Scheme == \"https\" && strings.Contains(bucket, \".\") {\n\t\treturn false\n\t}\n\n\t// if the bucket is DNS compatible\n\treturn dnsCompatibleBucketName(bucket)\n}", "title": "" }, { "docid": "a3266a9a5f44e14d9bbf77b26168098a", "score": "0.54407525", "text": "func (h *Host) validate() error {\n\tif h == nil {\n\t\treturn fmt.Errorf(\"host field is nil\")\n\t}\n\n\tif h.Name == \"\" {\n\t\treturn fmt.Errorf(\"host field is set to empty string\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": 
"616df819ca475bfc1c85f1ac420e7601", "score": "0.5427399", "text": "func (o *HypercacheInstanceReadSummary) GetHostnameOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Hostname, true\n}", "title": "" }, { "docid": "a23e9ce1b52d633b3889f4a8a2d567d2", "score": "0.5422651", "text": "func justHostname(s string) string {\n\tif strings.Contains(s, \":\") {\n\t\thostname, _, _ := net.SplitHostPort(s)\n\t\treturn hostname\n\t} else {\n\t\treturn s\n\t}\n}", "title": "" }, { "docid": "af4625ef073e8e028396aa80c8cac4ef", "score": "0.5421467", "text": "func (in *ActionVpsSwapWithInput) SetHostname(value bool) *ActionVpsSwapWithInput {\n\tin.Hostname = value\n\n\tif in._selectedParameters == nil {\n\t\tin._selectedParameters = make(map[string]interface{})\n\t}\n\n\tin._selectedParameters[\"Hostname\"] = nil\n\treturn in\n}", "title": "" }, { "docid": "0f48c0a01f7c9f6cfdb93e5e7326f9b9", "score": "0.5410768", "text": "func Hostname() (string, error) {\n\treturn getTrimmed(\"instance/hostname\")\n}", "title": "" }, { "docid": "74f99befcf236415bb20092ce31dd58a", "score": "0.5394337", "text": "func IsValidName(name string) bool {\n\tre := regexp.MustCompile(`^[A-Za-z\\s\\-]*$`)\n\treturn re.MatchString(name)\n}", "title": "" }, { "docid": "d888a6add0c3131109a4dd6abaca65f2", "score": "0.5388694", "text": "func (o *Node) GetHostnameOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Hostname, true\n}", "title": "" }, { "docid": "a515fea4f42b27fe9f031c9c048449d4", "score": "0.53850096", "text": "func Hostname() string { return hostname }", "title": "" }, { "docid": "703da95183499fda05d22b654edb1c3c", "score": "0.5369718", "text": "func (h *HostNameAndPortValidator) Validate(v interface{}) (munged interface{}, err error) {\n\tif val, ok := v.(string); ok {\n\t\thostname, port, err := net.SplitHostPort(val)\n\t\tif err != nil {\n\t\t\treturn munged, jsonmap.NewValidationError(\"invalid hostname/port: %s\", err.Error())\n\t\t}\n\n\t\t// Validate hostname\n\t\thv := &HostNameValidator{}\n\t\tmunged, err := hv.Validate(hostname)\n\t\tif err != nil {\n\t\t\treturn munged, err\n\t\t}\n\t\tif hostname, ok = munged.(string); !ok {\n\t\t\treturn munged, jsonmap.NewValidationError(\"invalid hostname type\")\n\t\t}\n\n\t\t// Validate port\n\t\tpv := &PortValidator{}\n\t\tmunged, err = pv.Validate(port)\n\t\tif err != nil {\n\t\t\treturn munged, err\n\t\t}\n\t\tif port, ok = munged.(string); !ok {\n\t\t\treturn munged, jsonmap.NewValidationError(\"invalid port type\")\n\t\t}\n\n\t\t// Omit empty port\n\t\tif port != \"\" {\n\t\t\treturn net.JoinHostPort(hostname, port), nil\n\t\t}\n\t\treturn hostname, nil\n\t}\n\treturn munged, jsonmap.NewValidationError(\"invalid type\")\n}", "title": "" }, { "docid": "1a108771f3c8fb1a24d244b28b3b409b", "score": "0.5369658", "text": "func validateRedisHostClause(url *url.URL) (string, error) {\n\thost := strings.Split(url.Host, \":\")[0]\n\n\tif \"\" == host {\n\t\treturn \"\", ErrRedisHostIsEmpty\n\t}\n\n\treturn host, nil\n}", "title": "" }, { "docid": "bc35d5fc7733f9d439df76c8b4fa519d", "score": "0.53657675", "text": "func IsValidBucketName(bucket string) bool {\n\t// Special case when bucket is equal to one of the meta buckets.\n\tif isMinioMetaBucketName(bucket) {\n\t\treturn true\n\t}\n\tif len(bucket) < 3 || len(bucket) > 63 {\n\t\treturn false\n\t}\n\n\t// Split on dot and check each piece conforms to rules.\n\tallNumbers := true\n\tpieces := strings.Split(bucket, dnsDelimiter)\n\tfor _, piece := range pieces {\n\t\tif 
len(piece) == 0 || piece[0] == '-' ||\n\t\t\tpiece[len(piece)-1] == '-' {\n\t\t\t// Current piece has 0-length or starts or\n\t\t\t// ends with a hyphen.\n\t\t\treturn false\n\t\t}\n\t\t// Now only need to check if each piece is a valid\n\t\t// 'label' in AWS terminology and if the bucket looks\n\t\t// like an IP address.\n\t\tisNotNumber := false\n\t\tfor i := 0; i < len(piece); i++ {\n\t\t\tswitch {\n\t\t\tcase (piece[i] >= 'a' && piece[i] <= 'z' ||\n\t\t\t\tpiece[i] == '-'):\n\t\t\t\t// Found a non-digit character, so\n\t\t\t\t// this piece is not a number.\n\t\t\t\tisNotNumber = true\n\t\t\tcase piece[i] >= '0' && piece[i] <= '9':\n\t\t\t\t// Nothing to do.\n\t\t\tdefault:\n\t\t\t\t// Found invalid character.\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t\tallNumbers = allNumbers && !isNotNumber\n\t}\n\t// Does the bucket name look like an IP address?\n\treturn !(len(pieces) == 4 && allNumbers)\n}", "title": "" }, { "docid": "2a40f95f184444b6d6efdf151018d780", "score": "0.5363437", "text": "func isPlainHostName(host string) bool {\n\treturn strings.Index(host, \".\") == -1\n}", "title": "" }, { "docid": "920d851ebe933305967be601b06e5560", "score": "0.536019", "text": "func (lncn LogicalNCN) GetHostname() string {\n\tif lncn.Hostname == \"\" {\n\t\treturn lncn.Xname\n\t}\n\treturn lncn.Hostname\n}", "title": "" }, { "docid": "d49c3c78683fb510a5d7e13f624b589f", "score": "0.5342104", "text": "func (r *MySQLCluster) PodHostname(index int) string {\n\treturn fmt.Sprintf(\"%s.%s.%s.svc\", r.PodName(index), r.HeadlessServiceName(), r.Namespace)\n}", "title": "" }, { "docid": "a8b13cbf8b58916500242a87d92b20e0", "score": "0.5325468", "text": "func newHostnameApplier(kb config, tmpFile string) net.Applier {\n\tdata := HostnameData{Comment: versionHeader(kb.MachineUUID), Hostname: kb.Hostname}\n\tvalidator := HostnameValidator{tmpFile}\n\n\treturn net.NewNetworkApplier(data, validator, nil)\n}", "title": "" }, { "docid": "1be077755bbf74b58c2534c9ec6a52b1", "score": "0.5322852", "text": "func (o VirtualNodeSpecServiceDiscoveryDnsOutput) Hostname() pulumi.StringOutput {\n\treturn o.ApplyT(func(v VirtualNodeSpecServiceDiscoveryDns) string { return v.Hostname }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "d8a05244bdae5c365730de96ff80173c", "score": "0.531449", "text": "func (c *Client) Hostname(host string) (*ServerResponse, error) {\n\tapiPath := fmt.Sprintf(\"/v1/setHostname?newHostname=%s&\", host)\n\tresponse, err := c.doRequest(apiPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn response, nil\n}", "title": "" }, { "docid": "4c45ca4673468e359bf797eb0894fd06", "score": "0.53099954", "text": "func resolveHostname(hostname string) string {\n\tswitch hostname {\n\tcase registry.IndexHostname, registry.IndexName, registry.DefaultV2Registry.Host:\n\t\treturn registry.IndexServer\n\t}\n\treturn hostname\n}", "title": "" }, { "docid": "8d63e17cb19f72b138f886385a12a1ba", "score": "0.53095216", "text": "func convertToHostname(url string) string {\n\tstripped := url\n\tif strings.HasPrefix(url, \"http://\") {\n\t\tstripped = strings.TrimPrefix(url, \"http://\")\n\t} else if strings.HasPrefix(url, \"https://\") {\n\t\tstripped = strings.TrimPrefix(url, \"https://\")\n\t}\n\n\tnameParts := strings.SplitN(stripped, \"/\", 2)\n\n\treturn nameParts[0]\n}", "title": "" }, { "docid": "8a885bc4aef1cf5bddfb79ba52667ace", "score": "0.53075933", "text": "func NormalizeClusterName(name string) string {\n\tconst maximumNameLength = 23\n\n\tif len(name) > maximumNameLength {\n\t\treturn 
string(name[0:maximumNameLength])\n\t}\n\n\treturn name\n}", "title": "" }, { "docid": "361def5b524381124505312f0e8acbd1", "score": "0.5297478", "text": "func ValidateDomainByResolvingIt(domain string) error {\n\tif !IsValidDomain(domain) {\n\t\treturn ErrInvalidDomain\n\t}\n\taddr, err := net.LookupHost(domain)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif len(addr) == 0 {\n\t\treturn ErrInvalidDomain\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "33afdd096dcab15186da5fedac729556", "score": "0.52913356", "text": "func validateServiceName(serviceName string) bool {\n\tre := regexp.MustCompile(`^[-+_.:,a-zA-Z0-9]{1,128}$`)\n\treturn re.MatchString(serviceName)\n}", "title": "" }, { "docid": "adee526bded7a91139a2c9fd56960897", "score": "0.5279916", "text": "func (HostnameCheck) Name() string {\n\treturn \"Hostname\"\n}", "title": "" }, { "docid": "8537f9c3ba8d81957b0000e374e6be7a", "score": "0.52782017", "text": "func (err ErrUnwritableHostCredentials) Hostname() svchost.Hostname {\n\treturn svchost.Hostname(err)\n}", "title": "" }, { "docid": "de7884746afca4c8bff41d2f53505798", "score": "0.52679205", "text": "func IsValidBucketName(fl validator.FieldLevel) bool {\n\tbucket := fl.Field().String()\n\n\tif len(bucket) < 3 || len(bucket) > 63 {\n\t\treturn false\n\t}\n\n\t// Split on dot and check each piece conforms to rules.\n\tpieces := strings.Split(bucket, dnsDelimiter)\n\n\t// Does the bucket name look like an IP address?\n\treturn !(len(pieces) == 4 && isAllNumbers(pieces))\n}", "title": "" }, { "docid": "ec8e8fa863c8da53e8c3bb2a245813e5", "score": "0.5264874", "text": "func isDomainName(s string) bool {\n\t// The root domain name is valid. See golang.org/issue/45715.\n\tif s == \".\" {\n\t\treturn true\n\t}\n\n\t// See RFC 1035, RFC 3696.\n\t// Presentation format has dots before every label except the first, and the\n\t// terminal empty label is optional here because we assume fully-qualified\n\t// (absolute) input. We must therefore reserve space for the first and last\n\t// labels' length octets in wire format, where they are necessary and the\n\t// maximum total length is 255.\n\t// So our _effective_ maximum is 253, but 254 is not rejected if the last\n\t// character is a dot.\n\tl := len(s)\n\tif l == 0 || l > 254 || l == 254 && s[l-1] != '.' {\n\t\treturn false\n\t}\n\n\tlast := byte('.')\n\tnonNumeric := false // true once we've seen a letter or hyphen\n\tpartlen := 0\n\tfor i := 0; i < len(s); i++ {\n\t\tc := s[i]\n\t\tswitch {\n\t\tdefault:\n\t\t\treturn false\n\t\tcase 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_':\n\t\t\tnonNumeric = true\n\t\t\tpartlen++\n\t\tcase '0' <= c && c <= '9':\n\t\t\t// fine\n\t\t\tpartlen++\n\t\tcase c == '-':\n\t\t\t// Byte before dash cannot be dot.\n\t\t\tif last == '.' {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tpartlen++\n\t\t\tnonNumeric = true\n\t\tcase c == '.':\n\t\t\t// Byte before dot cannot be dot, dash.\n\t\t\tif last == '.' 
|| last == '-' {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tif partlen > 63 || partlen == 0 {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tpartlen = 0\n\t\t}\n\t\tlast = c\n\t}\n\tif last == '-' || partlen > 63 {\n\t\treturn false\n\t}\n\n\treturn nonNumeric\n}", "title": "" }, { "docid": "4b3be6c6b8b10caf6076e422b427808d", "score": "0.52644783", "text": "func (o *Node) GetHostname() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.Hostname\n}", "title": "" }, { "docid": "a813d4545a3601fcd8de749014fd967a", "score": "0.5262696", "text": "func ValidateDomainName(domain string) error {\n\t// Check whether the domain is greater than 3 and fewer than 63 characters in length\n\tif len(domain) < 3 || len(domain) > 63 {\n\t\terr := fmt.Errorf(\"domain name %v has fewer than 3 or greater than 63 characters\", domain)\n\t\treturn err\n\t}\n\t// Ensure each part of the domain name only contains lower/upper case characters, numbers and dashes\n\tif !allowedDomainRegex.MatchString(domain) {\n\t\terr := fmt.Errorf(\"domain name %v contains invalid characters\", domain)\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "816a371423e8e0c20511366ca339f875", "score": "0.524491", "text": "func (m *ClusterDomainServer) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateDomainCredentials(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateName(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "56c5da8931aff15ee41a4c9ed8d6e536", "score": "0.5236534", "text": "func TestValidateName(t *gotesting.T) {\n\tfor _, tc := range []struct {\n\t\tname string\n\t\tvalid bool\n\t}{\n\t\t{\"example.ChromeLogin\", true},\n\t\t{\"example.ChromeLogin2\", true},\n\t\t{\"example2.ChromeLogin\", true},\n\t\t{\"example.ChromeLogin.stress\", true},\n\t\t{\"example.ChromeLogin.more_stress\", true},\n\t\t{\"example.chromeLogin\", false},\n\t\t{\"example.7hromeLogin\", false},\n\t\t{\"example.Chrome_Login\", false},\n\t\t{\"example.Chrome@Login\", false},\n\t\t{\"Example.ChromeLogin\", false},\n\t\t{\"3xample.ChromeLogin\", false},\n\t\t{\"exam_ple.ChromeLogin\", false},\n\t\t{\"exam@ple.ChromeLogin\", false},\n\t\t{\"example.ChromeLogin.Stress\", false},\n\t\t{\"example.ChromeLogin.more-stress\", false},\n\t\t{\"example.ChromeLogin.more@stress\", false},\n\t} {\n\t\terr := validateName(tc.name)\n\t\tif err != nil && tc.valid {\n\t\t\tt.Errorf(\"validateName(%q) failed: %v\", tc.name, err)\n\t\t} else if err == nil && !tc.valid {\n\t\t\tt.Errorf(\"validateName(%q) didn't return expected error\", tc.name)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "ca90aadd7c329597b2340c7f17391ca0", "score": "0.5234611", "text": "func (payload *DeleteHostsPayload) Validate() (err error) {\n\tif payload.Hostname == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"Hostname\"))\n\t}\n\tif payload.Ipaddr == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"Ipaddr\"))\n\t}\n\treturn\n}", "title": "" }, { "docid": "92a5c30d099eb1303ffa5096e3e231e0", "score": "0.52341217", "text": "func (c *Conplicity) GetHostname() (err error) {\n\tif c.Config.HostnameFromRancher {\n\t\tresp, err := http.Get(\"http://rancher-metadata/latest/self/host/name\")\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer resp.Body.Close()\n\t\tbody, err := 
ioutil.ReadAll(resp.Body)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tc.Hostname = string(body)\n\t} else {\n\t\tc.Hostname, err = os.Hostname()\n\t}\n\treturn\n}", "title": "" }, { "docid": "eb2bd1388d51c9488cbebb186762b2ba", "score": "0.5231191", "text": "func (o *HypercacheInstanceReadSummary) GetHostname() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.Hostname\n}", "title": "" }, { "docid": "54f6675f8aa4e28f502bdad0c6367dd8", "score": "0.5229028", "text": "func (node *LocalVNode) SetHostname(newHostname string) {\n\tlogger.Printf(\"Changing Hostname: %s -> %s\", node.Hostname(), newHostname)\n\n\tnode.VNode.Hostname = newHostname\n}", "title": "" }, { "docid": "e44768ec1d1a34c8fcc0c4469d97c5fb", "score": "0.5228623", "text": "func (o *operator) ValidateHost(ctx context.Context, cluster *common.Cluster, host *models.Host) (api.ValidationResult, error) {\n\tvar inventory models.Inventory\n\tif host.Inventory == \"\" {\n\t\to.log.Info(\"Empty Inventory of host with hostID \", host.ID)\n\t\treturn api.ValidationResult{Status: api.Pending, ValidationId: o.GetClusterValidationID(), Reasons: []string{\"Missing Inventory in some of the hosts\"}}, nil\n\t}\n\tif err := json.Unmarshal([]byte(host.Inventory), &inventory); err != nil {\n\t\to.log.Errorf(\"Failed to get inventory from host with id %s\", host.ID)\n\t\treturn api.ValidationResult{Status: api.Failure, ValidationId: o.GetClusterValidationID()}, err\n\t}\n\n\t// If the Role is set to Auto-assign for a host, it is not possible to determine whether the node will end up as a master or worker node.\n\tif host.Role == models.HostRoleAutoAssign {\n\t\tstatus := \"All host roles must be assigned to enable OCS.\"\n\t\to.log.Info(\"Validate Requirements status \", status)\n\t\treturn api.ValidationResult{Status: api.Failure, ValidationId: o.GetClusterValidationID(), Reasons: []string{status}}, nil\n\t} else if host.Role == models.HostRoleWorker {\n\t\tcpu, _ := o.GetCPURequirementForWorker(ctx, cluster)\n\t\tif inventory.CPU.Count < cpu {\n\t\t\treturn api.ValidationResult{Status: api.Failure, ValidationId: o.GetClusterValidationID(), Reasons: []string{fmt.Sprintf(\"Insufficient CPU to deploy CNV. Required CPU count is %d but found %d \", cpu, inventory.CPU.Count)}}, nil\n\t\t}\n\t\tmem, _ := o.GetMemoryRequirementForWorker(ctx, cluster)\n\t\tif inventory.Memory.UsableBytes < mem {\n\t\t\tusableMemory := conversions.BytesToMib(inventory.Memory.UsableBytes)\n\t\t\tmemBytes := conversions.BytesToMib(mem)\n\t\t\treturn api.ValidationResult{Status: api.Failure, ValidationId: o.GetClusterValidationID(), Reasons: []string{fmt.Sprintf(\"Insufficient memory to deploy CNV. Required memory is %d MiB but found %d MiB\", memBytes, usableMemory)}}, nil\n\t\t}\n\t} else if host.Role == models.HostRoleMaster {\n\t\t// TODO: validate available devices on worker node like gpu and sr-iov and check whether there is enough memory to support them\n\t\tcpu, _ := o.GetCPURequirementForMaster(ctx, cluster)\n\t\tif inventory.CPU.Count < cpu {\n\t\t\treturn api.ValidationResult{Status: api.Failure, ValidationId: o.GetClusterValidationID(), Reasons: []string{fmt.Sprintf(\"Insufficient CPU to deploy CNV. 
Required CPU count is %d but found %d \", cpu, inventory.CPU.Count)}}, nil\n\t\t}\n\t\tmem, _ := o.GetMemoryRequirementForMaster(ctx, cluster)\n\t\tif inventory.Memory.UsableBytes < mem {\n\t\t\tusableMemory := conversions.BytesToMib(inventory.Memory.UsableBytes)\n\t\t\tmemBytes := conversions.BytesToMib(mem)\n\t\t\treturn api.ValidationResult{Status: api.Failure, ValidationId: o.GetClusterValidationID(), Reasons: []string{fmt.Sprintf(\"Insufficient memory to deploy CNV. Required memory is %d MiB but found %d MiB\", memBytes, usableMemory)}}, nil\n\t\t}\n\t}\n\treturn api.ValidationResult{Status: api.Success, ValidationId: o.GetClusterValidationID()}, nil\n}", "title": "" }, { "docid": "d1b93327480312388a17a0fe7ad5acd1", "score": "0.5226717", "text": "func (m *ClusterStatsMongoHostInfo) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateExtra(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateOs(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateSystem(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e8a0abbadced82c4bcc1da48d256871f", "score": "0.522269", "text": "func genHostname(PubKey *rsa.PublicKey) shared.EncAsym {\n\thostname := GenRandString()\n\treturn encAsym([]byte(hostname), PubKey)\n}", "title": "" }, { "docid": "30d5d86363bf05afa0d63d67dbeb9e73", "score": "0.52213377", "text": "func (i *Installer) CheckHostnameFound(hostname string) error {\n\tconn, err := net.Dial(\"tcp\", fmt.Sprintf(\"check-%d.%s:443\", time.Now().Unix(), hostname))\n\tif err == nil {\n\t\tconn.Close()\n\t}\n\treturn err\n}", "title": "" }, { "docid": "26ab9b17abd4ff2566c94276b0f52622", "score": "0.5221139", "text": "func Hostname(hostname string) Option {\n\treturn func(tok *token) error {\n\t\ttok.hostname = hostname\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "5010320f385eb9da0f4f80a197ed6ca1", "score": "0.5217782", "text": "func validateHostline(hostline string) error {\n\tif strings.Count(hostline, \":\") != 3 {\n\t\treturn errors.New(invalid)\n\t}\n\tif strings.Count(hostline, \"@\") != 1 {\n\t\treturn errors.New(invalid)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "a6eee5b3934dd2c25d2c230d60bd9ec8", "score": "0.5214324", "text": "func (a *Addr) Hostname() string {\n\thn := a.URL.Hostname()\n\tswitch hn {\n\tcase \"0\":\n\t\treturn \"0.0.0.0\"\n\tdefault:\n\t\treturn hn\n\t}\n}", "title": "" }, { "docid": "122ebc11e4dcb90fee63984af9bf7c5d", "score": "0.52065915", "text": "func ValidateReplicaSetName(name string, prefix bool) (bool, string) {\n\treturn apivalidation.NameIsDNSSubdomain(name, prefix)\n}", "title": "" }, { "docid": "4a821b4edbf64d65bdcfa591e0d39605", "score": "0.52062005", "text": "func nameIsInvalid(name string) bool {\n\ts := strings.TrimSpace(name)\n\tif len(s) == 0 {\n\t\treturn true\n\t}\n\treturn strings.Contains(s, \":\") || strings.Contains(s, \"#\")\n}", "title": "" }, { "docid": "cc68d6173e15a9ac4f1afd8d1e594f84", "score": "0.5203105", "text": "func StandardHostnameFromHTTP(url string, headers map[string]string) (string, error) {\n\n\treq, err := http.NewRequest(\"GET\", url, nil)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed to construct HTTP request: %v\", err)\n\t}\n\n\tfor k, v := range headers {\n\t\treq.Header.Add(k, v)\n\t}\n\n\tresp, err := http.DefaultClient.Do(req)\n\tif err != 
nil {\n\t\treturn \"\", err\n\t}\n\tdefer resp.Body.Close() // nolint: errcheck\n\n\tif resp.StatusCode < 200 || resp.StatusCode > 299 {\n\t\treturn \"\", fmt.Errorf(\"non-2XX response: (%d) %s\", resp.StatusCode, resp.Status)\n\t}\n\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed to read response: %v\", err)\n\t}\n\tif len(body) < 4 {\n\t\treturn \"\", fmt.Errorf(\"hostname implausibly short: %s\", string(body))\n\t}\n\n\treturn string(body), nil\n}", "title": "" }, { "docid": "8f4b15abb6eebf9133d1582b592588b9", "score": "0.5194069", "text": "func validateUnixOrLocalhost(addr string) bool {\n\tif strings.HasPrefix(addr, \"unix:\") {\n\t\treturn true\n\t}\n\tif strings.HasPrefix(addr, \"127.0.0.1:\") {\n\t\treturn true\n\t}\n\tif strings.HasPrefix(addr, \"[::1]:\") {\n\t\treturn true\n\t}\n\tif strings.HasPrefix(addr, \"localhost:\") {\n\t\treturn true\n\t}\n\treturn false\n}", "title": "" }, { "docid": "bd262bc411a406bc26c9aee3233191a8", "score": "0.51937914", "text": "func (c *Client) Hostname() (string, error) {\n\tvar hostname string\n\terr := c.doGet(\"hostname\", func(r io.Reader) error {\n\t\thostnameraw, err := ioutil.ReadAll(r)\n\t\thostname = string(hostnameraw)\n\t\treturn err\n\t})\n\treturn hostname, err\n}", "title": "" } ]
8d051a48aabe468ac95fefd8083efc0e
GetTrackDescriptor implements the TrackDescriptorClassification interface for type SelectVideoTrackByAttribute.
[ { "docid": "1f44cfc93bb718ef03444e914e8106b8", "score": "0.7089697", "text": "func (s *SelectVideoTrackByAttribute) GetTrackDescriptor() *TrackDescriptor {\n\treturn &TrackDescriptor{\n\t\tODataType: s.ODataType,\n\t}\n}", "title": "" } ]
[ { "docid": "64042d172cc93f6aa5bcb4f210c3b60c", "score": "0.6605681", "text": "func (v *VideoTrackDescriptor) GetVideoTrackDescriptor() *VideoTrackDescriptor { return v }", "title": "" }, { "docid": "8435dc00d3b049f29b184fd0585a1e40", "score": "0.65489215", "text": "func (s *SelectVideoTrackByAttribute) GetVideoTrackDescriptor() *VideoTrackDescriptor {\n\treturn &VideoTrackDescriptor{\n\t\tODataType: s.ODataType,\n\t}\n}", "title": "" }, { "docid": "01860d8ed7cf7453a3826b5992ea8d1c", "score": "0.6461414", "text": "func (s *SelectVideoTrackByID) GetTrackDescriptor() *TrackDescriptor {\n\treturn &TrackDescriptor{\n\t\tODataType: s.ODataType,\n\t}\n}", "title": "" }, { "docid": "9b30edd7449cfcd27e8b1d13cf701010", "score": "0.6433555", "text": "func (s *SelectAudioTrackByAttribute) GetTrackDescriptor() *TrackDescriptor {\n\treturn &TrackDescriptor{\n\t\tODataType: s.ODataType,\n\t}\n}", "title": "" }, { "docid": "ac3aef3fda42872a50c051b688c312c3", "score": "0.6182846", "text": "func (s *SelectVideoTrackByID) GetVideoTrackDescriptor() *VideoTrackDescriptor {\n\treturn &VideoTrackDescriptor{\n\t\tODataType: s.ODataType,\n\t}\n}", "title": "" }, { "docid": "cac33f14a1b869369053ac86357f130b", "score": "0.5891224", "text": "func (t *TrackDescriptor) GetTrackDescriptor() *TrackDescriptor { return t }", "title": "" }, { "docid": "e49ffdea39c368dc6bc2785ee8bf6aff", "score": "0.57834923", "text": "func (*DetectedAttribute) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{19}\n}", "title": "" }, { "docid": "bfc47c8c7169d17e765f63e4a8e87d05", "score": "0.57149935", "text": "func (s *SelectAudioTrackByID) GetTrackDescriptor() *TrackDescriptor {\n\treturn &TrackDescriptor{\n\t\tODataType: s.ODataType,\n\t}\n}", "title": "" }, { "docid": "65633c43350f3c844ed9114674c30f28", "score": "0.55767715", "text": "func (*Track) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{18}\n}", "title": "" }, { "docid": "8672768e0e1dd5c927a616ad169b3165", "score": "0.5473016", "text": "func (v *VideoTrackDescriptor) GetTrackDescriptor() *TrackDescriptor {\n\treturn &TrackDescriptor{\n\t\tODataType: v.ODataType,\n\t}\n}", "title": "" }, { "docid": "ba9c065bbc72a07f67d9f3a56f8145df", "score": "0.52027005", "text": "func (*CelebrityTrack) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{21}\n}", "title": "" }, { "docid": "6fd438d3988e99709f6a3b09606dc59b", "score": "0.50856954", "text": "func (s *SelectAudioTrackByAttribute) GetAudioTrackDescriptor() *AudioTrackDescriptor {\n\treturn &AudioTrackDescriptor{\n\t\tChannelMapping: s.ChannelMapping,\n\t\tODataType: s.ODataType,\n\t}\n}", "title": "" }, { "docid": "c556e062eeb251ef80d226ba4c8c7008", "score": "0.5031236", "text": "func (a *AudioTrackDescriptor) GetTrackDescriptor() *TrackDescriptor {\n\treturn &TrackDescriptor{\n\t\tODataType: a.ODataType,\n\t}\n}", "title": "" }, { "docid": "8df094bfc8ed133fd673f22d26077489", "score": "0.493211", "text": "func (*MediaAttribute_Video) Descriptor() ([]byte, []int) {\n\treturn file_contacts_media_proto_rawDescGZIP(), []int{0, 2}\n}", "title": "" }, { "docid": "ff682ebf5e08304da39896388894c3b2", "score": "0.48116413", "text": "func (*VideoSegment) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{9}\n}", 
"title": "" }, { "docid": "9433da17339cf84667e4b508a046d3c9", "score": "0.4722724", "text": "func (*AutoMlVideoClassification) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_schema_trainingjob_definition_automl_video_classification_proto_rawDescGZIP(), []int{0}\n}", "title": "" }, { "docid": "b00cddfcc114a6a7de076a388c8f9c53", "score": "0.466191", "text": "func (*TrackID) Descriptor() ([]byte, []int) {\n\treturn file_api_api_proto_rawDescGZIP(), []int{3}\n}", "title": "" }, { "docid": "23349b9acadc1c0b1c10343a8f3e9b32", "score": "0.46362725", "text": "func DecodeTrack(buf io.Reader) (track Track, err error) {\n\terr = binary.Read(buf, binary.LittleEndian, &track.id)\n\n\tif err != nil {\n\t\treturn track, err\n\t}\n\n\tvar nameLen uint8\n\terr = binary.Read(buf, binary.LittleEndian, &nameLen)\n\tif err != nil {\n\t\treturn track, err\n\t}\n\n\ttrack.name = make([]byte, nameLen)\n\terr = binary.Read(buf, binary.LittleEndian, &track.name)\n\tif err != nil {\n\t\treturn track, err\n\t}\n\n\terr = binary.Read(buf, binary.LittleEndian, &track.steps)\n\treturn track, err\n}", "title": "" }, { "docid": "42d4debcb4dcf89389d3509b89adbfdd", "score": "0.4629325", "text": "func (*VideoAnnotationProgress) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{28}\n}", "title": "" }, { "docid": "aa21196c7954eb9b781238a1367c85bc", "score": "0.46210793", "text": "func (*LabelDetectionConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{2}\n}", "title": "" }, { "docid": "cf3a74ab1e7cb5a3594890ae7ecfd619", "score": "0.46189463", "text": "func (*FaceDetectionAnnotation) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{24}\n}", "title": "" }, { "docid": "9e35db48734489278cbaea7ca13e8ea2", "score": "0.46151975", "text": "func (*Youtube_Video) Descriptor() ([]byte, []int) {\n\treturn file_chameleon_smelter_v1_crawl_item_youtube_proto_rawDescGZIP(), []int{0, 2}\n}", "title": "" }, { "docid": "f94e76830ecd823bb2d09ac92122d7af", "score": "0.4608154", "text": "func (s *SelectAudioTrackByID) GetAudioTrackDescriptor() *AudioTrackDescriptor {\n\treturn &AudioTrackDescriptor{\n\t\tChannelMapping: s.ChannelMapping,\n\t\tODataType: s.ODataType,\n\t}\n}", "title": "" }, { "docid": "ea440b4db4853d1c862ba386d686e5ee", "score": "0.46052355", "text": "func (*AnnotateVideoRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{0}\n}", "title": "" }, { "docid": "a414a135a2ae0932fc70880eec245dcd", "score": "0.46023306", "text": "func (*AnnotateVideoProgress) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{29}\n}", "title": "" }, { "docid": "1f56fee2896e626bfbb96b24336a033a", "score": "0.458999", "text": "func (*ObjectTrackingAnnotation) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{41}\n}", "title": "" }, { "docid": "e34b8a487b41422edc20545421919240", "score": "0.45827055", "text": "func (a *AudioTrackDescriptor) GetAudioTrackDescriptor() *AudioTrackDescriptor { return a }", "title": "" }, { "docid": "48bf1ea62f498db512a52c7e89bd5d30", "score": "0.45738867", "text": "func (*StreamingAnnotateVideoRequest) 
Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{43}\n}", "title": "" }, { "docid": "07f81deb7f29c6986d777fcc45f91d08", "score": "0.45631173", "text": "func (*StreamingAutomlObjectTrackingConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{53}\n}", "title": "" }, { "docid": "9614cb8a6f93024dbbbd46ed32d704ae", "score": "0.45616025", "text": "func (p *Player) VideoTrackDescriptors() ([]*MediaTrackDescriptor, error) {\n\tif err := p.assertInit(); err != nil {\n\t\treturn nil, err\n\t}\n\n\tcDescriptors := C.libvlc_video_get_track_description(p.player)\n\treturn parseMediaTrackDescriptorList(cDescriptors)\n}", "title": "" }, { "docid": "3a14fb095d60e1e007945fff1c64df4d", "score": "0.45511872", "text": "func (*AnnotateVideoResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{27}\n}", "title": "" }, { "docid": "a37bb3a3610993d6fef71e82e8826738", "score": "0.45336667", "text": "func (*VideoContext) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{1}\n}", "title": "" }, { "docid": "d0a466dfaf32deeeaaa7ab441b0a9ca9", "score": "0.45147917", "text": "func (*ObjectTrackingFrame) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{40}\n}", "title": "" }, { "docid": "ed36a4269ff35908c187d3a8b5265fd2", "score": "0.450598", "text": "func (*FaceDetectionConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{6}\n}", "title": "" }, { "docid": "0bddc08bcb86384b6dee85bf2e3c36a4", "score": "0.4504842", "text": "func (*StreamingAnnotateVideoResponse) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{45}\n}", "title": "" }, { "docid": "7448757d2b4f066926cd04f9116824c6", "score": "0.44869438", "text": "func (*StreamingLabelDetectionConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{48}\n}", "title": "" }, { "docid": "48a1650d833c65dd37cf82ca5ac6a4b7", "score": "0.44843173", "text": "func DetectVideo(ctx context.Context, filepath string) error {\n\tapi := fmt.Sprintf(\"api/detect_video?file_path=%s\", filepath)\n\tif _, err := HTTPGet(ctx, api); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "07eedf671ac8aaf6068a86cb28f323a6", "score": "0.44819623", "text": "func (*ObjectTrackingConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{4}\n}", "title": "" }, { "docid": "e5d34ed4e2f63e2628985c819841f243", "score": "0.44715682", "text": "func (*StreamingAutomlClassificationConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{52}\n}", "title": "" }, { "docid": "fca6ca6d317f8be6d53b3bba2c1672ff", "score": "0.4459752", "text": "func (*StreamingVideoAnnotationResults) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{46}\n}", "title": "" }, { "docid": 
"b92fcc8f963bbb4167ebea886eda2607", "score": "0.44477594", "text": "func (*ExplicitContentDetectionConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{5}\n}", "title": "" }, { "docid": "79ff7f787eb5e855d3fe5e6bb62d282e", "score": "0.44336566", "text": "func (s *GridViewConfiguration) SetVideoAttribute(v *VideoAttribute) *GridViewConfiguration {\n\ts.VideoAttribute = v\n\treturn s\n}", "title": "" }, { "docid": "0b4d61ac8725022ccca29791b7c60dd1", "score": "0.44289407", "text": "func (*VideoAnnotationResults) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{26}\n}", "title": "" }, { "docid": "74f6ce5d6f8f641fd5675b2c3524a94d", "score": "0.44254678", "text": "func (*StreamingExplicitContentDetectionConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{49}\n}", "title": "" }, { "docid": "4948ef899f85d5da9b7c5a7beb4c32ad", "score": "0.44253975", "text": "func (*LabelFrame) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{11}\n}", "title": "" }, { "docid": "3998bf63e7a5d221d53c2b0fd0aea13c", "score": "0.44238797", "text": "func (*ClientCallToggleVideo) Descriptor() ([]byte, []int) {\n\treturn file_client_call_proto_rawDescGZIP(), []int{0}\n}", "title": "" }, { "docid": "2c58f3e95c2c8253f8ea67432df3459d", "score": "0.4419612", "text": "func (s VideoAttribute) GoString() string {\n\treturn s.String()\n}", "title": "" }, { "docid": "8fdabc9414e4b7ebdaa572b017d7cc0f", "score": "0.4399625", "text": "func (*TrackDescription) Descriptor() ([]byte, []int) {\n\treturn file_api_api_proto_rawDescGZIP(), []int{1}\n}", "title": "" }, { "docid": "5f0a9b325c5d5cd0a2fa1af36de8690f", "score": "0.43926078", "text": "func (*StreamingVideoConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{44}\n}", "title": "" }, { "docid": "6e4ba63e269b033e2521824853f742dc", "score": "0.43906394", "text": "func (*StreamingObjectTrackingConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{50}\n}", "title": "" }, { "docid": "0f154f8e4f6893566c8a06d388c7f9c5", "score": "0.43716475", "text": "func (p *Player) VideoTrackID() (int, error) {\n\tif err := p.assertInit(); err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn int(C.libvlc_video_get_track(p.player)), nil\n}", "title": "" }, { "docid": "6158f2825e40946d0ce5bdf64ffce6d2", "score": "0.43689573", "text": "func (*LabelSegment) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{10}\n}", "title": "" }, { "docid": "72633c9afb755a521b5c6193c4618bad", "score": "0.43495184", "text": "func PossibleTrackAttributeValues() []TrackAttribute {\n\treturn []TrackAttribute{\n\t\tTrackAttributeBitrate,\n\t\tTrackAttributeLanguage,\n\t}\n}", "title": "" }, { "docid": "b88e9d207f21f217d285c383c89e5d45", "score": "0.43443868", "text": "func (LabelDetectionMode) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{0}\n}", "title": "" }, { "docid": "e2c80ca6ec97b575a279dc67dc4863d3", "score": "0.43134952", "text": "func 
(*PersonDetectionAnnotation) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{25}\n}", "title": "" }, { "docid": "32dd09d6c8eaa6ddb813359ef7a7b830", "score": "0.4307334", "text": "func (*CelebrityTrack_RecognizedCelebrity) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{21, 0}\n}", "title": "" }, { "docid": "428216514d4870422b0d3c10ea3887d5", "score": "0.42950714", "text": "func (*LabelAnnotation) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{13}\n}", "title": "" }, { "docid": "4b892a19cd52d4315cce17b22cc2611b", "score": "0.4283145", "text": "func (*Youtube_Video_Player) Descriptor() ([]byte, []int) {\n\treturn file_chameleon_smelter_v1_crawl_item_youtube_proto_rawDescGZIP(), []int{0, 2, 1}\n}", "title": "" }, { "docid": "d2ed329a7e225e819afd58513f754896", "score": "0.428285", "text": "func (*TextDetectionConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{8}\n}", "title": "" }, { "docid": "a34414fa0e6d27f41ff6f42c06c4d27c", "score": "0.4265672", "text": "func (*MuteRoomTrackRequest) Descriptor() ([]byte, []int) {\n\treturn file_livekit_room_proto_rawDescGZIP(), []int{9}\n}", "title": "" }, { "docid": "6003689535326e7fe15899ef43f27f79", "score": "0.42537826", "text": "func (v *WebRTCVideoReceiver) Track() *webrtc.Track {\n\treturn v.track\n}", "title": "" }, { "docid": "e3df3f465e0281f2fe9344c56fcb656a", "score": "0.42279136", "text": "func DescriptorGet(id CodecId) *Descriptor {\n\treturn (*Descriptor)(C.avcodec_descriptor_get((C.enum_AVCodecID)(id)))\n}", "title": "" }, { "docid": "d53ff3903f696c8b6841c0d886ce9ecd", "score": "0.42204672", "text": "func (*ShotChangeDetectionConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{3}\n}", "title": "" }, { "docid": "d79d646907add0d97bde65fe839e4436", "score": "0.42114067", "text": "func (*TracksDescriptions) Descriptor() ([]byte, []int) {\n\treturn file_api_api_proto_rawDescGZIP(), []int{2}\n}", "title": "" }, { "docid": "66aa17b21e8df96078a69e8409eeaa53", "score": "0.4205901", "text": "func (*CelebrityRecognitionAnnotation) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{22}\n}", "title": "" }, { "docid": "f81c5a4a55f9643a62c8a0d821d339e9", "score": "0.41893244", "text": "func (Feature) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{3}\n}", "title": "" }, { "docid": "8e226b66b35b8cc73fd7e5e48d8bcd0d", "score": "0.41760236", "text": "func (*StreamingShotChangeDetectionConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{47}\n}", "title": "" }, { "docid": "78785379afe932742958dea6ad1790f8", "score": "0.41706648", "text": "func (v *Video) GetVideo() *Video { return v }", "title": "" }, { "docid": "64b712c773579445d807be290556c71c", "score": "0.4155312", "text": "func (v *VideoTrack) 
GetTrackBase() *TrackBase {\n\treturn &TrackBase{\n\t\tODataType: v.ODataType,\n\t}\n}", "title": "" }, { "docid": "9ca61fae07cf4f33c5a3f49c572170f0", "score": "0.41550186", "text": "func (*ExplicitContentFrame) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{14}\n}", "title": "" }, { "docid": "49dee4cdd024621df08ad7392d406832", "score": "0.4152793", "text": "func (*Celebrity) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{20}\n}", "title": "" }, { "docid": "530a9a9e623a6b5042b681d971ab29c1", "score": "0.41390255", "text": "func (*Qos_ClassifierTemplate_TrafficClassifiers_TrafficClassifier_LocalIds) Descriptor() ([]byte, []int) {\n\treturn file_huaweiV8R12_qos_proto_rawDescGZIP(), []int{0, 2, 1, 0, 5}\n}", "title": "" }, { "docid": "0b41f79129d838a0369aa25e5908e72d", "score": "0.41317633", "text": "func (*PersonDetectionConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{7}\n}", "title": "" }, { "docid": "09fb2b471a8762a0100fbe13bd517064", "score": "0.41212535", "text": "func (*Qos_ClassifierTemplate_TrafficClassifiers_TrafficClassifier_LocalIds_LocalId) Descriptor() ([]byte, []int) {\n\treturn file_huaweiV8R12_qos_proto_rawDescGZIP(), []int{0, 2, 1, 0, 5, 0}\n}", "title": "" }, { "docid": "1aaa4c3f3b0f3535c74d23c19d06112f", "score": "0.4118277", "text": "func (*Qos_ClassifierTemplate_TrafficClassifiers_TrafficClassifier_RuleAcls) Descriptor() ([]byte, []int) {\n\treturn file_huaweiV8R12_qos_proto_rawDescGZIP(), []int{0, 2, 1, 0, 0}\n}", "title": "" }, { "docid": "1b74f602a5688569d1b59eb626fb2fc1", "score": "0.41050956", "text": "func (*MuteRoomTrackResponse) Descriptor() ([]byte, []int) {\n\treturn file_livekit_room_proto_rawDescGZIP(), []int{10}\n}", "title": "" }, { "docid": "4a3d2e552bf532bd79841793141e6956", "score": "0.40897936", "text": "func (*TranscodeVideoOverride) Descriptor() ([]byte, []int) {\n\treturn file_vod_business_vod_workflow_proto_rawDescGZIP(), []int{4}\n}", "title": "" }, { "docid": "93f3135018c8a0c9a68ed94c70e92c62", "score": "0.4057697", "text": "func (*StreamingAutomlActionRecognitionConfig) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{51}\n}", "title": "" }, { "docid": "b5f50f012e0329edae72e5eeea3833cb", "score": "0.4048215", "text": "func (*FileAccessReport) SpecificationForAttribute(name string) elemental.AttributeSpecification {\n\n\tif v, ok := FileAccessReportAttributesMap[name]; ok {\n\t\treturn v\n\t}\n\n\t// We could not find it, so let's check on the lower case indexed spec map\n\treturn FileAccessReportLowerCaseAttributesMap[name]\n}", "title": "" }, { "docid": "8d8974d685af9319ac8614a19bb462cb", "score": "0.40468353", "text": "func (*NormalizedVertex) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{35}\n}", "title": "" }, { "docid": "0f4d9b3fa39d658481b9cb447cf08ff8", "score": "0.40406886", "text": "func (*ExplicitContentAnnotation) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{15}\n}", "title": "" }, { "docid": "254fbd3f7bb9478abb92d2d40d8514cb", "score": "0.40346143", "text": "func (p *RTCPublisher) GetVideoTrack() 
*mediaserver.IncomingStreamTrack {\n\treturn p.videotrack\n}", "title": "" }, { "docid": "569cebb67b1a036dd868941fcb32fd0b", "score": "0.40304816", "text": "func ReadTrack(r io.Reader) (*Track, error) {\n\tt := Track{}\n\n\terr := binary.Read(r, binary.LittleEndian, &t.ID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tnameLen := []byte{0}\n\t_, err = io.ReadFull(r, nameLen)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tt.Name = make([]byte, int(nameLen[0]))\n\t_, err = io.ReadFull(r, t.Name)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tmeasure := make([]byte, 16)\n\t_, err = io.ReadFull(r, measure)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor i := range t.Measure {\n\t\tt.Measure[i][0] = measure[4*i+0]\n\t\tt.Measure[i][1] = measure[4*i+1]\n\t\tt.Measure[i][2] = measure[4*i+2]\n\t\tt.Measure[i][3] = measure[4*i+3]\n\t}\n\n\treturn &t, nil\n}", "title": "" }, { "docid": "e21d5186f7c10e7dcbbdb7887f4ef8f0", "score": "0.40248686", "text": "func (*Qos_ClassifierTemplate_TrafficClassifiers_TrafficClassifier_RuleVlans_RuleVlan) Descriptor() ([]byte, []int) {\n\treturn file_huaweiV8R12_qos_proto_rawDescGZIP(), []int{0, 2, 1, 0, 15, 0}\n}", "title": "" }, { "docid": "2b3859c57ceadb1eccd4fa71caa39e81", "score": "0.4019619", "text": "func NewDescriptor(id ID, variant Variant) *Descriptor {\n\treturn &Descriptor{Key: Key{ID: id, Variant: variant}}\n}", "title": "" }, { "docid": "a9cf1a86f5f82aa54b2d37f30fe8c456", "score": "0.40006304", "text": "func (AdTracking) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_cloud_video_stitcher_v1_live_configs_proto_rawDescGZIP(), []int{0}\n}", "title": "" }, { "docid": "859457e9a87454596eb075c13e3c30ec", "score": "0.39944303", "text": "func (*Qos_ClassifierTemplate_TrafficClassifiers_TrafficClassifier_SrcDestLocalIds_SrcDestLocalId) Descriptor() ([]byte, []int) {\n\treturn file_huaweiV8R12_qos_proto_rawDescGZIP(), []int{0, 2, 1, 0, 16, 0}\n}", "title": "" }, { "docid": "5afd71cf0c2f1e938511ecc7371f489b", "score": "0.39751142", "text": "func (*DefaultEnforcerVersion) SpecificationForAttribute(name string) elemental.AttributeSpecification {\n\n\tif v, ok := DefaultEnforcerVersionAttributesMap[name]; ok {\n\t\treturn v\n\t}\n\n\t// We could not find it, so let's check on the lower case indexed spec map\n\treturn DefaultEnforcerVersionLowerCaseAttributesMap[name]\n}", "title": "" }, { "docid": "124f429b68a6d9006e4ece702a323902", "score": "0.3972807", "text": "func (*TapByResourceRequest_Match_Http) Descriptor() ([]byte, []int) {\n\treturn file_viz_tap_proto_rawDescGZIP(), []int{1, 0, 1}\n}", "title": "" }, { "docid": "357da836c6e91822329b1b8904a81f6b", "score": "0.3956261", "text": "func (*ProcessingUnit) SpecificationForAttribute(name string) elemental.AttributeSpecification {\n\n\tif v, ok := ProcessingUnitAttributesMap[name]; ok {\n\t\treturn v\n\t}\n\n\t// We could not find it, so let's check on the lower case indexed spec map\n\treturn ProcessingUnitLowerCaseAttributesMap[name]\n}", "title": "" }, { "docid": "6f2ae84cc5520a39ba2ab8c22a26a067", "score": "0.39465326", "text": "func (*AutoMlVideoClassificationInputs) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_aiplatform_v1_schema_trainingjob_definition_automl_video_classification_proto_rawDescGZIP(), []int{1}\n}", "title": "" }, { "docid": "4b8a5c7bc690037eceadb707b1bde64b", "score": "0.39366978", "text": "func (StreamingFeature) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), 
[]int{2}\n}", "title": "" }, { "docid": "7e0258c38af30b3ecc2065e58e7b3cc9", "score": "0.39301774", "text": "func (*PCSearchResult) SpecificationForAttribute(name string) elemental.AttributeSpecification {\n\n\tif v, ok := PCSearchResultAttributesMap[name]; ok {\n\t\treturn v\n\t}\n\n\t// We could not find it, so let's check on the lower case indexed spec map\n\treturn PCSearchResultLowerCaseAttributesMap[name]\n}", "title": "" }, { "docid": "ad2b8a3d7fe9ff8fa529f5542c16f41c", "score": "0.39271697", "text": "func (*DetectedLandmark) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{23}\n}", "title": "" }, { "docid": "e0eab7f7a5c3ac1a763fc1929bca7a9a", "score": "0.39261618", "text": "func (*Qos_ClassifierTemplate_TrafficClassifiers_TrafficClassifier_SrcDestLocalIds) Descriptor() ([]byte, []int) {\n\treturn file_huaweiV8R12_qos_proto_rawDescGZIP(), []int{0, 2, 1, 0, 16}\n}", "title": "" }, { "docid": "bc023755b34adadda7cb2b0deda05b37", "score": "0.3925586", "text": "func (*BuildEventId_ConfiguredLabelId) Descriptor() ([]byte, []int) {\n\treturn file_third_party_bazel_src_main_java_com_google_devtools_build_lib_buildeventstream_proto_build_event_stream_proto_rawDescGZIP(), []int{0, 17}\n}", "title": "" }, { "docid": "12b4dbe429fde4490cdf59795b49cf38", "score": "0.39231715", "text": "func (*TextSegment) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_videointelligence_v1p3beta1_video_intelligence_proto_rawDescGZIP(), []int{37}\n}", "title": "" }, { "docid": "09c0dcd509924227f575c5c9fd475848", "score": "0.3921267", "text": "func (DeviceInfo_VideoDecoder) EnumDescriptor() ([]byte, []int) {\n\treturn file_decision_proto_rawDescGZIP(), []int{2, 0}\n}", "title": "" }, { "docid": "5e9aa52af3e770c760a3e1e41d24f7d0", "score": "0.39160955", "text": "func (*GetUserAttributeByKeyResponse) Descriptor() ([]byte, []int) {\n\treturn file_pikachu_v1_pikachu_proto_rawDescGZIP(), []int{47}\n}", "title": "" } ]
7f816147bf387d4d262e1d621ad87de3
ClearLeft will return a new byte slice with the latest (size) bits as zeros.
[ { "docid": "b580e15f77b233478ef193fa5be02dc9", "score": "0.8133588", "text": "func ClearLeft(digest []byte, size uint64) []byte {\n\treturn filter(digest, size, true)\n}", "title": "" } ]
[ { "docid": "3b9da337b9b08c49fae5535c1ef51da7", "score": "0.7239919", "text": "func SetLeft(digest []byte, size uint64) []byte {\n\treturn filter(digest, size, false)\n}", "title": "" }, { "docid": "1af2fa6162c350a628543d1c0d12b130", "score": "0.693831", "text": "func leftPad(input []byte, size int) (out []byte) {\n\tn := len(input)\n\tif n > size {\n\t\tn = size\n\t}\n\tout = make([]byte, size)\n\tcopy(out[len(out)-n:], input)\n\treturn\n}", "title": "" }, { "docid": "53881b622c53e09c59269d5e79becdf4", "score": "0.6427634", "text": "func LeftPadBytes(slice []byte, l int) []byte {\n\tif l <= len(slice) {\n\t\treturn slice\n\t}\n\n\tpadded := make([]byte, l)\n\tcopy(padded[l-len(slice):], slice)\n\n\treturn padded\n}", "title": "" }, { "docid": "53881b622c53e09c59269d5e79becdf4", "score": "0.6427634", "text": "func LeftPadBytes(slice []byte, l int) []byte {\n\tif l <= len(slice) {\n\t\treturn slice\n\t}\n\n\tpadded := make([]byte, l)\n\tcopy(padded[l-len(slice):], slice)\n\n\treturn padded\n}", "title": "" }, { "docid": "53881b622c53e09c59269d5e79becdf4", "score": "0.6427634", "text": "func LeftPadBytes(slice []byte, l int) []byte {\n\tif l <= len(slice) {\n\t\treturn slice\n\t}\n\n\tpadded := make([]byte, l)\n\tcopy(padded[l-len(slice):], slice)\n\n\treturn padded\n}", "title": "" }, { "docid": "53881b622c53e09c59269d5e79becdf4", "score": "0.6427634", "text": "func LeftPadBytes(slice []byte, l int) []byte {\n\tif l <= len(slice) {\n\t\treturn slice\n\t}\n\n\tpadded := make([]byte, l)\n\tcopy(padded[l-len(slice):], slice)\n\n\treturn padded\n}", "title": "" }, { "docid": "a9d22941552131f6646f4db8d4aa86c8", "score": "0.64001435", "text": "func PadLeft(data []byte, symbol, targetSize byte) []byte {\n\tpadding := int(targetSize) - len(data)\n\tif padding <= 0 {\n\t\treturn data\n\t}\n\tresult := make([]byte, targetSize)\n\tcopy(result[0:padding], bytes.Repeat([]byte{symbol}, padding))\n\tcopy(result[padding:], data)\n\treturn result\n}", "title": "" }, { "docid": "bb6080cccf660aa465d7ca492bce5644", "score": "0.6338266", "text": "func LeftShift(data []byte, shift uint64) []byte {\n\tif shift == 0 {\n\t\treturn data\n\t}\n\tvar dataLength = len(data)\n\tresult := make([]byte, dataLength)\n\tif shift > byteLength {\n\t\tvar shiftedData = append(make([]byte, 1), data[:dataLength-1]...)\n\t\tresult = LeftShift(shiftedData, shift-byteLength)\n\t} else {\n\t\tfor i := 0; i < dataLength; i++ {\n\t\t\tif i < dataLength-1 {\n\t\t\t\tresult[i+1] = data[i] << (byteLength - shift)\n\t\t\t}\n\t\t\tresult[i] = result[i] | (data[i] >> shift)\n\t\t}\n\t}\n\treturn result\n}", "title": "" }, { "docid": "227747a8e45849c7320c34492ee16b14", "score": "0.6242646", "text": "func padBytesLeft(b []byte) []byte {\n\tl := len(b)\n\tif l == 32 {\n\t\treturn b\n\t}\n\tbb := make([]byte, 32)\n\tcopy(bb[32-l:], b)\n\treturn bb\n}", "title": "" }, { "docid": "84b54680b2923eba0cff065ef78a8900", "score": "0.6037635", "text": "func (i Uint128) LeftShift() Uint128 {\n\tj := Uint128{low: i.low << 1}\n\n\tj.high = i.high << 1\n\tif i.low&0x8000000000000000 > 0 {\n\t\tj.high |= 1\n\t}\n\n\treturn j\n}", "title": "" }, { "docid": "35c26e70eb5e97a9e6ed491c4747d859", "score": "0.6011431", "text": "func (v *Vector) ShiftLeft(w *Vector, r uint) *Vector {\n\t//body, lenLast := makeVector(w.Len())\n\tstart := int(r / WordSize)\n\tl := r % WordSize\n\tmask := MaxInteger << (WordSize - l)\n\ttmpv := New().allocate(w.Len())\n\tfor i := start; i < len(w.body); i++ {\n\t\ttmpv.body[i-start] = w.body[i] << l\n\t\tif i < len(w.body)-1 
{\n\t\t\ttmpv.body[i-start] ^= (w.body[i+1] & mask) >> (WordSize - l)\n\t\t}\n\t}\n\tv.body, v.lenLast = tmpv.body, tmpv.lenLast\n\treturn v\n}", "title": "" }, { "docid": "afcc7ce0d4f5b047d6a2df7c78fb6faf", "score": "0.5977328", "text": "func (v *Vector) RotateLeft(w *Vector, r uint) *Vector {\n\tif w.Len() == 0 {\n\t\tv.allocate(0)\n\t\treturn v\n\t}\n\tr = r % w.Len()\n\tu := new(Vector).ShiftLeft(w, r) // u == w << r\n\tv.ShiftRight(w, w.Len()-r) // v == w >> (len - r)\n\treturn v.Xor(v, u)\n}", "title": "" }, { "docid": "aa4e70b1fcb1731d99b93d1c8300411e", "score": "0.59350806", "text": "func copyWithLeftPad(dest, src []byte) {\n\tnumPaddingBytes := len(dest) - len(src)\n\tfor i := 0; i < numPaddingBytes; i++ {\n\t\tdest[i] = 0\n\t}\n\tcopy(dest[numPaddingBytes:], src)\n}", "title": "" }, { "docid": "2e584668c69db4e5ab7bed8fc1eb5255", "score": "0.593501", "text": "func padLeft(binary string, final int) string {\n\tlength := len(binary)\n\tif length >= final {\n\t\treturn binary\n\t}\n\tpadded := binary\n\tfor i := 0; i < final-length; i++ {\n\t\tpadded = \"0\" + padded\n\t}\n\treturn padded\n}", "title": "" }, { "docid": "157ab959660350ab7d884893c64adab3", "score": "0.5888096", "text": "func left_encode(value uint64) []byte {\n\tvar input [9]byte\n\tvar offset uint\n\tif value == 0 {\n\t\toffset = 8\n\t} else {\n\t\tbinary.BigEndian.PutUint64(input[1:], value)\n\t\tfor offset = 0; offset < 9; offset++ {\n\t\t\tif input[offset] != 0 {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\tinput[offset-1] = byte(9 - offset)\n\treturn input[offset-1:]\n}", "title": "" }, { "docid": "911dff78863a7bd2cd7a4e8c9b005c77", "score": "0.58538324", "text": "func array_shift_bit_left(in []byte) []byte {\n\tout := make([]byte, len(in))\n\tvar b byte = 0x0\n\tfor i := len(in) - 1; i >= 0; i-- {\n\t\tout[i] = (in[i] << 1) | b // Shift and add previous shifted bit at lsb\n\t\tb = in[i] >> 7 // Get msb to be shifted on the next byte group\n\t}\n\treturn out\n}", "title": "" }, { "docid": "187131b13131f339e6df8aef91ee735a", "score": "0.57673305", "text": "func LeftPad(toPad dnaSequence, padLength int) dnaSequence {\n\tpadding := make([]nucleotide, 0, padLength)\n\tfor i := 0; i < padLength; i++ {\n\t\tpadding = append(padding, nucleotide(PadNucleotide()))\n\t}\n\n\treturn append(padding, toPad...)\n}", "title": "" }, { "docid": "151d576bbc71d7a2df49a0cad579365f", "score": "0.5724399", "text": "func PadLeft(str string, length int, pad byte) string {\n\tif len(str) >= length {\n\t\treturn str\n\t}\n\tvar buf bytes.Buffer\n\tfor i := 0; i < length-len(str); i++ {\n\t\tbuf.WriteByte(pad)\n\t}\n\tbuf.WriteString(str)\n\treturn buf.String()\n}", "title": "" }, { "docid": "adc9ae04b0177b08eb7f54232c6f03cd", "score": "0.569565", "text": "func PadLeft(str string, pad string, length int) string {\n\tfor {\n\t\tstr = pad + str\n\t\tif len(str) >= length {\n\t\t\treturn str[0:length]\n\t\t}\n\t}\n}", "title": "" }, { "docid": "a95e00df342f97451f8b583e436eeb17", "score": "0.56680226", "text": "func (i *IPv4Address) ShiftLeft(shiftCount uint) {\n\ti.Address <<= shiftCount\n\ti.Length -= shiftCount\n}", "title": "" }, { "docid": "5ef42fd8a37edbb4b04c6b01bab0d12e", "score": "0.562395", "text": "func (u Uint128) LeftShift(n uint) Uint128 {\n\tswitch {\n\tcase n == 0:\n\tcase n > 64:\n\t\tu.hi = u.lo << (n - 64)\n\t\tu.lo = 0\n\tcase n < 64:\n\t\tu.hi = (u.hi << n) | (u.lo >> (64 - n))\n\t\tu.lo <<= n\n\tdefault:\n\t\tu.hi = u.lo\n\t\tu.lo = 0\n\t}\n\treturn u\n}", "title": "" }, { "docid": "aa2565c8b7cbf66c7a403ab9912e94a7", "score": 
"0.55367845", "text": "func RotateLeft(x uint, k int) uint", "title": "" }, { "docid": "ff2cfab271fc36bc3dbc2922ca7957c9", "score": "0.5511892", "text": "func (c *Condition) FillLeft(s string, w int) string {\n\twidth := c.StringWidth(s)\n\tcount := w - width\n\tif count > 0 {\n\t\tb := make([]byte, count)\n\t\tfor i := range b {\n\t\t\tb[i] = ' '\n\t\t}\n\t\treturn string(b) + s\n\t}\n\treturn s\n}", "title": "" }, { "docid": "559a6e15aee1b756318eda5199560044", "score": "0.5490167", "text": "func SliceClear(s []int) []int {\n\t// amd64:`.*memclrNoHeapPointers`\n\tfor i := range s {\n\t\ts[i] = 0\n\t}\n\treturn s\n}", "title": "" }, { "docid": "6cf5abc3e2eeb9a32ada7a9b9802658d", "score": "0.5487116", "text": "func TrimZeroRight(b []byte) []byte {\n\tfor i := len(b) - 1; i > 0; i-- {\n\t\tif b[i] > 0 {\n\t\t\treturn b[:i+1]\n\t\t}\n\t}\n\treturn b[:0]\n}", "title": "" }, { "docid": "748a09d76a437f60b35bb388806039cf", "score": "0.54866487", "text": "func ResetBits(value []byte, start uint8) []byte {\n\tif int(start) >= len(value)*8 {\n\t\treturn value\n\t}\n\n\tstartByte := start / 8\n\tstartBit := start % 8\n\n\tresult := make([]byte, len(value))\n\tcopy(result, value[:startByte])\n\n\t// Reset bits in starting byte.\n\tmask := byte(0xFF)\n\tmask <<= 8 - startBit\n\tresult[startByte] = value[startByte] & mask\n\n\treturn result\n}", "title": "" }, { "docid": "3ea7f8c6f4622cfb3230e160701dbc34", "score": "0.54685956", "text": "func zeroPad(b []byte, leigth int) []byte {\n\tfor i := 0; i < leigth-len(b); i++ {\n\t\tb = append([]byte{0x00}, b...)\n\t}\n\n\treturn b\n}", "title": "" }, { "docid": "c53787218523f478eea51f5f54f1c166", "score": "0.54676574", "text": "func FillLeft(s string, w int) string {\n\treturn DefaultCondition.FillLeft(s, w)\n}", "title": "" }, { "docid": "710ce3491c1eee565e45f5f48c928cce", "score": "0.545429", "text": "func clearSlice(buf []byte) {\n\tfor i := range buf {\n\t\tbuf[i] = '0'\n\t}\n}", "title": "" }, { "docid": "833b417f88df59ab96054635fa0521e6", "score": "0.54394794", "text": "func PadLeft(str string, width int, pad string) string {\n\treturn Tile(pad, width-Len(str)) + str\n}", "title": "" }, { "docid": "40c279eaa5fd3b625a6d6d32c0b4ccac", "score": "0.5401088", "text": "func CutFromLeft(newWidth, newHeight int) Modification {\n\treturn CutFn(\n\t\tfunc(w int) int { return w - newWidth },\n\t\tfunc(h int) int { return h - newHeight },\n\t\tfunc(int) int { return newWidth },\n\t\tfunc(int) int { return newHeight })\n}", "title": "" }, { "docid": "fb408c9207cb5a91f95c6c286cba0400", "score": "0.53849316", "text": "func (b *Box) PadLeft(pad int) *Box {\n\tb.Content.Pad.Left = pad\n\treturn b\n}", "title": "" }, { "docid": "c7d5c94110b16fe31ad9817c7987a368", "score": "0.53831583", "text": "func (q *BytesQueue) Reset() {\n\t// Just reset indexes\n\tq.tail = leftMarginIndex\n\tq.head = leftMarginIndex\n\tq.rightMargin = leftMarginIndex\n\tq.count = 0\n}", "title": "" }, { "docid": "2f9bedcdf6f30c01fedab343f47daf7f", "score": "0.5340644", "text": "func (bf *Bitfield) Zero() {\n\tbf.Data = make([]byte, (bf.Length/8)+1)\n}", "title": "" }, { "docid": "a70e1812edb761c852b78ec21bf61dc3", "score": "0.53316975", "text": "func (c *Cipher) ZeroUnPadding(src []byte) []byte {\n\tfor i := len(src) - 1; ; i-- {\n\t\tif src[i] != 0 {\n\t\t\treturn src[:i+1]\n\t\t}\n\t}\n\treturn emptyBytes\n}", "title": "" }, { "docid": "5db7462a6f9ad52c5ca6e1368ddbb90e", "score": "0.532607", "text": "func padLeft(str string, pad string, length int) string {\n\tfinalStr := \"\"\n\n\tlenPad := length 
- len(str)\n\n\tif lenPad > 0 {\n\t\tfinalStr = strings.Repeat(pad, lenPad)\n\t}\n\n\tfinalStr += str\n\n\treturn finalStr\n}", "title": "" }, { "docid": "57d0a97e5552d5acd2bf8f9303fab834", "score": "0.5281691", "text": "func (node *Node) PadLeft(d int, n int) {\n\thead := New(d)\n\ttail := head\n\n\tfor i := 1; i < n; i++ {\n\t\ttail.next = New(d)\n\t\ttail = tail.next\n\t}\n\n\t// hack to copy node content to the tail\n\ttemp := *node\n\ttail.next = &temp\n\n\t*node = *head\n}", "title": "" }, { "docid": "5eccce879ed76d4753ad7e16afb60aef", "score": "0.52813846", "text": "func TrimLeft(s string, cutSet ...string) string {\n\tif len(cutSet) > 0 {\n\t\treturn strings.TrimLeft(s, cutSet[0])\n\t}\n\n\treturn strings.TrimLeft(s, \" \")\n}", "title": "" }, { "docid": "a3a4f2e7806c7b5482eca68623d1da9f", "score": "0.5258767", "text": "func flushLeft(graph *Graph) {\n\tnode := graph.Nodes[0]\n\tminleft := node.Center.X - node.Radius.X\n\tfor _, node := range graph.Nodes[1:] {\n\t\tif node.Center.X-node.Radius.X < minleft {\n\t\t\tminleft = node.Center.X - node.Radius.X\n\t\t}\n\t}\n\n\tfor _, node := range graph.Nodes {\n\t\tnode.Center.X -= minleft\n\t}\n}", "title": "" }, { "docid": "7e9e3bf0de7cbe5388f3408572afae98", "score": "0.5251958", "text": "func (b *BitVector) Clear() *BitVector {\n *b = 0\n\n return b\n}", "title": "" }, { "docid": "ff832419b88b89c3ae44f25a6fafc47e", "score": "0.52291226", "text": "func (me *BytesWriter) Reset() { me.Data = me.Data[0:0] }", "title": "" }, { "docid": "da9652ab9598633414aa0a63bd515445", "score": "0.5222403", "text": "func (b *Buf2) Reset() { b.b = b.b[:0] }", "title": "" }, { "docid": "cd7840dfc23949a04cd5ddd7a5eb0e88", "score": "0.52157354", "text": "func TrimLeft(s string, cutSet ...string) string {\n\tif ln := len(cutSet); ln > 0 && cutSet[0] != \"\" {\n\t\tif ln == 1 {\n\t\t\treturn strings.TrimLeft(s, cutSet[0])\n\t\t}\n\n\t\treturn strings.TrimLeft(s, strings.Join(cutSet, \"\"))\n\t}\n\n\treturn strings.TrimLeft(s, \" \")\n}", "title": "" }, { "docid": "e8bff9c009e6a4ad9db082b5e655839e", "score": "0.51746804", "text": "func (r *ByteStore) Truncate(newLen int64) error {\n\tif newLen > r.GetPosition() {\n\t\treturn errors.New(\"newLen > r.GetPosition()\")\n\t}\n\n\tif newLen < 0 {\n\t\treturn errors.New(\"newLen < 0\")\n\t}\n\n\tblockIndex := newLen >> r.blockBits\n\tnextWrite := newLen & r.blockMask\n\tif nextWrite == 0 {\n\t\tblockIndex--\n\t\tnextWrite = r.blockSize\n\t}\n\n\tr.blocks.RemoveRange(int(blockIndex+1), r.blocks.Size())\n\n\tif newLen == 0 {\n\t\tr.current = nil\n\t} else {\n\t\tvar ok bool\n\t\tr.current, ok = r.blocks.Get(int(blockIndex))\n\t\tif !ok {\n\t\t\treturn ErrItemNotFound\n\t\t}\n\t}\n\n\tif newLen != r.GetPosition() {\n\t\treturn errors.New(\"newLen != r.GetPosition()\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "0e3519b05377302e54431b4ef6171e35", "score": "0.5168571", "text": "func wipeBytes(buf []byte) {\n\t// Iterate over the slice...\n\tfor i := 0; i < len(buf); i++ {\n\t\t// ... 
setting each element to zero.\n\t\tbuf[i] = byte(0)\n\t}\n}", "title": "" }, { "docid": "b8281e4d4575b860fb6e68a8320204b2", "score": "0.51518637", "text": "func RemainingLengthEncode(x int) []byte {\n\tvar output []byte\n\tfor {\n\t\tencodedByte := x % 128\n\n\t\tx = x / 128\n\n\t\t// if there are more data to encode, set the top bit of this byte\n\n\t\tif x > 0 {\n\t\t\tencodedByte = encodedByte | 128\n\t\t}\n\t\toutput = append(output, byte(encodedByte))\n\t\tif x <= 0 {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn output\n}", "title": "" }, { "docid": "b965de5230d4b25d102971e8061cc663", "score": "0.51384234", "text": "func (r *TCPReaderStream) stripEmpty() {\n\tfor len(r.current) > 0 && len(r.current[0].Bytes) == 0 {\n\t\tr.current = r.current[1:]\n\t\tr.lossReported = false\n\t}\n}", "title": "" }, { "docid": "c5e989bd53ca0b426e930e629431d9e7", "score": "0.5117465", "text": "func cutLeft(input string, options map[string]interface{}) string {\n\toptionString, found := options[\"length\"]\n\tif found {\n\t\tlength, err := strconv.Atoi(fmt.Sprintf(\"%v\", optionString))\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"the '%s' option value '%s' for the '%s' function is not a valid integer\", \"length\", fmt.Sprintf(\"%v\", optionString), \"cutLeft\")\n\t\t\treturn input\n\t\t}\n\t\tif length < 0 {\n\t\t\tlog.Errorf(\"the '%s' option value '%s' for the '%s' function cannot be negative\", \"length\", fmt.Sprintf(\"%v\", optionString), \"cutLeft\")\n\t\t\treturn input\n\t\t}\n\t\tif len(input) > length {\n\t\t\treturn input[len(input)-length : len(input)]\n\t\t} else {\n\t\t\treturn input\n\t\t}\n\t} else {\n\t\treturn input\n\t}\n}", "title": "" }, { "docid": "62124033b6f9ce078c0f4452c69fbda7", "score": "0.5112405", "text": "func rotLeft(a []int32, d int32) []int32 {\n\tfor i := int32(0); i < d; i++ {\n\t\tfv, tr := a[0], a[1:]\n\t\ta = append(tr, fv)\n\t}\n\treturn a\n}", "title": "" }, { "docid": "81d2cfa1345d3f9868e87ca0b5ebcce5", "score": "0.5110869", "text": "func Left(text string, width int) []string {\n\treturn LeftSlice([]string{text}, width)\n}", "title": "" }, { "docid": "a56b65772a7ffdee0da118e2ec5fc7cb", "score": "0.5106326", "text": "func (w *buffer) reset() {\n\tfor i := range (*w)[:cap(*w)] {\n\t\t(*w)[i] = 0\n\t}\n\t*w = (*w)[:0]\n}", "title": "" }, { "docid": "9f16e6386a9a71f1b78541cf5f910cda", "score": "0.51010895", "text": "func removePadding(payload []byte) ([]byte, byte) {\n\tif len(payload) < 1 {\n\t\treturn payload, 0\n\t}\n\n\tpaddingLen := payload[len(payload)-1]\n\tt := uint(len(payload)-1) - uint(paddingLen)\n\t// if len(payload) >= (paddingLen - 1) then the MSB of t is zero\n\tgood := byte(int32(^t) >> 31)\n\n\ttoCheck := 255 // the maximum possible padding length\n\t// The length of the padded data is public, so we can use an if here\n\tif toCheck+1 > len(payload) {\n\t\ttoCheck = len(payload) - 1\n\t}\n\n\tfor i := 0; i < toCheck; i++ {\n\t\tt := uint(paddingLen) - uint(i)\n\t\t// if i <= paddingLen then the MSB of t is zero\n\t\tmask := byte(int32(^t) >> 31)\n\t\tb := payload[len(payload)-1-i]\n\t\tgood &^= mask&paddingLen ^ mask&b\n\t}\n\n\t// We AND together the bits of good and replicate the result across\n\t// all the bits.\n\tgood &= good << 4\n\tgood &= good << 2\n\tgood &= good << 1\n\tgood = uint8(int8(good) >> 7)\n\n\ttoRemove := good&paddingLen + 1\n\treturn payload[:len(payload)-int(toRemove)], good\n}", "title": "" }, { "docid": "3e06136eeee2d6f31423fbc40e0a4452", "score": "0.50893563", "text": "func Clear(b []byte, bit uint) {\n\tbitNumberInByte := bit % 
8\n\tbyteIndex := uint(len(b)-1) - (bit-bitNumberInByte)/8\n\ttargetBit := byte(1 << bitNumberInByte)\n\n\tb[byteIndex] &= ^targetBit\n}", "title": "" }, { "docid": "0e783dfd80f674e9297a371050645f95", "score": "0.50777376", "text": "func leftShift(d *internalDecimal, shiftPos uint) {\n\tdelta := leftShiftLookup[shiftPos].additionalDigits\n\tif prefixIsLessThan(d.digits[0:d.usedDigits], leftShiftLookup[shiftPos].limit) {\n\t\tdelta--\n\t}\n\n\tr := d.usedDigits // read position\n\tw := d.usedDigits + delta // write position\n\n\t// Pick up a digit, put down a digit.\n\tvar n uint\n\tfor r--; r >= 0; r-- {\n\t\tn += (uint(d.digits[r]) - '0') << shiftPos\n\t\tquo := n / 10\n\t\trem := n - 10*quo\n\t\tw--\n\t\tif w < len(d.digits) {\n\t\t\td.digits[w] = byte(rem + '0')\n\t\t} else if rem != 0 {\n\t\t\td.truncInd = true\n\t\t}\n\t\tn = quo\n\t}\n\n\t// Put down extra digits.\n\tfor n > 0 {\n\t\tquo := n / 10\n\t\trem := n - 10*quo\n\t\tw--\n\t\tif w < len(d.digits) {\n\t\t\td.digits[w] = byte(rem + '0')\n\t\t} else if rem != 0 {\n\t\t\td.truncInd = true\n\t\t}\n\t\tn = quo\n\t}\n\n\td.usedDigits += delta\n\tif d.usedDigits >= len(d.digits) {\n\t\td.usedDigits = len(d.digits)\n\t}\n\td.decPoint += delta\n\ttrim(d)\n}", "title": "" }, { "docid": "b90a5b38a6c2f338c3271985f3ffe9bf", "score": "0.50741684", "text": "func justLeft(s string, width int, pad int, sty *Style) string {\n\tcontentLen := len(s)\n\tonRight := width - contentLen\n\tif onRight < 0 {\n\t\tonRight = 0\n\t}\n\tswitch {\n\tcase sty == nil:\n\t\treturn fmt.Sprintf(\"%s%s%s\", spaces(pad), s, spaces(onRight+pad))\n\tdefault:\n\t\treturn fmt.Sprintf(\"%s%s%s\", spaces(pad), sty.ApplyTo(s), spaces(onRight+pad))\n\t}\n}", "title": "" }, { "docid": "7e1758987921436964c0c26bbe885348", "score": "0.5071008", "text": "func AppendGetMoreZero(dst []byte) []byte {\n\treturn appendi32(dst, 0)\n}", "title": "" }, { "docid": "dce20bb140eb3a713d569964e2290840", "score": "0.50709605", "text": "func leftShift(a *decimal, k uint) {\n\tdelta := leftcheats[k].delta\n\tif prefixIsLessThan(a.d[0:a.nd], leftcheats[k].cutoff) {\n\t\tdelta--\n\t}\n\n\tr := a.nd // read index\n\tw := a.nd + delta // write index\n\n\t// Pick up a digit, put down a digit.\n\tvar n uint\n\tfor r--; r >= 0; r-- {\n\t\tn += (uint(a.d[r]) - '0') << k\n\t\tquo := n / 10\n\t\trem := n - 10*quo\n\t\tw--\n\t\tif w < len(a.d) {\n\t\t\ta.d[w] = byte(rem + '0')\n\t\t} else if rem != 0 {\n\t\t\ta.trunc = true\n\t\t}\n\t\tn = quo\n\t}\n\n\t// Put down extra digits.\n\tfor n > 0 {\n\t\tquo := n / 10\n\t\trem := n - 10*quo\n\t\tw--\n\t\tif w < len(a.d) {\n\t\t\ta.d[w] = byte(rem + '0')\n\t\t} else if rem != 0 {\n\t\t\ta.trunc = true\n\t\t}\n\t\tn = quo\n\t}\n\n\ta.nd += delta\n\tif a.nd >= len(a.d) {\n\t\ta.nd = len(a.d)\n\t}\n\ta.dp += delta\n\ttrim(a)\n}", "title": "" }, { "docid": "e6e15d01426c978a5987a6db81a0e03b", "score": "0.50649256", "text": "func ZeroUnPadding(data []byte) ([]byte, error) {\n\tpadding := 0\n\tlength := len(data)\n\tif length == 0 {\n\t\treturn nil, errors.New(\"cannot remove padding for zero length byte array\")\n\t}\n\tfor i := length - 1; i >= 0; i-- {\n\t\t// byte is same to int8, stands for ascii code\n\t\tif data[i] == 0 {\n\t\t\tpadding++\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn data[:length-padding], nil\n}", "title": "" }, { "docid": "e6e15d01426c978a5987a6db81a0e03b", "score": "0.50649256", "text": "func ZeroUnPadding(data []byte) ([]byte, error) {\n\tpadding := 0\n\tlength := len(data)\n\tif length == 0 {\n\t\treturn nil, errors.New(\"cannot remove 
padding for zero length byte array\")\n\t}\n\tfor i := length - 1; i >= 0; i-- {\n\t\t// byte is same to int8, stands for ascii code\n\t\tif data[i] == 0 {\n\t\t\tpadding++\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn data[:length-padding], nil\n}", "title": "" }, { "docid": "988c1c21e92149b4d87ad8d7571fb3a5", "score": "0.5062044", "text": "func leftRotatebyOne(arr []int) {\n temp := arr[0]\n var i int\n for i = 0; i < len(arr)-1; i++ {\n arr[i] = arr[i+1]\n }\n arr[i] = temp\n}", "title": "" }, { "docid": "a850ee8904ddfb447db491ceaf36d548", "score": "0.5055647", "text": "func (NilDownCounter) Clear() int64 { return 0 }", "title": "" }, { "docid": "d0f92cfff222261f1c338b5a81488d4a", "score": "0.50292176", "text": "func leftChop(position int) *sync.Mutex {\n\tif position == 0 {\n\t\treturn &Chopsticks[PhilosopherCount-1]\n\t}\n\n\treturn &Chopsticks[position-1]\n}", "title": "" }, { "docid": "9372eee3c41d34de3b173881f56648d6", "score": "0.5026965", "text": "func Left(wfn WidthFn, cw int, s string) string {\n\treturn s + spaces(padSpaceNum(wfn, cw, s))\n}", "title": "" }, { "docid": "c9268c6a4be8c7dd32821c9c9888656a", "score": "0.50262845", "text": "func clearBit64(n uint64, pos uint64) uint64 {\n\tpos = 64 - pos\n\tmask := uint64(^(1 << pos))\n\tn &= mask\n\treturn n\n}", "title": "" }, { "docid": "fd607b1544f52cd3a10d7b68bebd5442", "score": "0.5023577", "text": "func (f *FieldVector) HalfLeft() *FieldVector {\n\treturn f.SubFieldVector(0, f.Size()/2)\n}", "title": "" }, { "docid": "2b9a2510319d01929f83f2c987374f46", "score": "0.50204724", "text": "func (t *ipTree) trimLeft(top *ipTree) *ipTree {\n\tif t == nil {\n\t\treturn nil\n\t}\n\n\tif ContainsNet(top.net, t.net) {\n\t\treturn t.left.trimLeft(top)\n\t}\n\tt.setRight(t.right.trimLeft(top))\n\treturn t\n}", "title": "" }, { "docid": "324949521a8040ccd705aeae4cc0d598", "score": "0.50004655", "text": "func (node *Node) rotateLeft(t *Tree) *Node {\n\tnode = node._copy()\n\tr := node.getRightNode(t)\n\tremoveOrphan(t, r)\n\t_r := r._copy()\n\n\t_rlHash, _rlCached := _r.leftHash, _r.leftNode\n\t_r.leftHash, _r.leftNode = node.hash, node\n\tnode.rightHash, node.rightNode = _rlHash, _rlCached\n\n\tnode.calcHeightAndSize(t)\n\t_r.calcHeightAndSize(t)\n\n\treturn _r\n}", "title": "" }, { "docid": "4016dfe6517d1e7afd1df22a096e5cd6", "score": "0.4978287", "text": "func (c *Cipher) ZeroPadding(src []byte, blockSize int) []byte {\n\tpadding := blockSize - len(src)%blockSize\n\tdst := bytes.Repeat([]byte{0}, padding)\n\treturn append(src, dst...)\n}", "title": "" }, { "docid": "a998f04a583640202e6ce80492ea97d9", "score": "0.49765143", "text": "func (buffer *Buffer) Clear() {\n\tbzero.Uint8(buffer.slice)\n\tbuffer.a = 0\n\tbuffer.b = 0\n}", "title": "" }, { "docid": "abda054d3bebcd2fadd2e026e3b1a41a", "score": "0.49758062", "text": "func (c *Client) LeftPop(key string) (string, error) {\n\tworkItemId, err := c.Redis.LPop(c.Context, key).Result()\n\tif err != nil {\n\t\tif err == redis.Nil {\n\t\t\treturn \"\", err\n\t\t} else {\n\t\t\treturn \"\", errors.Wrapf(err, \"%s could not retrieve value\\n\", key)\n\t\t}\n\t}\n\treturn workItemId, nil\n}", "title": "" }, { "docid": "b97a6b1cc67552889fee62094594ae74", "score": "0.49584302", "text": "func unpad(data []byte, blockSize int) (ret []byte) {\r\n\tbytesToRemove := data[len(data) - 1]\r\n\treturn data[:len(data) - int(bytesToRemove)]\r\n}", "title": "" }, { "docid": "4426f7c2c7bf23e616d19a32ecba0abc", "score": "0.49574718", "text": "func (rb *ringBuf) Reset() {\n\t// 
atomic.StoreUint64((*uint64)(unsafe.Pointer(&rb.head)), MaxUint64)\n\tatomic.StoreUint32(&rb.head, MaxUint32)\n\tatomic.StoreUint32(&rb.tail, MaxUint32)\n\tfor i := 0; i < (int)(rb.cap); i++ {\n\t\trb.data[i].readWrite = 0 // bit 0: readable, bit 1: writable\n\t}\n\t//atomic.StoreUint64((*uint64)(unsafe.Pointer(&rb.head)), 0)\n\tatomic.StoreUint32(&rb.head, 0)\n\tatomic.StoreUint32(&rb.tail, 0)\n}", "title": "" }, { "docid": "e28746a24777a238c02ab6a6854a9047", "score": "0.49574554", "text": "func (gl *Glitch) ChannelShiftLeft() {\n\tb := gl.Bounds\n\n\tfor y := b.Min.Y; y < b.Max.Y; y++ {\n\t\tfor x := b.Min.X; x < b.Max.X; x++ {\n\t\t\tr, g, b, a := gl.Output.At(x, y).RGBA()\n\t\t\tsc := color.RGBA{\n\t\t\t\tR: uint8(g),\n\t\t\t\tG: uint8(b),\n\t\t\t\tB: uint8(r),\n\t\t\t\tA: uint8(a),\n\t\t\t}\n\t\t\tgl.Output.Set(x, y, sc)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "73de7fcb3109d743e1cc72f5409eaaf5", "score": "0.49439427", "text": "func (d *Deque) PushLeft(item int) {\n\ttemp := []int{item}\n\td.Items = append(temp, d.Items...)\n}", "title": "" }, { "docid": "e744a321557f541c473f526f6e8e6a7e", "score": "0.49419788", "text": "func RightPadBytes(slice []byte, l int) []byte {\n\tif l <= len(slice) {\n\t\treturn slice\n\t}\n\n\tpadded := make([]byte, l)\n\tcopy(padded, slice)\n\n\treturn padded\n}", "title": "" }, { "docid": "e7d439d6d37f890405b77fd155214542", "score": "0.49407342", "text": "func Test_DelAndLen(t *testing.T) {\n\ts := make([]int, 0, 10)\n\tt.Log(len(s))\n\tt.Log(cap(s))\n\ts = append(s, 10)\n\tt.Log(len(s))\n\tt.Log(cap(s))\n\ts = s[:0]\n\tt.Log(len(s)) // 0\n\tt.Log(cap(s)) // 10\n\n\ts = append(s, 10)\n\ts = append(s, 20)\n\ts = append(s, 30)\n\ti := 1\n\ts = append(s[:i], s[i+1:]...)\n\tt.Log(len(s)) // 2\n\tt.Log(cap(s)) // 10\n\tt.Log(s)\n}", "title": "" }, { "docid": "d3cafa0cc23e48dd5ae15a6046f60bff", "score": "0.49333084", "text": "func RotateToTheLeft(slice []int, times int) {\n\tsliceSize := len(slice)\n\ttimes %= sliceSize\n\tif times <= 0 || sliceSize < 2 {\n\t\treturn\n\t}\n\n\tfor row := 0; row < gcd(sliceSize, times); row++ {\n\t\titemToPutAtTheFinalStep := slice[row]\n\t\tpreviousMovedIndex := row\n\t\tfor true {\n\t\t\tnextIndexToMoveNtimesLeft := previousMovedIndex + times\n\t\t\tif nextIndexToMoveNtimesLeft >= sliceSize {\n\t\t\t\tnextIndexToMoveNtimesLeft -= sliceSize\n\t\t\t}\n\t\t\tif nextIndexToMoveNtimesLeft == row {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tslice[previousMovedIndex] = slice[nextIndexToMoveNtimesLeft]\n\t\t\tpreviousMovedIndex = nextIndexToMoveNtimesLeft\n\t\t}\n\t\tslice[previousMovedIndex] = itemToPutAtTheFinalStep\n\t}\n}", "title": "" }, { "docid": "c6c3198ed2d108143c0434fea038bead", "score": "0.49296945", "text": "func (m *SchemaMutator) ClearMinLength() *SchemaMutator {\n\tm.lock.Lock()\n\tdefer m.lock.Unlock()\n\tm.proxy.minLength = nil\n\treturn m\n}", "title": "" }, { "docid": "e946fa21c7d493efd64d2547d4e4b242", "score": "0.4917375", "text": "func leftShiftFaulty(d *internalDecimal, shiftPos uint) {\n\tr := d.usedDigits \t\t\t // read position\n\tw := d.usedDigits + int(shiftPos) // write position\n\n\t// Pick up a digit, put down a digit.\n\tvar n uint\n\tfor r--; r >= 0; r-- {\n\t\tn += (uint(d.digits[r]) - '0') << shiftPos\n\t\tquo := n / 10\n\t\trem := n - 10*quo\n\t\tw--\n\t\tif w < len(d.digits) {\n\t\t\td.digits[w] = byte(rem + '0')\n\t\t} else if rem != 0 {\n\t\t\td.truncInd = true\n\t\t}\n\t\tn = quo\n\t}\n\n\t// Put down extra digits.\n\tfor n > 0 {\n\t\tquo := n / 10\n\t\trem := n - 10*quo\n\t\tw--\n\t\tif w < len(d.digits) 
{\n\t\t\td.digits[w] = byte(rem + '0')\n\t\t} else if rem != 0 {\n\t\t\td.truncInd = true\n\t\t}\n\t\tn = quo\n\t}\n\n\td.usedDigits += int(shiftPos-n)\n\tif d.usedDigits >= len(d.digits) {\n\t\td.usedDigits = len(d.digits)\n\t}\n\td.decPoint += int(shiftPos-n)\n\ttrim(d)\n}", "title": "" }, { "docid": "58cd1737685902b2324dcebbaf6482cc", "score": "0.49014884", "text": "func (arr *BitArray) Reset() {\n\tarr.mux.Lock()\n\tdefer arr.mux.Unlock()\n\n\tarr.data = make([]byte, arr.size/8+1)\n}", "title": "" }, { "docid": "727126c5ff174d7aa7aaba4a62637789", "score": "0.48980504", "text": "func (rb *RingBuffer) Clear() {\n\trb.buf = make([]interface{}, rb.size)\n\trb.length = 0\n\trb.writer = 0\n\trb.reader = 0\n}", "title": "" }, { "docid": "a07330028051a3acc6f1a99be2d04dc3", "score": "0.48961368", "text": "func RightShift(data []byte, shift uint64) []byte {\n\tif shift == 0 {\n\t\treturn data\n\t}\n\tvar dataLength = len(data)\n\tresult := make([]byte, dataLength)\n\tif shift > byteLength {\n\t\tcopy(result, data[1:])\n\t\tresult = RightShift(result, shift-byteLength)\n\t} else {\n\t\tfor i := dataLength - 1; i >= 0; i-- {\n\t\t\tif i > 0 {\n\t\t\t\tresult[i-1] = data[i] >> (byteLength - shift)\n\t\t\t}\n\t\t\tresult[i] = result[i] | (data[i] << shift)\n\t\t}\n\t}\n\treturn result\n}", "title": "" }, { "docid": "b867387210390bab387dec9600413201", "score": "0.48956835", "text": "func (s *RandomDataset) Left() int {\n\treturn int(s.n - (s.ind + 1))\n}", "title": "" }, { "docid": "53289d6b74b395165cededfc21fb0334", "score": "0.48707357", "text": "func (o *Struct) RotateLeft() Object {\n\t_rotate(angleStep, o)\n\treturn o\n}", "title": "" }, { "docid": "82e50319f00ce5da42de2098421f3f05", "score": "0.48705593", "text": "func (d *Deque) PopLeft() int {\n\tleft := d.Items[0]\n\td.Items = d.Items[1:]\n\treturn left\n}", "title": "" }, { "docid": "c23ece5a75345acc3d3d6fbce3dfcc2d", "score": "0.4862458", "text": "func (id ID) LeadingZerosLen() int {\n\tfor i := 0; i < len(id); i++ {\n\t\tfor j := 0; j < 8; j++ {\n\t\t\tif (id[i]>>uint8(7-j))&0x1 != 0 {\n\t\t\t\treturn i*8 + j\n\t\t\t}\n\t\t}\n\t}\n\treturn len(id) * 8\n}", "title": "" }, { "docid": "5a95897a72c38dd5d0fe7dd604581453", "score": "0.48606214", "text": "func Left(n int) string {\n\tif n <= 0 {\n\t\tpanic(\"invalid argument\")\n\t}\n\treturn escape(strconv.Itoa(n) + \"D\")\n}", "title": "" }, { "docid": "949149a76cfdd8701d114935aaf1b158", "score": "0.48508298", "text": "func WReapLeft(separator string, text ...string) string {\n\tfor i, t := range text {\n\t\ttext[i] = strings.Trim(t, separator)\n\t}\n\treturn WrapLeft(Wrapper(separator, text...), separator)\n}", "title": "" }, { "docid": "c836198cc7ff5512c4cf7f855518bd3c", "score": "0.48441562", "text": "func (com *command) removeTrailingZero(data []byte) []byte {\n\tif l := len(data); l > 0 && data[l-1] == 0 {\n\t\tdata = data[:l-1]\n\t}\n\treturn data\n}", "title": "" }, { "docid": "da6faf620c761b99559362caa19501d1", "score": "0.48335382", "text": "func Left(s ListBoxState) int {\n\treturn horizontal(s.Selected, s.Items.Len(), -s.Height)\n}", "title": "" }, { "docid": "19a8454bc07a79c3a76ec77b125ed0b6", "score": "0.48302808", "text": "func (r *RingBuffer) Reset() {\n\tr.mu.Lock()\n\tdefer r.mu.Unlock()\n\n\tr.r = 0\n\tr.w = 0\n\tr.isFull = false\n}", "title": "" }, { "docid": "f1bc80a53902d574afbf7228f6f4935f", "score": "0.48196742", "text": "func (c *ColInt256) Reset() {\n\t*c = (*c)[:0]\n}", "title": "" }, { "docid": "734d523b476c99d8cfb3527b642d9b4d", "score": "0.48164082", "text": "func 
(b *ringBuffer) Clear(n uint64) error {\n\tif b.writeCapacity+n > b.length {\n\t\treturn errors.New(\"Can't clear that much space\")\n\t}\n\n\tb.writeCapacity += n\n\treturn nil\n}", "title": "" }, { "docid": "0f1521bb1c3abd5a0dfb5b8e5aba50ed", "score": "0.481046", "text": "func BarTrimLeft() BarOption {\n\treturn func(s *bState) {\n\t\ts.trimLeftSpace = true\n\t}\n}", "title": "" }, { "docid": "0d0acd7a15f2fc170c8479d11f1a6307", "score": "0.4805099", "text": "func Zero(data []byte) {\n\tfor i := 0; i < len(data); i++ {\n\t\tdata[i] = 0\n\t}\n}", "title": "" }, { "docid": "2ebe5605a65106f7c5aa00a91ff6b137", "score": "0.48044482", "text": "func (list *UnrolledList_uint8) Clear() {\n\tlist.items = nil\n\tlist.size = 0\n}", "title": "" }, { "docid": "70491fd1b7789e406fd45cb2aba5da7b", "score": "0.47996888", "text": "func (c *ColDate32) Reset() {\n\t*c = (*c)[:0]\n}", "title": "" }, { "docid": "7abea3a858a56cb4840df024d88bf640", "score": "0.47971678", "text": "func (ba *BitArray) Reset(buf []byte) {\n\tif buf == nil {\n\t\tba.payload = ba.payload[:0]\n\t\treturn\n\t}\n\tba.payload = buf\n}", "title": "" }, { "docid": "e63d70482f041f756f092a9fc7a1f12f", "score": "0.47951913", "text": "func RemoveTrailingZero(data []byte) []byte { com := &command{}; return com.removeTrailingZero(data) }", "title": "" }, { "docid": "ddc8108e18a613dab3709071ed3c109a", "score": "0.47806504", "text": "func (s *Bits) UnsetBefore(index uint) {\n\t*s &= ^(1<<index - 1)\n}", "title": "" } ]
e098936bfd5ba81014c4400f9b1b6e0c
UpdateNodeInfo is a paid mutator transaction binding the contract method 0xc5ea6ea1. Solidity: function updateNodeInfo(string Name, string Email, string Location, string Url) returns()
[ { "docid": "c8ecf8372d01660b60bdf81a3824f936", "score": "0.8211598", "text": "func (_Governance *GovernanceTransactorSession) UpdateNodeInfo(Name string, Email string, Location string, Url string) (*types.Transaction, error) {\n\treturn _Governance.Contract.UpdateNodeInfo(&_Governance.TransactOpts, Name, Email, Location, Url)\n}", "title": "" } ]
[ { "docid": "7db3f432dd840ce799c922ec8b98cce9", "score": "0.8239588", "text": "func (_Governance *GovernanceTransactor) UpdateNodeInfo(opts *bind.TransactOpts, Name string, Email string, Location string, Url string) (*types.Transaction, error) {\n\treturn _Governance.contract.Transact(opts, \"updateNodeInfo\", Name, Email, Location, Url)\n}", "title": "" }, { "docid": "2fc53015e5aaac044f0f59a3caddd09e", "score": "0.82114524", "text": "func (_Governance *GovernanceSession) UpdateNodeInfo(Name string, Email string, Location string, Url string) (*types.Transaction, error) {\n\treturn _Governance.Contract.UpdateNodeInfo(&_Governance.TransactOpts, Name, Email, Location, Url)\n}", "title": "" }, { "docid": "9b2ea4f7b9c31ae0f8749c297288968f", "score": "0.54645884", "text": "func (pm *ProtocolManager) NodeInfo() *NodeInfo {\n\tnumOfBlocks := pm.store.GetLatestBlockIndex()\n\treturn &NodeInfo{\n\t\tNetwork: pm.net.NetworkID,\n\t\tGenesis: common.Hash(*pm.store.GetGenesisHash()),\n\t\tEpoch: pm.store.GetEpoch(),\n\t\tNumOfBlocks: numOfBlocks,\n\t}\n}", "title": "" }, { "docid": "f8edd8cc9d3ddd050da5215ea727bd6b", "score": "0.54004973", "text": "func (node *node) updateNodeInfo() {\n\tperiodUpdateTime := config.DEFAULTGENBLOCKTIME / TIMESOFUPDATETIME\n\tticker := time.NewTicker(time.Second * (time.Duration(periodUpdateTime)) * 2)\n\tfor {\n\t\tselect {\n\t\tcase <-ticker.C:\n\t\t\tnode.SendPingToNbr()\n\t\t\tnode.SyncBlks()\n\t\t\tnode.HeartBeatMonitor()\n\t\t}\n\t}\n\t// TODO when to close the timer\n\t//close(quit)\n}", "title": "" }, { "docid": "e482b31e3152e2fd898e1587ad1095ae", "score": "0.53837484", "text": "func (g *Graph) Update(name, unit, color, timezone string, purgeCacheUrls []string, selfSufficient string, isSecret bool, publishOptionalData bool) (*Result, error) {\n\tparam, err := g.createUpdateRequestParameter(name, unit, color, timezone, purgeCacheUrls, selfSufficient, isSecret, publishOptionalData)\n\tif err != nil {\n\t\treturn &Result{}, errors.Wrapf(err, \"failed to create graph update parameter\")\n\t}\n\n\treturn doRequestAndParseResponse(param)\n}", "title": "" }, { "docid": "2e9d9a9a043e82e443ec7ee10daafa42", "score": "0.53378737", "text": "func (mfs *MemFS) Update(node *Node, newInfo os.FileInfo) {\n\tif node == nil || newInfo == nil {\n\t\treturn\n\t}\n\n\terr := node.update(newInfo, mfs.withContent)\n\tif err != nil {\n\t\tlog.Println(\"lib/memfs: Update: \" + err.Error())\n\t}\n}", "title": "" }, { "docid": "0d8eebf51a254d832b7dab0523458325", "score": "0.533162", "text": "func (node *node) updateNodeInfo() {\n\tticker := time.NewTicker(time.Second * PERIODUPDATETIME-2)\n\tquit := make(chan struct{})\n\tfor {\n\t\tselect {\n\t\tcase <-ticker.C:\n\t\t\tnode.SendPingToNbr()\n\t\t\tnode.GetBlkHdrs()\n\t\t\tnode.SyncBlk()\n\t\t\tnode.HeartBeatMonitor()\n\t\tcase <-quit:\n\t\t\tticker.Stop()\n\t\t\treturn\n\t\t}\n\t}\n\t// TODO when to close the timer\n\t//close(quit)\n}", "title": "" }, { "docid": "7f81431aa2729e5150d069c06797e929", "score": "0.53302884", "text": "func (p *proxier) OnNodeUpdate(oldNode, node *corev1.Node) {\n\tif node.Name != p.hostname {\n\t\treturn\n\t}\n\n\tif reflect.DeepEqual(p.nodeLabels, node.Labels) {\n\t\treturn\n\t}\n\n\tp.serviceEndpointsMapsMutex.Lock()\n\tp.nodeLabels = map[string]string{}\n\tfor k, v := range node.Labels {\n\t\tp.nodeLabels[k] = v\n\t}\n\tp.serviceEndpointsMapsMutex.Unlock()\n\tklog.V(4).InfoS(\"Updated proxier Node labels\", \"labels\", node.Labels)\n\n\tp.syncProxyRules()\n}", "title": "" }, { "docid": 
"bc6580b3f50ff1605a468f16e8b93f5c", "score": "0.52797824", "text": "func (h *nodes) Update(a *api.Node) (*api.Node, error) {\n\t// When creating or updating a node, initialize global defaults if they\n\t// are not yet initialized.\n\tif err := h.c.EnsureInitialized(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn a, h.c.update(*a, h)\n}", "title": "" }, { "docid": "6b71b4a6941b0a40bd9aa045077f8e71", "score": "0.52430654", "text": "func NodeInfoUpdated() bool {\n\tfor {\n\t\ttime.Sleep(5 * time.Second)\n\t\tNodeMux.Lock()\n\t\tif len(NodeToRTND) > 0 {\n\t\t\tNodeMux.Unlock()\n\t\t\treturn true\n\t\t}\n\t\tNodeMux.Unlock()\n\t}\n}", "title": "" }, { "docid": "8d674f8890ebdff461a79ec59342b30b", "score": "0.5174", "text": "func NodeInfo(cfg config.ServerConfig) (*pb.NodeInfo, error) {\n\tnode, err := nodeutil.GetNodeInfo(cfg.Node)\n\tnode.Addr = pb.Addr{\n\t\tBindAddr: cfg.Grpc.Addr,\n\t}\n\tif err != nil {\n\t\treturn node, err\n\t}\n\treturn node, nil\n}", "title": "" }, { "docid": "9204cc462b8dfa8fb7a0cd6ec7b0b7e7", "score": "0.51681536", "text": "func UpdatePingNode(uri string, executorID string) error {\n\tlog.Debugln(\"UpdatePingNode ENTER\")\n\tlog.Debugln(\"URI:\", uri)\n\tlog.Debugln(\"ExecutorID:\", executorID)\n\n\turl := uri + \"/api/node/ping\"\n\n\tstate := &types.PingNode{\n\t\tAcknowledged: false,\n\t\tExecutorID: executorID,\n\t}\n\n\tresponse, err := json.MarshalIndent(state, \"\", \" \")\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to marshall state object:\", err)\n\t\tlog.Debugln(\"UpdatePingNode LEAVE\")\n\t\treturn err\n\t}\n\n\treq, err := http.NewRequest(\"POST\", url, bytes.NewBuffer(response))\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to create new HTTP request:\", err)\n\t\tlog.Debugln(\"UpdatePingNode LEAVE\")\n\t\treturn err\n\t}\n\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\n\tclient := &http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to make HTTP call:\", err)\n\t\tlog.Debugln(\"UpdatePingNode LEAVE\")\n\t\treturn err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tbody, err := ioutil.ReadAll(io.LimitReader(resp.Body, 1048576))\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to read the HTTP Body:\", err)\n\t\tlog.Debugln(\"UpdatePingNode LEAVE\")\n\t\treturn err\n\t}\n\n\tlog.Debugln(\"response Status:\", resp.Status)\n\tlog.Debugln(\"response Headers:\", resp.Header)\n\tlog.Debugln(\"response Body:\", string(body))\n\n\tvar newstate types.PingNode\n\terr = json.Unmarshal(body, &newstate)\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to unmarshal the UpdateState object:\", err)\n\t\tlog.Debugln(\"UpdatePingNode LEAVE\")\n\t\treturn err\n\t}\n\n\tlog.Debugln(\"Acknowledged:\", newstate.Acknowledged)\n\tlog.Debugln(\"ExecutorID:\", newstate.ExecutorID)\n\n\tif !newstate.Acknowledged {\n\t\tlog.Errorln(\"Failed to receive an acknowledgement\")\n\t\tlog.Debugln(\"UpdatePingNode LEAVE\")\n\t\treturn ErrStateChangeNotAcknowledged\n\t}\n\n\tlog.Debugln(\"UpdatePingNode Succeeded\")\n\tlog.Debugln(\"UpdatePingNode LEAVE\")\n\treturn nil\n}", "title": "" }, { "docid": "f55697c7e0c19333e1e30a59150d92ff", "score": "0.51572627", "text": "func (m *Metrics) SendNodeInfo(nm *types.NodeMetrics) {\n\tnodename := nm.Name\n\tpodname := nm.Podname\n\tmemory := nm.Memory\n\tmemoryUsed := nm.MemoryUsed\n\tstorage := nm.Storage\n\tstorageUsed := nm.StorageUsed\n\tcpuUsed := nm.CPUUsed\n\n\tif m.MemoryCapacity != nil {\n\t\tm.MemoryCapacity.WithLabelValues(podname, nodename).Set(memory)\n\t}\n\n\tif m.MemoryUsed != nil 
{\n\t\tm.MemoryUsed.WithLabelValues(podname, nodename).Set(memoryUsed)\n\t}\n\n\tif m.StorageCapacity != nil {\n\t\tm.StorageCapacity.WithLabelValues(podname, nodename).Set(storage)\n\t}\n\n\tif m.StorageUsed != nil {\n\t\tm.StorageUsed.WithLabelValues(podname, nodename).Set(storageUsed)\n\t}\n\n\tif m.CPUUsed != nil {\n\t\tm.CPUUsed.WithLabelValues(podname, nodename).Set(cpuUsed)\n\t}\n\n\tcleanedNodeName := utils.CleanStatsdMetrics(nodename)\n\tfor cpuid, value := range nm.CPU {\n\t\tval := float64(value)\n\n\t\tif m.CPUMap != nil {\n\t\t\tm.CPUMap.WithLabelValues(podname, nodename, cpuid).Set(val)\n\t\t}\n\n\t\tif m.StatsdAddr == \"\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tif err := m.gauge(fmt.Sprintf(cpuMap, cleanedNodeName, cpuid), val); err != nil {\n\t\t\tlog.Errorf(nil, \"[SendNodeInfo] Error occurred while sending cpu data to statsd: %v\", err) //nolint\n\t\t}\n\t}\n\n\tif m.StatsdAddr == \"\" {\n\t\treturn\n\t}\n\n\tif err := m.gauge(fmt.Sprintf(memStats, cleanedNodeName), memory); err != nil {\n\t\tlog.Errorf(nil, \"[SendNodeInfo] Error occurred while sending memory data to statsd: %v\", err) //nolint\n\t}\n\n\tif err := m.gauge(fmt.Sprintf(storageStats, cleanedNodeName), storage); err != nil {\n\t\tlog.Errorf(nil, \"[SendNodeInfo] Error occurred while sending storage data to statsd: %v\", err) //nolint\n\t}\n\n\tif err := m.gauge(fmt.Sprintf(memUsedStats, cleanedNodeName), memoryUsed); err != nil {\n\t\tlog.Errorf(nil, \"[SendNodeInfo] Error occurred while sending memory used data to statsd: %v\", err) //nolint\n\t}\n\n\tif err := m.gauge(fmt.Sprintf(storageUsedStats, cleanedNodeName), storageUsed); err != nil {\n\t\tlog.Errorf(nil, \"[SendNodeInfo] Error occurred while sending storage used data to statsd: %v\", err) //nolint\n\t}\n\n\tif err := m.gauge(fmt.Sprintf(cpuUsedStats, cleanedNodeName), cpuUsed); err != nil {\n\t\tlog.Errorf(nil, \"[SendNodeInfo] Error occurred while sending cpu used data to statsd: %v\", err) //nolint\n\t}\n}", "title": "" }, { "docid": "28622a918a8fcec176cf143c8cdbf055", "score": "0.51424575", "text": "func (tc *NoExecuteTaintManager) NodeUpdated(oldNode *v1.Node, newNode *v1.Node) {\n\tnodeName := \"\"\n\toldTaints := []v1.Taint{}\n\tif oldNode != nil {\n\t\tnodeName = oldNode.Name\n\t\toldTaints = getNoExecuteTaints(oldNode.Spec.Taints)\n\t}\n\n\tnewTaints := []v1.Taint{}\n\tif newNode != nil {\n\t\tnodeName = newNode.Name\n\t\tnewTaints = getNoExecuteTaints(newNode.Spec.Taints)\n\t}\n\n\tif oldNode != nil && newNode != nil && helper.Semantic.DeepEqual(oldTaints, newTaints) {\n\t\treturn\n\t}\n\tupdateItem := nodeUpdateItem{\n\t\tnodeName: nodeName,\n\t}\n\n\ttc.nodeUpdateQueue.Add(updateItem)\n}", "title": "" }, { "docid": "d3f5d78d3c7b5208203d5de2de802c80", "score": "0.5129432", "text": "func nodeInfo(w http.ResponseWriter, r *http.Request) {\n\tresult := New()\n\n\tnode, err := ethClient.Call(\"admin_nodeInfo\")\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tresult.State = 0\n\t\tresult.Content[\"error\"] = err\n\t\tRender.JSON(w, http.StatusOK, result)\n\t\treturn\n\t}\n\n\tRender.JSON(w, http.StatusOK, nodeFormat(node))\n\treturn\n}", "title": "" }, { "docid": "e1e37151c28e62e493913c18c9957ece", "score": "0.5099382", "text": "func (client *CommitClient) NodeInfo(ctx context.Context) (*pb.Info, error) {\n\tvar span zipkin.Span\n\tif client.Tracer != nil {\n\t\tspan, ctx = client.Tracer.StartSpanFromContext(ctx, \"NodeInfo\")\n\t\tdefer span.Finish()\n\t}\n\treturn client.Connection.NodeInfo(ctx, &empty.Empty{})\n}", "title": "" }, { "docid": 
"448a1212ee748a8c2fb3c070d39dfb54", "score": "0.5082021", "text": "func (n *Node) SetInfo(status *rpctypes.ResponseStatus, netinfo *rpctypes.ResponseNetInfo) {\n\tn.LastSeen = time.Now()\n\tn.Network = status.Network\n\tn.BlockHeight = status.LatestBlockHeight\n\tn.NetInfo = netinfo\n\t// n.Validator\n}", "title": "" }, { "docid": "d812824e1745aa86af199cf3173911c5", "score": "0.50750375", "text": "func notifyNodeUpdate(nodeToUpdate *pb.Node, arrival bool) error {\n\tvar err error\n\tnodeToUpdate.NotifyOthers = DONT_NOTIFY_OTHERS\n\tfor addr := range nodeMap {\n\t\tif addr == gcprAddr || nodeToUpdate.Grpc_IP == addr {\n\t\t\tcontinue\n\t\t}\n\t\tconn, err := grpc.Dial(addr, grpc.WithInsecure())\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Failed to dial to %s with %v\", addr, err)\n\t\t}\n\t\tdefer conn.Close()\n\t\tc := pb.NewRendezvousClient(conn)\n\t\tctx, cancel := context.WithTimeout(context.Background(), time.Second)\n\t\tdefer cancel()\n\t\tif arrival {\n\t\t\t_, err = c.NodeArrival(ctx, nodeToUpdate)\n\t\t} else {\n\t\t\t_, err = c.NodeRemoval(ctx, nodeToUpdate)\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error from %s of %v\", addr, err)\n\t\t}\n\t}\n\t//only returns the last error unfortunately\n\treturn err\n}", "title": "" }, { "docid": "a0160b166c80b070b15beb0376238c87", "score": "0.5049052", "text": "func updateInfo(w http.ResponseWriter, r *http.Request) {\n\t// Request to this endpoint are assumed to send a full valid deviceInfo\n\tparams := mux.Vars(r)\n\tid := params[\"id\"]\n\tlog.Printf(\"Received POST request on /info/%s \", id)\n\tvar info *deviceInfo\n\terr := json.NewDecoder(r.Body).Decode(&info)\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\terrorString := fmt.Sprintf(\"Error decoding the JSON body : %s\", err)\n\t\tlog.Printf(errorString)\n\t\tjson.NewEncoder(w).Encode(errorString)\n\t} else {\n\t\t// Check if name exists and copy it so it can be displayed in the frontend\n\t\t_, ok := INFOMAP[id]\n\t\tvar name string\n\t\tif ok {\n\t\t\tname = INFOMAP[id].Name\n\t\t} else {\n\t\t\tname = \"noName\"\n\t\t}\n\t\tinfo.Name = name\n\t\t// Assign the new info, I guess this is a memory leak (the address is local to the scope) but it seems to work\n\t\tINFOMAP[id] = info\n\t\tjson.NewEncoder(w).Encode(info)\n\t}\n\tpersistToDisk()\n}", "title": "" }, { "docid": "96be53e26b1fecc097570c251e3dd60a", "score": "0.5045628", "text": "func (proxier *Proxier) OnNodeUpdate(oldNode, node *v1.Node) {\n\tif node.Name != proxier.hostname {\n\t\tklog.ErrorS(nil, \"Received a watch event for a node that doesn't match the current node\", \"eventNode\", node.Name, \"currentNode\", proxier.hostname)\n\t\treturn\n\t}\n\n\tif reflect.DeepEqual(proxier.nodeLabels, node.Labels) {\n\t\treturn\n\t}\n\n\tproxier.mu.Lock()\n\tproxier.nodeLabels = map[string]string{}\n\tfor k, v := range node.Labels {\n\t\tproxier.nodeLabels[k] = v\n\t}\n\tproxier.mu.Unlock()\n\tklog.V(4).InfoS(\"Updated proxier node labels\", \"labels\", node.Labels)\n\n\tproxier.Sync()\n}", "title": "" }, { "docid": "f71bfbd169a5e7c68bef10d435352643", "score": "0.50166875", "text": "func (service *Service) UpdateInfo(settings *portainer.TunnelServerInfo) error {\n\treturn service.connection.UpdateObject(BucketName, []byte(infoKey), settings)\n}", "title": "" }, { "docid": "89e698c9c7295f7b26c270726f801f30", "score": "0.50166154", "text": "func (ns *NodeServer) NodeGetInfo(context.Context, *csi.NodeGetInfoRequest) (*csi.NodeGetInfoResponse, error) {\n\treturn &csi.NodeGetInfoResponse{}, nil\n}", "title": "" }, { 
"docid": "48c2fa5acf81346d2584998261316455", "score": "0.50061905", "text": "func (sw *Switch) SetNodeInfo(nodeInfo *NodeInfo) {\n\tsw.nodeInfo = nodeInfo\n}", "title": "" }, { "docid": "9c1d9fb9dcce52baa3ca9422372e7611", "score": "0.5002507", "text": "func (kv *KvStore) SetNodeInfo(nodeID string, persona int, state int) error {\n\tlog.Debugln(\"SetNodeInfo ENTER\")\n\tlog.Debugln(\"persona:\", persona)\n\tlog.Debugln(\"state:\", state)\n\n\trootConfig := kv.RootKey + \"/configuration\"\n\tkv.Store.Put(rootConfig, []byte(\"\"), nil)\n\trootNode := kv.RootKey + \"/configuration/\" + nodeID\n\tkv.Store.Put(rootNode, []byte(\"\"), nil)\n\n\tif persona != -1 {\n\t\tlog.Debugln(\"Changing persona to\", persona)\n\t\terr := kv.Store.Put(rootNode+\"/persona\", []byte(strconv.Itoa(persona)), nil)\n\t\tif err != nil {\n\t\t\tlog.Errorln(\"Failed to set version on store:\", err)\n\t\t\tlog.Debugln(\"SetNodeInfo LEAVE\")\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\tlog.Debugln(\"Skip changing persona\")\n\t}\n\tif state != -1 {\n\t\tlog.Debugln(\"Changing state to\", state)\n\t\terr := kv.Store.Put(rootNode+\"/state\", []byte(strconv.Itoa(state)), nil)\n\t\tif err != nil {\n\t\t\tlog.Errorln(\"Failed to set version on store:\", err)\n\t\t\tlog.Debugln(\"SetNodeInfo LEAVE\")\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\tlog.Debugln(\"Skip changing state\")\n\t}\n\n\tswitch persona {\n\tcase types.PersonaMdmPrimary:\n\t\tlog.Debugln(\"Saving primary MDM node ID:\", nodeID)\n\t\terr := kv.Store.Put(rootConfig+\"/primary\", []byte(nodeID), nil)\n\t\tif err != nil {\n\t\t\tlog.Errorln(\"Failed to set primary on store:\", err)\n\t\t\tlog.Debugln(\"SetNodeInfo LEAVE\")\n\t\t\treturn err\n\t\t}\n\n\tcase types.PersonaMdmSecondary:\n\t\tlog.Debugln(\"Saving secondary MDM node ID:\", nodeID)\n\t\terr := kv.Store.Put(rootConfig+\"/secondary\", []byte(nodeID), nil)\n\t\tif err != nil {\n\t\t\tlog.Errorln(\"Failed to set secondary on store:\", err)\n\t\t\tlog.Debugln(\"SetNodeInfo LEAVE\")\n\t\t\treturn err\n\t\t}\n\n\tcase types.PersonaTb:\n\t\tlog.Debugln(\"Saving tiebreaker MDM node ID:\", nodeID)\n\t\terr := kv.Store.Put(rootConfig+\"/tiebreaker\", []byte(nodeID), nil)\n\t\tif err != nil {\n\t\t\tlog.Errorln(\"Failed to set tiebreaker on store:\", err)\n\t\t\tlog.Debugln(\"SetNodeInfo LEAVE\")\n\t\t\treturn err\n\t\t}\n\t}\n\n\tlog.Debugln(\"SetNodeInfo Succeeded\")\n\tlog.Debugln(\"SetNodeInfo LEAVE\")\n\treturn nil\n}", "title": "" }, { "docid": "8f1b572d935c0990c100c4ceb0ce6ade", "score": "0.49901077", "text": "func AddNodeInfo(w http.ResponseWriter, r *http.Request) {\n\tif (*r).Method == \"OPTIONS\" {\n\t\treturn\n\t}\n\tneighbourMap := db.GetDataBase()\n\tvar NodeData model.NodeData\n\treqBody, err := ioutil.ReadAll(r.Body)\n\n\tresp := model.Response{}\n\t// // set Default value\n\tresp.Default()\n\n\tif err != nil {\n\t\tlog.Println(\"ERROR: Payload Error\", err)\n\t\tresp.BadRequest()\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t} else {\n\t\terr := json.Unmarshal(reqBody, &NodeData)\n\t\tif err != nil {\n\t\t\tlog.Println(\"ERROR: Payload Error\", err)\n\t\t\tresp.BadRequest()\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t} else {\n\t\t\tneighbourMap.AddNode(NodeData.Node.MAC, NodeData)\n\t\t\tlog.Println(\"INFO: [NO]: Node :\", NodeData.Node.Name, \" info added. 
IP : \", NodeData.Node.IP)\n\t\t\tresp.Code = http.StatusOK\n\t\t\tresp.Message = \"Data Base Updated\"\n\t\t\tresp.Data = nil\n\t\t\tw.WriteHeader(http.StatusOK)\n\t\t}\n\t}\n\tjson.NewEncoder(w).Encode(resp)\n\n}", "title": "" }, { "docid": "8c891f23868c54eea09269167aa8ab22", "score": "0.49827173", "text": "func (w *Wireguard) EndpointWireguardUpdate(name string, publicKey wgtypes.Key, interfaceAddr ip.Addr) {\n\tlogCtx := w.logCtx.WithFields(log.Fields{\"node\": name, \"publicKey\": publicKey, \"interfaceAddr\": interfaceAddr})\n\tlogCtx.Debug(\"EndpointWireguardUpdate\")\n\tif !w.Enabled() {\n\t\tlogCtx.Debug(\"Not enabled - ignoring\")\n\t\treturn\n\t}\n\n\tif name == w.hostname {\n\t\tlogCtx.Debug(\"Local wireguard info updated\")\n\t\tif w.ourPublicKey == nil || *w.ourPublicKey != publicKey {\n\t\t\t// Public key does not match that stored. Flag as not in-sync, we will update the value from the dataplane\n\t\t\t// and publish.\n\t\t\tlogCtx.Debug(\"Stored public key does not match key queried from dataplane\")\n\t\t\tw.ourPublicKey = &publicKey\n\t\t\tw.inSyncWireguard = false\n\t\t}\n\n\t\tif interfaceAddr == nil && w.config.EncryptHostTraffic && w.ourHostAddr != nil {\n\t\t\t// If there is no interface address configured and we are encrypting host traffic, use the host IP as the\n\t\t\t// interface address.\n\t\t\tlogCtx = log.WithField(\"interfaceAddr\", w.ourHostAddr)\n\t\t\tlogCtx.Debug(\"Use node IP as wireguard device IP for host encryption without IPPools\")\n\t\t\tinterfaceAddr = w.ourHostAddr\n\t\t}\n\t\tif w.ourInterfaceAddr != interfaceAddr {\n\t\t\tlogCtx.Debug(\"Local interface addr updated\")\n\t\t\tw.ourInterfaceAddr = interfaceAddr\n\t\t\tw.inSyncInterfaceAddr = false\n\t\t}\n\t\treturn\n\t}\n\n\t// Only update the public key in the node data for nodes. 
The local node will not have this set, this prevents the\n\t// wireguard config processing from attempting to add the local node as a peer.\n\tupdate := w.getOrInitNodeUpdateData(name)\n\tif existing, ok := w.nodes[name]; ok && existing.publicKey == publicKey {\n\t\t// Public key not updated\n\t\tlogCtx.Debug(\"Public key unchanged from programmed\")\n\t\tupdate.publicKey = nil\n\t} else {\n\t\t// Public key updated (or this is a previously unseen node)\n\t\tlogCtx.Debug(\"Storing updated public key\")\n\t\tupdate.publicKey = &publicKey\n\t}\n\tw.setNodeUpdate(name, update)\n}", "title": "" }, { "docid": "7a966f657611e9a336cc8e61cfc2c6a8", "score": "0.49605465", "text": "func (_Staking *StakingTransactorSession) UpdateNodeStaking(_nodeAddr common.Address, _newTokenAmount *big.Int, _newDropburnAmount *big.Int, _newCut *big.Int, _newDesc string, _newLogoUrl string) (*types.Transaction, error) {\n\treturn _Staking.Contract.UpdateNodeStaking(&_Staking.TransactOpts, _nodeAddr, _newTokenAmount, _newDropburnAmount, _newCut, _newDesc, _newLogoUrl)\n}", "title": "" }, { "docid": "01e2a597962ed38a152f19884cab408f", "score": "0.49579865", "text": "func (catalog *NodeCatalog) Info(info map[string]interface{}) (bool, bool) {\n\tsender := info[\"sender\"].(string)\n\tnode, exists := catalog.findNode(sender)\n\tvar reconnected bool\n\tif exists {\n\t\treconnected = node.Update(sender, info)\n\t} else {\n\t\tnode := CreateNode(sender, false, catalog.logger.WithField(\"remote-node\", sender))\n\t\tnode.Update(sender, info)\n\t\tcatalog.Add(node)\n\t}\n\treturn exists, reconnected\n}", "title": "" }, { "docid": "57ee2fe8e1407147b33dbff4251d1685", "score": "0.49446765", "text": "func (_Staking *StakingSession) UpdateNodeStaking(_nodeAddr common.Address, _newTokenAmount *big.Int, _newDropburnAmount *big.Int, _newCut *big.Int, _newDesc string, _newLogoUrl string) (*types.Transaction, error) {\n\treturn _Staking.Contract.UpdateNodeStaking(&_Staking.TransactOpts, _nodeAddr, _newTokenAmount, _newDropburnAmount, _newCut, _newDesc, _newLogoUrl)\n}", "title": "" }, { "docid": "0e93c1c372317052556f0ae71276e2e8", "score": "0.4939845", "text": "func (n *Node) Info() (Info, error) {\n\tnodes := n.nodes.list()\n\tnodeResults := make([]NodeInfo, len(nodes))\n\tfor i, nd := range nodes {\n\t\tinfo := NodeInfo{\n\t\t\tUID: nd.UID,\n\t\t\tName: nd.Name,\n\t\t\tVersion: nd.Version,\n\t\t\tNumClients: nd.NumClients,\n\t\t\tNumUsers: nd.NumUsers,\n\t\t\tNumChannels: nd.NumChannels,\n\t\t\tUptime: nd.Uptime,\n\t\t}\n\t\tif nd.Metrics != nil {\n\t\t\tinfo.Metrics = &Metrics{\n\t\t\t\tInterval: nd.Metrics.Interval,\n\t\t\t\tItems: nd.Metrics.Items,\n\t\t\t}\n\t\t}\n\t\tnodeResults[i] = info\n\t}\n\n\treturn Info{\n\t\tNodes: nodeResults,\n\t}, nil\n}", "title": "" }, { "docid": "2d1d2264dc9379c6e038ee54209032d0", "score": "0.4936197", "text": "func (_Staking *StakingTransactor) UpdateNodeStaking(opts *bind.TransactOpts, _nodeAddr common.Address, _newTokenAmount *big.Int, _newDropburnAmount *big.Int, _newCut *big.Int, _newDesc string, _newLogoUrl string) (*types.Transaction, error) {\n\treturn _Staking.contract.Transact(opts, \"updateNodeStaking\", _nodeAddr, _newTokenAmount, _newDropburnAmount, _newCut, _newDesc, _newLogoUrl)\n}", "title": "" }, { "docid": "770818015ea174c5605ef2b1b14dae56", "score": "0.4928656", "text": "func (c *ciliumNodes) Update(ctx context.Context, ciliumNode *v2.CiliumNode, opts v1.UpdateOptions) (result *v2.CiliumNode, err error) {\n\tresult = &v2.CiliumNode{}\n\terr = 
c.client.Put().\n\t\tResource(\"ciliumnodes\").\n\t\tName(ciliumNode.Name).\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\t\tBody(ciliumNode).\n\t\tDo(ctx).\n\t\tInto(result)\n\treturn\n}", "title": "" }, { "docid": "fc31188fd1f0354996d7019a2e524eee", "score": "0.49284664", "text": "func (this *RpcServer) GetNodeInfo(req *action.RpcBase, resp *action.RpcBase) error {\n\tresp.NodeInfo = this.node.GetNodeInfo()\n\tresp.Result = true\n\treturn nil\n}", "title": "" }, { "docid": "b519992fff216bfc12bc5c9d3e9530d9", "score": "0.48803693", "text": "func (c *AnalyticsNode) Update(contrailClient contrailclient.ApiClient) error {\n\tanalyticsInfoLog.Printf(\"Updating %s %s\\n\", c.Hostname, nodeType)\n\tobj, err := contrailclient.GetContrailObjectByName(contrailClient, string(nodeType), c.Hostname)\n\tif err != nil {\n\t\treturn err\n\t}\n\ttypedNode := obj.(*contrailtypes.AnalyticsNode)\n\ttypedNode.SetFQName(\"\", []string{\"default-global-system-config\", c.Hostname})\n\ttypedNode.SetAnalyticsNodeIpAddress(c.IPAddress)\n\tannotations := contrailclient.ConvertMapToContrailKeyValuePairs(c.Annotations)\n\ttypedNode.SetAnnotations(&annotations)\n\treturn contrailClient.Update(typedNode)\n}", "title": "" }, { "docid": "3b021d2476f1cf8b25601e5aaca1f9ca", "score": "0.4854968", "text": "func (csr *ClusterStateRegistry) UpdateNodes(nodes []*apiv1.Node, nodeInfosForGroups map[string]*schedulerframework.NodeInfo, currentTime time.Time) error {\n\tcsr.updateNodeGroupMetrics()\n\ttargetSizes, err := getTargetSizes(csr.cloudProvider)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcloudProviderNodeInstances, err := csr.getCloudProviderNodeInstances()\n\tif err != nil {\n\t\treturn err\n\t}\n\tcloudProviderNodesRemoved := csr.getCloudProviderDeletedNodes(nodes)\n\tnotRegistered := getNotRegisteredNodes(nodes, cloudProviderNodeInstances, currentTime)\n\n\tcsr.Lock()\n\tdefer csr.Unlock()\n\n\tcsr.nodes = nodes\n\tcsr.nodeInfosForGroups = nodeInfosForGroups\n\tcsr.previousCloudProviderNodeInstances = csr.cloudProviderNodeInstances\n\tcsr.cloudProviderNodeInstances = cloudProviderNodeInstances\n\n\tcsr.updateUnregisteredNodes(notRegistered)\n\tcsr.updateCloudProviderDeletedNodes(cloudProviderNodesRemoved)\n\tcsr.updateReadinessStats(currentTime)\n\n\t// update acceptable ranges based on requests from last loop and targetSizes\n\t// updateScaleRequests relies on acceptableRanges being up to date\n\tcsr.updateAcceptableRanges(targetSizes)\n\tcsr.updateScaleRequests(currentTime)\n\tcsr.handleInstanceCreationErrors(currentTime)\n\t// recalculate acceptable ranges after removing timed out requests\n\tcsr.updateAcceptableRanges(targetSizes)\n\tcsr.updateIncorrectNodeGroupSizes(currentTime)\n\treturn nil\n}", "title": "" }, { "docid": "654e868684d56460c70c1892a884fd77", "score": "0.48351836", "text": "func (f *NodeClient) NodeGetInfo(ctx context.Context, in *csipb.NodeGetInfoRequest, opts ...grpc.CallOption) (*csipb.NodeGetInfoResponse, error) {\n\tif f.nextErr != nil {\n\t\treturn nil, f.nextErr\n\t}\n\treturn f.nodeGetInfoResp, nil\n}", "title": "" }, { "docid": "66d2d8eac1f55aadfa2f8fa02e25fee0", "score": "0.48077607", "text": "func (h *HashRing) UpdateNode(nodeKey string, weight int) {\r\n\th.mu.Lock()\r\n\tdefer h.mu.Unlock()\r\n\th.weights[nodeKey] = weight\r\n\th.generate()\r\n}", "title": "" }, { "docid": "5fd7ca756eddb0a1410f100a3fb98f8e", "score": "0.48073938", "text": "func UpdateAddNode(uri string, executorID string) error {\n\tlog.Debugln(\"UpdateAddNode ENTER\")\n\tlog.Debugln(\"URI:\", 
uri)\n\tlog.Debugln(\"ExecutorID:\", executorID)\n\n\turl := uri + \"/api/node/cluster\"\n\n\tstate := &types.AddNode{\n\t\tAcknowledged: false,\n\t\tExecutorID: executorID,\n\t}\n\n\tresponse, err := json.MarshalIndent(state, \"\", \" \")\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to marshall state object:\", err)\n\t\tlog.Debugln(\"UpdateAddNode LEAVE\")\n\t\treturn err\n\t}\n\n\treq, err := http.NewRequest(\"POST\", url, bytes.NewBuffer(response))\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to create new HTTP request:\", err)\n\t\tlog.Debugln(\"UpdateAddNode LEAVE\")\n\t\treturn err\n\t}\n\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\n\tclient := &http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to make HTTP call:\", err)\n\t\tlog.Debugln(\"UpdateAddNode LEAVE\")\n\t\treturn err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tbody, err := ioutil.ReadAll(io.LimitReader(resp.Body, 1048576))\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to read the HTTP Body:\", err)\n\t\tlog.Debugln(\"UpdateAddNode LEAVE\")\n\t\treturn err\n\t}\n\n\tlog.Debugln(\"response Status:\", resp.Status)\n\tlog.Debugln(\"response Headers:\", resp.Header)\n\tlog.Debugln(\"response Body:\", string(body))\n\n\tvar newstate types.AddNode\n\terr = json.Unmarshal(body, &newstate)\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to unmarshal the UpdateState object:\", err)\n\t\tlog.Debugln(\"UpdateAddNode LEAVE\")\n\t\treturn err\n\t}\n\n\tlog.Debugln(\"Acknowledged:\", newstate.Acknowledged)\n\tlog.Debugln(\"ExecutorID:\", newstate.ExecutorID)\n\n\tif !newstate.Acknowledged {\n\t\tlog.Errorln(\"Failed to receive an acknowledgement\")\n\t\tlog.Debugln(\"UpdateAddNode LEAVE\")\n\t\treturn ErrStateChangeNotAcknowledged\n\t}\n\n\tlog.Errorln(\"UpdateAddNode Succeeded\")\n\tlog.Debugln(\"UpdateAddNode LEAVE\")\n\treturn nil\n}", "title": "" }, { "docid": "da9dd193a198d992de1b6e213d847b53", "score": "0.48054108", "text": "func (a *API) UpdateMeshNetworkNodes(nodesstr string) (result string) {\n\tdefer func() {\n\t\tlog.Trace(fmt.Sprintf(\"Api UpdateMeshNetworkNodes nodesstr=%s,out result=%v\", nodesstr, result))\n\t}()\n\tvar nodes []*network.NodeInfo\n\terr := json.Unmarshal([]byte(nodesstr), &nodes)\n\tif err != nil {\n\t\tlog.Error(err.Error())\n\t\terr = rerr.ErrArgumentError.AppendError(err)\n\t\treturn dto.NewErrorMobileResponse(err)\n\t}\n\terr = a.api.Photon.Protocol.UpdateMeshNetworkNodes(nodes)\n\tif err != nil {\n\t\tlog.Error(err.Error())\n\t\terr = rerr.ErrArgumentError.AppendError(err)\n\t\treturn dto.NewErrorMobileResponse(err)\n\t}\n\treturn dto.NewSuccessMobileResponse(nil)\n}", "title": "" }, { "docid": "5c69a027c552392f934053b06504cccd", "score": "0.48000538", "text": "func (ns *DefaultNodeServer) NodeGetInfo(ctx context.Context, req *csi.NodeGetInfoRequest) (*csi.NodeGetInfoResponse, error) {\n\tutil.TraceLog(ctx, \"Using default NodeGetInfo\")\n\n\tcsiTopology := &csi.Topology{\n\t\tSegments: ns.Driver.topology,\n\t}\n\n\treturn &csi.NodeGetInfoResponse{\n\t\tNodeId: ns.Driver.nodeID,\n\t\tAccessibleTopology: csiTopology,\n\t}, nil\n}", "title": "" }, { "docid": "27b887b079562afd817db1801f0c122b", "score": "0.47865656", "text": "func (s *SensuClient) UpdateNode(node *corev1.Node) error {\n\treturn s.ensureNode(node)\n}", "title": "" }, { "docid": "4414aaad268f1a820e52c61fbd4fd80a", "score": "0.47862893", "text": "func UpdateNodeDBInfoTask(taskID string, stepName string) error {\n\tstart := time.Now()\n\n\t// get task form database\n\ttask, err := 
cloudprovider.GetStorageModel().GetTask(context.Background(), taskID)\n\tif err != nil {\n\t\tblog.Errorf(\"UpdateNodeDBInfoTask[%s] task %s get detail task information from storage failed: %s, task retry\", taskID, taskID, err.Error())\n\t\treturn err\n\t}\n\n\t// task state check\n\tstate := &cloudprovider.TaskState{\n\t\tTask: task,\n\t\tJobResult: cloudprovider.NewJobSyncResult(task),\n\t}\n\t// check task already terminated\n\tif state.IsTerminated() {\n\t\tblog.Errorf(\"UpdateNodeDBInfoTask[%s] task %s is terminated, step %s skip\", taskID, taskID, stepName)\n\t\treturn fmt.Errorf(\"task %s terminated\", taskID)\n\t}\n\t// workflow switch current step to stepName when previous task exec successful\n\tstep, err := state.IsReadyToStep(stepName)\n\tif err != nil {\n\t\tblog.Errorf(\"UpdateNodeDBInfoTask[%s] task %s not turn ro run step %s, err %s\", taskID, taskID, stepName, err.Error())\n\t\treturn err\n\t}\n\t// previous step successful when retry task\n\tif step == nil {\n\t\tblog.Infof(\"UpdateNodeDBInfoTask[%s]: current step[%s] successful and skip\", taskID, stepName)\n\t\treturn nil\n\t}\n\n\tblog.Infof(\"UpdateNodeDBInfoTask[%s] task %s run current step %s, system: %s, old state: %s, params %v\",\n\t\ttaskID, taskID, stepName, step.System, step.Status, step.Params)\n\n\t// extract valid info\n\tpasswd := task.CommonParams[\"Passwd\"]\n\n\taddSuccessNodes := strings.Split(state.Task.CommonParams[\"addSuccessNodes\"], \",\")\n\taddFailedNodes := strings.Split(state.Task.CommonParams[\"addFailedNodes\"], \",\")\n\tfailedNodes := strings.Split(task.CommonParams[\"failedNodes\"], \",\")\n\ttimeoutNodes := strings.Split(task.CommonParams[\"timeoutNodes\"], \",\")\n\n\t// get nodes IDs and IPs\n\tipList := strings.Split(step.Params[\"NodeIPs\"], \",\")\n\tidList := strings.Split(step.Params[\"NodeIDs\"], \",\")\n\tif len(idList) != len(ipList) {\n\t\tblog.Errorf(\"UpdateNodeDBInfoTask[%s] [inner fatal] task %s step %s NodeID %d is not equal to InnerIP %d, fatal\", taskID, taskID, stepName,\n\t\t\tlen(idList), len(ipList))\n\t\t_ = state.UpdateStepFailure(start, stepName, fmt.Errorf(\"NodeID & InnerIP params err\"))\n\t\treturn fmt.Errorf(\"task %s parameter err\", taskID)\n\t}\n\tnodeIDToIPMap := make(map[string]string, 0)\n\tfor i := 0; i < len(ipList); i++ {\n\t\tnodeIDToIPMap[idList[i]] = ipList[i]\n\t}\n\n\tsuccessInstances := addSuccessNodes\n\tfailedInstances := make([]string, 0)\n\tif len(addFailedNodes) > 0 {\n\t\tfailedInstances = append(failedInstances, addFailedNodes...)\n\t}\n\tif len(failedNodes) > 0 {\n\t\tfailedInstances = append(failedInstances, failedNodes...)\n\t}\n\tif len(timeoutNodes) > 0 {\n\t\tfailedInstances = append(failedInstances, timeoutNodes...)\n\t}\n\n\tinstanceIPs := make([]string, 0)\n\tfor _, instanceID := range successInstances {\n\t\tif ip, ok := nodeIDToIPMap[instanceID]; ok {\n\t\t\tinstanceIPs = append(instanceIPs, ip)\n\t\t}\n\t}\n\n\t// update nodes status in DB\n\tfor i := range successInstances {\n\t\tnode, err := cloudprovider.GetStorageModel().GetNode(context.Background(), successInstances[i])\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tnode.Passwd = passwd\n\t\tnode.Status = common.StatusInitialization\n\t\terr = cloudprovider.GetStorageModel().UpdateNode(context.Background(), node)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t}\n\tblog.Infof(\"UpdateNodeDBInfoTask[%s] step %s successful\", taskID, stepName)\n\n\t// update failed nodes status in DB\n\tfor i := range failedInstances {\n\t\tnode, err := 
cloudprovider.GetStorageModel().GetNode(context.Background(), failedInstances[i])\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tnode.Passwd = passwd\n\t\tnode.Status = common.StatusAddNodesFailed\n\t\terr = cloudprovider.GetStorageModel().UpdateNode(context.Background(), node)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t}\n\n\t// save common ips\n\tif state.Task.CommonParams == nil {\n\t\ttask.CommonParams = make(map[string]string)\n\t}\n\tif len(instanceIPs) > 0 {\n\t\tstate.Task.CommonParams[\"nodeIPList\"] = strings.Join(instanceIPs, \",\")\n\t}\n\n\t// update step\n\tif err := state.UpdateStepSucc(start, stepName); err != nil {\n\t\tblog.Errorf(\"UpdateNodeDBInfoTask[%s] task %s %s update to storage fatal\", taskID, taskID, stepName)\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "51067d57f825bae137dfdc1a6a351e15", "score": "0.4783513", "text": "func (p *Plugin) NodeGetInfo(\n\tctx context.Context,\n\treq *csi.NodeGetInfoRequest) (\n\t*csi.NodeGetInfoResponse, error) {\n\tglog.Info(\"start to GetNodeInfo\")\n\tdefer glog.Info(\"end to GetNodeInfo\")\n\n\t// TODO: For non-iscsi protocol, iqn should not be\n\t// used as NodeId here.\n\tiqns, _ := iscsi.GetInitiator()\n\tlocalIqn := \"\"\n\tif len(iqns) > 0 {\n\t\tlocalIqn = iqns[0]\n\t}\n\n\treturn &csi.NodeGetInfoResponse{\n\t\tNodeId: localIqn,\n\t}, nil\n}", "title": "" }, { "docid": "788a217dead12c808005a9e5d53343b0", "score": "0.47726062", "text": "func (d *Driver) NodeGetInfo(ctx context.Context, req *csi.NodeGetInfoRequest) (*csi.NodeGetInfoResponse, error) {\n\tklog.V(5).Infof(\"Using default NodeGetInfo\")\n\n\treturn &csi.NodeGetInfoResponse{\n\t\tNodeId: d.NodeID,\n\t}, nil\n}", "title": "" }, { "docid": "c62138304de1cf0dceb79671e6acf65a", "score": "0.4766582", "text": "func (h K8sHelpers) UpdateNode(c *k8sclient.Clientset, n *corev1.Node) error {\n\t// Send the updated node to the apiserver.\n\t_, err := c.CoreV1().Nodes().Update(context.Background(), n, metav1.UpdateOptions{\n\t\tTypeMeta: n.TypeMeta,\n\t})\n\tif err != nil {\n\t\tdefaultLog.Errorf(\"Error while updating node label:\", err.Error())\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "106a45b6c1978f97dd492fbb04936a61", "score": "0.47630957", "text": "func (_SimpleLoan *SimpleLoanTransactor) Update(opts *bind.TransactOpts, name [32]byte, value *big.Int, offchain [32]byte) (*types.Transaction, error) {\n\treturn _SimpleLoan.contract.Transact(opts, \"update\", name, value, offchain)\n}", "title": "" }, { "docid": "9d347e6cf4c7a6e85d6f8d5da6a82c36", "score": "0.47556", "text": "func GetNodeInfo(w http.ResponseWriter, r *http.Request) {\n\tif (*r).Method == \"OPTIONS\" {\n\t\treturn\n\t}\n\tneighbourMap := db.GetDataBase()\n\tresp := model.Response{}\n\t// set Default value\n\tresp.Default()\n\n\tNodeNameMap, GraphData, err := neighbourMap.GenarateNetworkTopology()\n\n\tif err != nil {\n\t\tlog.Println(\"ERROR: Payload Error\", err)\n\t\tresp.InternalServerError()\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t} else {\n\t\tdata := make(map[string]interface{})\n\t\tdata[\"graphData\"] = GraphData\n\t\tdata[\"nodeNames\"] = NodeNameMap\n\n\t\tresp.Code = http.StatusOK\n\t\tresp.Message = \"Updated time: \"\n\t\tresp.Data = data\n\t\tw.WriteHeader(http.StatusOK)\n\n\t}\n\tjson.NewEncoder(w).Encode(resp)\n\n}", "title": "" }, { "docid": "1e8002f42c096d76940fd6aaa0ebb9b5", "score": "0.47484186", "text": "func Update(w http.ResponseWriter, r *http.Request) {\n\tinf, userErr, sysErr, statusCode := api.NewInfo(r, []string{\"name\"}, 
nil)\n\tif userErr != nil || sysErr != nil {\n\t\tapi.HandleErr(w, r, inf.Tx.Tx, statusCode, userErr, sysErr)\n\t\treturn\n\t}\n\tdefer inf.Close()\n\ttx := inf.Tx.Tx\n\n\ttopologies, userErr, sysErr, statusCode, _ := readTopologies(r, false)\n\tif len(topologies) != 1 {\n\t\tapi.HandleErr(w, r, tx, http.StatusBadRequest, errors.New(\"cannot find exactly 1 topology with the query string provided\"), nil)\n\t\treturn\n\t}\n\tvar topology tc.TopologyV5\n\tif err := json.NewDecoder(r.Body).Decode(&topology); err != nil {\n\t\tapi.HandleErr(w, r, tx, http.StatusBadRequest, err, nil)\n\t\treturn\n\t}\n\n\talerts, userErr, sysErr := ValidateTopology(topology, inf)\n\tif userErr != nil || sysErr != nil {\n\t\tcode := http.StatusBadRequest\n\t\tif sysErr != nil {\n\t\t\tcode = http.StatusInternalServerError\n\t\t}\n\t\tapi.HandleErr(w, r, inf.Tx.Tx, code, userErr, sysErr)\n\t\treturn\n\t}\n\n\trequestedName := inf.Params[\"name\"]\n\t// check if the entity was already updated\n\tuserErr, sysErr, statusCode = api.CheckIfUnModifiedByName(r.Header, inf.Tx, requestedName, \"topology\")\n\tif userErr != nil || sysErr != nil {\n\t\tapi.HandleErr(w, r, tx, statusCode, userErr, sysErr)\n\t\treturn\n\t}\n\tnodes := DowngradeTopologyNodes(topology.Nodes)\n\tuserErr, sysErr, statusCode = checkIfTopologyCanBeAlteredByCurrentUser(inf, nodes)\n\tif userErr != nil || sysErr != nil {\n\t\tapi.HandleErr(w, r, tx, statusCode, userErr, sysErr)\n\t\treturn\n\t}\n\n\toldTopology := topologies[0].(tc.TopologyV5)\n\n\tif err := removeParents(tx, oldTopology.Name); err != nil {\n\t\tapi.HandleErr(w, r, tx, http.StatusInternalServerError, nil, err)\n\t\treturn\n\t}\n\n\tvar oldNodes, newNodes = map[string]int{}, map[string]int{}\n\tfor index, node := range oldTopology.Nodes {\n\t\toldNodes[node.Cachegroup] = index\n\t}\n\tfor index, node := range topology.Nodes {\n\t\tnewNodes[node.Cachegroup] = index\n\t}\n\tvar toRemove []string\n\tfor cachegroupName := range oldNodes {\n\t\tif _, exists := newNodes[cachegroupName]; !exists {\n\t\t\ttoRemove = append(toRemove, cachegroupName)\n\t\t} else {\n\t\t\ttopology.Nodes[newNodes[cachegroupName]].Id = oldTopology.Nodes[oldNodes[cachegroupName]].Id\n\t\t}\n\t}\n\n\tif len(toRemove) > 0 {\n\t\tif err := removeNodes(inf.Tx.Tx, oldTopology.Name, &toRemove); err != nil {\n\t\t\tapi.HandleErr(w, r, tx, http.StatusInternalServerError, nil, err)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif userErr, sysErr, statusCode = setTopologyDetails(tx, &topology, oldTopology.Name); userErr != nil || sysErr != nil {\n\t\tapi.HandleErr(w, r, tx, statusCode, userErr, sysErr)\n\t\treturn\n\t}\n\n\tif userErr, sysErr, statusCode = addNodes(tx, topology.Name, &topology); userErr != nil || sysErr != nil {\n\t\tapi.HandleErr(w, r, tx, statusCode, userErr, sysErr)\n\t\treturn\n\t}\n\tif userErr, sysErr, statusCode = addParents(tx, topology.Nodes); userErr != nil || sysErr != nil {\n\t\tapi.HandleErr(w, r, tx, statusCode, userErr, sysErr)\n\t\treturn\n\t}\n\n\talertsObject := tc.CreateAlerts(tc.SuccessLevel, \"topology was updated.\")\n\tif len(alerts.Alerts) != 0 {\n\t\talertsObject.AddAlerts(alerts)\n\t}\n\tapi.WriteAlertsObj(w, r, http.StatusOK, alertsObject, topology)\n\n\tchangeLogMsg := fmt.Sprintf(\"TOPOLOGY: %s, ACTION: Updated topology, keys: {name: %s }\", topology.Name, topology.Name)\n\tapi.CreateChangeLogRawTx(api.ApiChange, changeLogMsg, inf.User, tx)\n}", "title": "" }, { "docid": "a77083797f9f1732e54e7fc9447e227e", "score": "0.47451046", "text": "func (ns *node) NodeGetInfo(\n\tctx 
context.Context,\n\treq *csi.NodeGetInfoRequest,\n) (*csi.NodeGetInfoResponse, error) {\n\n\treturn &csi.NodeGetInfoResponse{\n\t\tNodeId: ns.driver.config.NodeID,\n\t}, nil\n}", "title": "" }, { "docid": "0980fe11fc94ba201036d16d5c19a296", "score": "0.47358647", "text": "func (h netplanHTTP) UpdateNetworkMapEndpoint(c *gin.Context) {\n\t// get the values to update with off the request\n\tvar nm s.NetworkMap\n\tif err := c.ShouldBindJSON(&nm); err != nil {\n\t\tc.JSON(http.StatusBadRequest, gin.H{\n\t\t\t\"error\": err,\n\t\t})\n\t\treturn\n\t}\n\n\tupdate, err := h.svc.UpdateNetworkMap(&nm)\n\tif err != nil {\n\t\tc.JSON(http.StatusInternalServerError, gin.H{\n\t\t\t\"error\": err,\n\t\t})\n\t\treturn\n\t}\n\n\t// send back updated value\n\tc.JSON(http.StatusOK, update)\n}", "title": "" }, { "docid": "e6c67682c47816265b0f06f7d8e586ea", "score": "0.47335202", "text": "func (w *Wireguard) updateCacheFromNodeUpdates() (conflictingKeys set.Set[wgtypes.Key]) {\n\tconflictingKeys = set.New[wgtypes.Key]()\n\tfor name, update := range w.nodeUpdates {\n\t\tnode := w.getOrInitNodeData(name)\n\n\t\t// This is a remote node configuration. Update the node data and the key to node mappings.\n\t\tlogCtx := w.logCtx.WithField(\"node\", name)\n\t\tlogCtx.Debug(\"Updating cache from update for peer\")\n\t\tupdated := false\n\t\tif update.endpointAddr != nil {\n\t\t\tlogCtx.WithField(\"endpointAddr\", *update.endpointAddr).Debug(\"Store IP address\")\n\t\t\tnode.endpointAddr = *update.endpointAddr\n\t\t\tupdated = true\n\t\t} else if update.deleted {\n\t\t\tlogCtx.Debug(\"Peer deleted\")\n\t\t\tnode.endpointAddr = nil\n\t\t\tupdated = true\n\t\t}\n\n\t\tif update.publicKey != nil {\n\t\t\tlogCtx.WithField(\"publicKey\", *update.publicKey).Debug(\"Store public key\")\n\t\t\tif node.publicKey != zeroKey {\n\t\t\t\t// Remove the key to node reference.\n\t\t\t\tnodenames := w.publicKeyToNodeNames[node.publicKey]\n\t\t\t\tnodenames.Discard(name)\n\t\t\t\tif nodenames.Len() == 0 {\n\t\t\t\t\t// This was the only node with its public key\n\t\t\t\t\tlogCtx.WithField(\"publicKey\", node.publicKey).Debug(\"Removed the only node claiming public key\")\n\t\t\t\t\tdelete(w.publicKeyToNodeNames, node.publicKey)\n\t\t\t\t} else {\n\t\t\t\t\t// This is or was a conflicting key. 
Recheck the nodes associated with this key at the end.\n\t\t\t\t\tlog.WithField(\"publicKey\", node.publicKey).Info(\"Removed node which claimed the same public key as at least one other node\")\n\t\t\t\t\tconflictingKeys.Add(node.publicKey)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Update the node public key and the key to node mapping.\n\t\t\tnode.publicKey = *update.publicKey\n\t\t\tif node.publicKey != zeroKey {\n\t\t\t\tif nodenames := w.publicKeyToNodeNames[node.publicKey]; nodenames == nil {\n\t\t\t\t\tw.logCtx.Debug(\"Public key not associated with a node\")\n\t\t\t\t\tw.publicKeyToNodeNames[node.publicKey] = set.From(name)\n\t\t\t\t} else {\n\t\t\t\t\tw.logCtx.Info(\"Public key already associated with a node\")\n\t\t\t\t\tconflictingKeys.Add(node.publicKey)\n\t\t\t\t\tnodenames.Add(name)\n\t\t\t\t}\n\t\t\t}\n\t\t\tupdated = true\n\t\t}\n\n\t\tupdate.cidrsDeleted.Iter(func(cidr ip.CIDR) error {\n\t\t\tlogCtx.WithField(\"cidr\", cidr).Debug(\"Discarding CIDR\")\n\t\t\tnode.cidrs.Discard(cidr)\n\t\t\tupdated = true\n\t\t\treturn nil\n\t\t})\n\t\tupdate.cidrsAdded.Iter(func(cidr ip.CIDR) error {\n\t\t\tlogCtx.WithField(\"cidr\", cidr).Debug(\"Adding CIDR\")\n\t\t\tnode.cidrs.Add(cidr)\n\t\t\tupdated = true\n\t\t\treturn nil\n\t\t})\n\n\t\tif updated {\n\t\t\t// Node configuration updated. Store node data.\n\t\t\tw.logCtx.Debug(\"Node updated\")\n\t\t\tw.setNode(name, node)\n\t\t} else {\n\t\t\t// No further update, delete update so it's not processed again.\n\t\t\tw.logCtx.Debug(\"No updates for the node - remove node update to remove additional processing\")\n\t\t\tdelete(w.nodeUpdates, name)\n\t\t}\n\t}\n\n\treturn conflictingKeys\n}", "title": "" }, { "docid": "1510c5ff53043c222acbd4a0448662b8", "score": "0.47289914", "text": "func (c *CachedNodeInfo) GetNodeInfo(name string) (*v1.Node, error) {\n\tnode, found := c.Session.Nodes[name]\n\tif !found {\n\t\treturn nil, errors.NewNotFound(v1.Resource(\"node\"), name)\n\t}\n\n\treturn node.Node, nil\n}", "title": "" }, { "docid": "c9c0101fc3e30054dacd5df8f2fddd4f", "score": "0.4725358", "text": "func (a *DeviceAPI) Update(ctx context.Context, req *pb.UpdateDeviceRequest) (*pb.UpdateDeviceResponse, error) {\n\tvar devEUI lorawan.EUI64\n\tif err := devEUI.UnmarshalText([]byte(req.DevEUI)); err != nil {\n\t\treturn nil, grpc.Errorf(codes.InvalidArgument, err.Error())\n\t}\n\n\tif err := a.validator.Validate(ctx,\n\t\tauth.ValidateNodeAccess(devEUI, auth.Update)); err != nil {\n\t\treturn nil, grpc.Errorf(codes.Unauthenticated, \"authentication failed: %s\", err)\n\t}\n\n\td, err := storage.GetDevice(config.C.PostgreSQL.DB, devEUI)\n\tif err != nil {\n\t\treturn nil, errToRPCError(err)\n\t}\n\n\td.DeviceProfileID = req.DeviceProfileID\n\td.Name = req.Name\n\td.Description = req.Description\n\n\t// as this also performs a remote call to update the node on the\n\t// network-server, wrap it in a transaction\n\terr = storage.Transaction(config.C.PostgreSQL.DB, func(tx sqlx.Ext) error {\n\t\treturn storage.UpdateDevice(tx, &d)\n\t})\n\tif err != nil {\n\t\treturn nil, errToRPCError(err)\n\t}\n\n\treturn &pb.UpdateDeviceResponse{}, nil\n}", "title": "" }, { "docid": "8078ca8b8efaf46463c400a0a2e7cb2b", "score": "0.47162253", "text": "func (w *Wireguard) EndpointUpdate(name string, ipAddr ip.Addr) {\n\tlogCtx := w.logCtx.WithFields(log.Fields{\"name\": name, \"ipAddr\": ipAddr})\n\tlogCtx.Debug(\"EndpointUpdate\")\n\tif !w.Enabled() {\n\t\tlogCtx.Debug(\"Not enabled - ignoring\")\n\t\treturn\n\t} else if name == w.hostname {\n\t\t// This is the IP of the 
local host.\n\t\tw.ourHostAddr = ipAddr\n\t\tlogCtx.Debug(\"Storing local host IP\")\n\n\t\t// Host encryption is enabled *and* there is no interface IP specified set the interface IP to be the same as\n\t\t// the node IP. An update from EndpointWireguardUpdate may overwrite this.\n\t\tif w.config.EncryptHostTraffic && w.ourInterfaceAddr == nil {\n\t\t\tlogCtx.Debug(\"Use node IP as wireguard device IP for host encryption when no tunnel address specified\")\n\t\t\tw.ourInterfaceAddr = ipAddr\n\t\t\tw.inSyncInterfaceAddr = false\n\t\t}\n\n\t\t// We don't treat this as a peer update, so nothing else to do here.\n\t\treturn\n\t}\n\n\tupdate := w.getOrInitNodeUpdateData(name)\n\tif existing, ok := w.nodes[name]; ok && existing.endpointAddr == ipAddr {\n\t\tlogCtx.Debug(\"Update contains unchanged IP address\")\n\t\tupdate.endpointAddr = nil\n\t} else {\n\t\tlogCtx.Debug(\"Update contains new IP address\")\n\t\tupdate.endpointAddr = &ipAddr\n\t}\n\tupdate.deleted = false\n\tw.setNodeUpdate(name, update)\n}", "title": "" }, { "docid": "41daddd341c2181dfad3e7612a3f06b6", "score": "0.47102797", "text": "func UpdateNetworkIpam(\n\tctx context.Context,\n\ttx *sql.Tx,\n\trequest *models.UpdateNetworkIpamRequest,\n) error {\n\t//TODO\n\treturn nil\n}", "title": "" }, { "docid": "15697e4055b9d4221ac17359d172646f", "score": "0.47101182", "text": "func (a Core_v1Api) ProxyPUTNode(name string) (*string, *APIResponse, error) {\n\n\tvar localVarHttpMethod = strings.ToUpper(\"Put\")\n\t// create path and map variables\n\tlocalVarPath := a.Configuration.BasePath + \"/api/v1/proxy/nodes/{name}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := make(map[string]string)\n\tvar localVarPostBody interface{}\n\tvar localVarFileName string\n\tvar localVarFileBytes []byte\n\t// authentication '(BearerToken)' required\n\t// set key with prefix in header\n\tlocalVarHeaderParams[\"authorization\"] = a.Configuration.GetAPIKeyWithPrefix(\"authorization\")\n\t// add default headers if any\n\tfor key := range a.Configuration.DefaultHeader {\n\t\tlocalVarHeaderParams[key] = a.Configuration.DefaultHeader[key]\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{ \"*/*\", }\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\n\t\t\"*/*\",\n\t\t}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tvar successPayload = new(string)\n\tlocalVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\n\tvar localVarURL, _ = url.Parse(localVarPath)\n\tlocalVarURL.RawQuery = localVarQueryParams.Encode()\n\tvar localVarAPIResponse = &APIResponse{Operation: \"ProxyPUTNode\", Method: localVarHttpMethod, RequestURL: localVarURL.String()}\n\tif localVarHttpResponse != nil {\n\t\tlocalVarAPIResponse.Response = 
localVarHttpResponse.RawResponse\n\t\tlocalVarAPIResponse.Payload = localVarHttpResponse.Body()\n\t}\n\n\tif err != nil {\n\t\treturn successPayload, localVarAPIResponse, err\n\t}\n\terr = json.Unmarshal(localVarHttpResponse.Body(), &successPayload)\n\treturn successPayload, localVarAPIResponse, err\n}", "title": "" }, { "docid": "b0e80ef359936e12d68db8f09302c48c", "score": "0.47098345", "text": "func (f *FakeGraphInterface) UpdateNode(in *graph.Node) error {\n\tf.UpdateNodes++\n\treturn nil\n}", "title": "" }, { "docid": "3afe60eddadc8d4866461ba95d8cf617", "score": "0.47051302", "text": "func (s *TreeServer) UpdateNode(ctx context.Context, req *tree.UpdateNodeRequest, resp *tree.UpdateNodeResponse) error {\n\n\tdefer track(\"UpdateNode\", ctx, time.Now(), req, resp)\n\n\tfrom := req.GetFrom()\n\tto := req.GetTo()\n\n\tdsNameFrom, dsPathFrom := s.treeNodeToDataSourcePath(from)\n\tdsNameTo, dsPathTo := s.treeNodeToDataSourcePath(to)\n\tif dsNameFrom == \"\" || dsNameTo == \"\" || dsPathFrom == \"\" || dsPathTo == \"\" {\n\t\treturn errors.Forbidden(common.SERVICE_TREE, \"Cannot write to root node or to datasource node\")\n\t}\n\tif dsNameFrom != dsNameTo {\n\t\treturn errors.Forbidden(common.SERVICE_TREE, \"Cannot move between two different datasources\")\n\t}\n\n\tif ds, ok := s.DataSources[dsNameTo]; ok {\n\n\t\tfrom.Path = dsPathFrom\n\t\tto.Path = dsPathTo\n\n\t\treq := &tree.UpdateNodeRequest{From: from, To: to}\n\n\t\tresponse, _ := ds.writer.UpdateNode(ctx, req)\n\n\t\tresp.Success = response.Success\n\t\tresp.Node = response.Node\n\n\t\treturn nil\n\t}\n\n\treturn errors.Forbidden(common.SERVICE_TREE, \"Unknown data source\")\n}", "title": "" }, { "docid": "c34cc9b76a11b914f84fe4a32f63a484", "score": "0.47042605", "text": "func Update(ctx UpdateContext, old, update Node) error {\n\treturn old.UpdateWith(ctx, update)\n}", "title": "" }, { "docid": "67805de585d5080e9a32bf314eb24300", "score": "0.4697824", "text": "func (n *Node) Notify(id, raftAddr string) error {\n\tresp, err := DoNotify(n.APIAddr, id, raftAddr)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif resp.StatusCode != 200 {\n\t\treturn fmt.Errorf(\"failed to notify node: %s\", resp.Status)\n\t}\n\tdefer resp.Body.Close()\n\treturn nil\n}", "title": "" }, { "docid": "1adfcc4c9bef0754e88552cd69408d51", "score": "0.4694669", "text": "func (m *MockManagerClientInterface) UpdateNode(arg0 *client.Node, arg1 interface{}) (*client.Node, error) {\n\tret := m.ctrl.Call(m, \"UpdateNode\", arg0, arg1)\n\tret0, _ := ret[0].(*client.Node)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "title": "" }, { "docid": "d55a9c126783db6b93954aa8ddcd28fc", "score": "0.46922645", "text": "func (ns *NodeSync) Update(event controller.Event, txn controller.UpdateOperations) (changeDescription string, err error) {\n\tkubeStateChange := event.(*controller.KubeStateChange)\n\n\tswitch kubeStateChange.Resource {\n\tcase nodemodel.NodeKeyword:\n\t\t// update of node management IP addresses\n\t\tvar prev *Node\n\t\tmgmtAddrs := ns.nodeMgmtAddresses(kubeStateChange.NewValue)\n\t\tnodeName := strings.TrimPrefix(kubeStateChange.Key, nodemodel.KeyPrefix())\n\t\tnode, hasOtherNode := ns.nodes[nodeName]\n\t\tif !hasOtherNode {\n\t\t\tif len(mgmtAddrs) == 0 {\n\t\t\t\t// no data for the node\n\t\t\t\tbreak\n\t\t\t}\n\t\t\t// ID not yet known (update may come later)\n\t\t\tnode = &Node{Name: nodeName}\n\t\t\tns.nodes[nodeName] = node\n\t\t} else {\n\t\t\tprevCopy := *node\n\t\t\tprev = &prevCopy\n\t\t\tif node.ID == 0 && len(mgmtAddrs) == 0 {\n\t\t\t\t// node 
left the cluster\n\t\t\t\tnode = nil\n\t\t\t\tdelete(ns.nodes, nodeName)\n\t\t\t}\n\t\t}\n\t\tif node != nil {\n\t\t\tnode.MgmtIPAddresses = mgmtAddrs\n\t\t\tnode.PodCIDR = ns.nodePodCIDR(kubeStateChange.NewValue)\n\t\t\tif node.ID != 0 {\n\t\t\t\tns.EventLoop.PushEvent(&NodeUpdate{\n\t\t\t\t\tNodeName: nodeName,\n\t\t\t\t\tPrevState: prev,\n\t\t\t\t\tNewState: node,\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\n\tcase vppnode.Keyword:\n\t\t// other node update\n\t\tvar (\n\t\t\tnodeName string\n\t\t\tvppNode *vppnode.VppNode\n\t\t\tprev *Node\n\t\t)\n\t\tif kubeStateChange.NewValue != nil {\n\t\t\tvppNode = kubeStateChange.NewValue.(*vppnode.VppNode)\n\t\t\tnodeName = vppNode.Name\n\t\t} else {\n\t\t\tnodeName = kubeStateChange.PrevValue.(*vppnode.VppNode).Name\n\t\t}\n\t\tnode, hasOtherNode := ns.nodes[nodeName]\n\t\tif !hasOtherNode {\n\t\t\tif vppNode == nil {\n\t\t\t\t// no data for the node\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tnode = &Node{Name: nodeName}\n\t\t\tns.nodes[nodeName] = node\n\t\t} else {\n\t\t\tprevCopy := *node\n\t\t\tprev = &prevCopy\n\t\t\tif vppNode == nil {\n\t\t\t\t// node left the cluster\n\t\t\t\tnode = nil\n\t\t\t\tdelete(ns.nodes, nodeName)\n\t\t\t}\n\t\t}\n\t\tif node != nil {\n\t\t\tnode.ID = vppNode.Id\n\t\t\tnode.VppIPAddresses = ns.nodeVPPAddresses(vppNode)\n\t\t}\n\t\tev := &NodeUpdate{\n\t\t\tNodeName: nodeName,\n\t\t\tPrevState: prev,\n\t\t\tNewState: node,\n\t\t}\n\t\t// do not include prevState if it doesn't contain\n\t\t// allocated node.ID\n\t\tif ev.PrevState != nil && ev.PrevState.ID == 0 {\n\t\t\tev.PrevState = nil\n\t\t}\n\t\tns.EventLoop.PushEvent(ev)\n\t}\n\treturn \"\", nil\n}", "title": "" }, { "docid": "e2b10a6777d3c673ec719eb8536a9bdf", "score": "0.46839076", "text": "func (e *nodeEnqueue) Update(evt event.UpdateEvent, q workqueue.RateLimitingInterface) {}", "title": "" }, { "docid": "83d7fd20a0264325d2c59e4af7ad4956", "score": "0.468279", "text": "func (_m *MockAPIHelpers) UpdateNode(_a0 *client.Client, _a1 *api.Node) error {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*client.Client, *api.Node) error); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "title": "" }, { "docid": "440844f979c03c193ca06b4eaf7b42eb", "score": "0.46803674", "text": "func (a *InterNode) UpdateIPAddressUpstream() error {\n\tupstreamNodeIP, err := a.GetUpstreamNodeIP()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif upstreamNodeIP == nil {\n\t\treturn nil\n\t}\n\trootIdUpstream, err := a.getRootIdAtIp(*upstreamNodeIP)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdestUrl := fmt.Sprintf(\"https://%s/node/%v\", *upstreamNodeIP, *rootIdUpstream)\n\tupstream_req, err := http.NewRequest(\"GET\", destUrl, nil)\n\ta.log.Debug(\"Sending request \" + destUrl + \" : GET\")\n\tresp, err := global.Client.Do(upstream_req)\n\tif resp != nil {\n\t\tdefer resp.Body.Close()\n\t}\n\tif err != nil {\n\t\ta.log.Debug(\"Was not able to connect upstream. 
Node did not respond\")\n\t\treturn err\n\t}\n\tvar node database.PbNode\n\tdecoder := json.NewDecoder(resp.Body)\n\tif err = decoder.Decode(&node); err != nil {\n\t\ta.log.Debug(\"Decoder error: %s\", err.Error())\n\t\treturn err\n\t}\n\tif node.Name != global.RootNode {\n\t\treturn errors.New(\"Can only update root node upstream\")\n\t}\n\n\tvar ipConfig string\n\tfor i := range node.ConfigItems {\n\t\tconfItem := &node.ConfigItems[i]\n\t\tif confItem.Key == \"IP Address\" {\n\t\t\tupstreamInfo := strings.Split(confItem.Value, \":\")\n\t\t\tipConfig = upstreamInfo[0] + global.ApiPort\n\t\t\tbreak\n\t\t}\n\t}\n\tupdatedNode := database.PbNode{Id: *rootIdUpstream, Name: node.Name, ConfigItems: []database.ConfigItem{{Key: \"IP Address\", Value: ipConfig}}}\n\ta.log.Debug(\"GOING TO UPDPATE UPSTREAM NODE AS %v\", updatedNode)\n\tjsonStr, err := json.Marshal(updatedNode)\n\tif err != nil {\n\t\ta.log.Debug(\"UpdateNodeUpstream: Error marshaling device content to send upstream\")\n\t\treturn err\n\t}\n\treturn a.passHttpRequest(jsonStr, *upstreamNodeIP, \"/node\", \"PATCH\")\n}", "title": "" }, { "docid": "6bd8ad4886c5244c440d75b87d55e272", "score": "0.46785158", "text": "func (p *peer) NodeInfo() p2p.NodeInfo {\n\treturn p2p.DefaultNodeInfo{}\n}", "title": "" }, { "docid": "fd0026a478ccc15920286ea493e2a9b5", "score": "0.46763244", "text": "func (s *LocalNodeStore) Update(update func(*LocalNode)) {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\n\tupdate(&s.value)\n\n\tif s.emit != nil {\n\t\ts.emit(s.value)\n\t}\n}", "title": "" }, { "docid": "3ea00cf95e8b30827346ba7d31688a28", "score": "0.46729243", "text": "func Update() error {\n return mobile.Update()\n}", "title": "" }, { "docid": "76c0ee6708745f06fc0fcf9e27b9404e", "score": "0.46684086", "text": "func (_SimpleLoan *SimpleLoanTransactorSession) Update(name [32]byte, value *big.Int, offchain [32]byte) (*types.Transaction, error) {\n\treturn _SimpleLoan.Contract.Update(&_SimpleLoan.TransactOpts, name, value, offchain)\n}", "title": "" }, { "docid": "bebc7894573e4caf148dd3b2abbb85a8", "score": "0.46620733", "text": "func (s *service) Update(ctx context.Context, r *taskAPI.UpdateTaskRequest) (*ptypes.Empty, error) {\n\treturn empty, nil\n}", "title": "" }, { "docid": "0ebb107de0b09b917e73411683d97b8b", "score": "0.4661728", "text": "func (fn *FileNode) UpdateNode() error {\n\terr := fn.InitFileInfo()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif fn.IsDir() {\n\t\tif fn.FRoot.IsDirOpen(fn.FPath) {\n\t\t\trepo, rnode := fn.Repo()\n\t\t\tif repo != nil {\n\t\t\t\trnode.UpdateRepoFiles()\n\t\t\t}\n\t\t\tfn.UpdateDir()\n\t\t}\n\t} else {\n\t\trepo, _ := fn.Repo()\n\t\tif repo != nil {\n\t\t\tfn.Info.Vcs, _ = repo.Status(string(fn.FPath))\n\t\t}\n\t\tfn.UpdateSig()\n\t\tfn.FRoot.UpdateSig()\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "60f2ccbd4e844ccc52efe9dcf71e2d57", "score": "0.46564066", "text": "func (mem *Mempool) Update(height uint64, txs types.Txs, keyImages []*lktypes.Key) error {\n\t// First, create a lookup map of txns in new txs.\n\ttxsMap := make(map[string]struct{})\n\tfor _, tx := range txs {\n\t\ttxsMap[tx.Hash().String()] = struct{}{}\n\t}\n\n\t// Set height\n\tmem.height = height\n\tmem.notifiedTxsAvailable = false\n\n\t// Remove transactions that are already in txs.\n\tmem.filterTxs(txsMap)\n\n\tatomic.StoreInt32(&mem.rechecking, 1)\n\t// Recheck transactions that are already in goodTxs.\n\tmem.recheckTxs()\n\n\tif mem.SpecGoodTxsSize() > 0 {\n\t\tmem.recheckSpecTxs()\n\t}\n\tatomic.StoreInt32(&mem.rechecking, 0)\n\t// Gather all 
executable transactions and promote them\n\tmem.promoteExecutables(nil)\n\n\tif mem.GoodTxsSize() > 0 || mem.SpecGoodTxsSize() > 0 {\n\t\tmem.logger.Info(\"mem.notifyTxsAvailable start\")\n\t\tmem.notifyTxsAvailable()\n\t\tmem.logger.Info(\"mem.notifyTxsAvailable end\")\n\t}\n\tmem.metrics.Size.Set(float64(mem.GoodTxsSize()))\n\treturn nil\n}", "title": "" }, { "docid": "3c4c8a003bc70388e1da16518a01ce8f", "score": "0.46549323", "text": "func (c *buddyinfoCollector) Update(ch chan<- prometheus.Metric) error {\n\tbuddyInfo, err := c.fs.BuddyInfo()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"couldn't get buddyinfo: %w\", err)\n\t}\n\n\tlevel.Debug(c.logger).Log(\"msg\", \"Set node_buddy\", \"buddyInfo\", buddyInfo)\n\tfor _, entry := range buddyInfo {\n\t\tfor size, value := range entry.Sizes {\n\t\t\tch <- prometheus.MustNewConstMetric(\n\t\t\t\tc.desc,\n\t\t\t\tprometheus.GaugeValue, value,\n\t\t\t\tentry.Node, entry.Zone, strconv.Itoa(size),\n\t\t\t)\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "2e3ee5bd8aebbd88adda4611a2f5537b", "score": "0.46491867", "text": "func (rp *RopParser) SetNodeInfo(n *node.Node) {\n\t// set DistFromPM\n\tdistString := rp.GetValue(colDistFromPM)\n\tdist, err := strconv.ParseInt(distString, 10, 64)\n\tif err != nil {\n\t\trp.debug(fmt.Sprintf(\"Parse: could not get distance from '%s'\", distString))\n\t}\n\tn.DistFromPM = int(dist)\n\n\tn.Name = rp.GetValue(colName)\n\t// check cable In consistency\n\tcableInName := rp.GetValue(colCableIn)\n\n\t// if TronconIn already defined, check its consistency\n\tif n.TronconIn != nil && cableInName != n.TronconIn.Name {\n\t\tif strings.ReplaceAll(cableInName, \" \", \"\") != strings.ReplaceAll(n.TronconIn.Name, \" \", \"\") {\n\t\t\trp.debug(fmt.Sprintf(\"SetNodeInfo: not matching cable In name '%s' ('%s' expected) for node '%s'\", cableInName, n.TronconIn.Name, n.PtName))\n\t\t}\n\t\tcableInName = n.TronconIn.Name\n\t}\n\tif n.TronconIn == nil {\n\t\t// if TronconIn not defined, create it\n\t\ttrIn := rp.zone.Troncons[cableInName]\n\t\tif trIn == nil {\n\t\t\tif !rp.zone.CreateNodeFromRop {\n\t\t\t\trp.debug(fmt.Sprintf(\"SetNodeInfo: could not get troncon from cable '%s'\", cableInName))\n\t\t\t}\n\t\t\ttrIn = node.NewTroncon(cableInName)\n\t\t\trp.zone.Troncons.Add(trIn)\n\n\t\t\tparentNodeName := rp.GetParentPtName()\n\t\t\tparentNode, found := rp.zone.Nodes[parentNodeName]\n\t\t\tif !found {\n\t\t\t\trp.debug(fmt.Sprintf(\"SetNodeInfo: could not get parentNode '%s'\", parentNodeName))\n\t\t\t}\n\t\t\ttrIn.NodeSource = parentNode\n\t\t}\n\t\tif trIn.NodeDest == nil {\n\t\t\ttrIn.NodeDest = n\n\t\t\tn.TronconIn = trIn\n\t\t\ttrIn.NodeSource.AddChild(n)\n\t\t}\n\t\tif trIn.NodeDest != nil && trIn.NodeDest.PtName != n.PtName {\n\t\t\trp.debug(fmt.Sprintf(\"SetNodeInfo: troncon '%s' already has a destination node '%s' instead of '%s'\", cableInName, trIn.NodeDest.PtName, n.PtName))\n\t\t}\n\t}\n}", "title": "" }, { "docid": "19ea84f957b28d7112532ae7675f4c84", "score": "0.46484968", "text": "func (h *NodeAPIHandler) Update(id string, resource rest.Resource) (rest.Resource, bool, error) {\n\th.g.Lock()\n\tdefer h.g.Unlock()\n\n\t// Current node, to be updated\n\tn := h.g.GetNode(graph.Identifier(id))\n\tif n == nil {\n\t\treturn nil, false, rest.ErrNotFound\n\t}\n\n\t// Node containing the metadata updated\n\tpatchedNode := resource.(*types.Node)\n\n\t// Do not modify/replace Metadata.(TID|Name|Type), use actual node values\n\tif actualTID, _ := n.Metadata.GetFieldString(\"TID\"); actualTID != \"\" 
{\n\t\tpatchedNode.Metadata.SetField(\"TID\", actualTID)\n\t}\n\tactualName, _ := n.Metadata.GetFieldString(\"Name\")\n\tpatchedNode.Metadata.SetField(\"Name\", actualName)\n\tactualType, _ := n.Metadata.GetFieldString(\"Type\")\n\tpatchedNode.Metadata.SetField(\"Type\", actualType)\n\n\t// Update actual node Metadata with new patched node\n\tpreviousRevision := n.Revision\n\tif err := h.g.SetMetadata(n, patchedNode.Metadata); err != nil {\n\t\treturn nil, false, err\n\t}\n\n\treturn &types.Node{Node: n}, n.Revision != previousRevision, nil\n}", "title": "" }, { "docid": "3e494d89edcf2589f70ee8e051bf2e73", "score": "0.46352258", "text": "func (s *service) NodeGetInfo(\n\tctx context.Context,\n\treq *csi.NodeGetInfoRequest) (\n\t*csi.NodeGetInfoResponse, error) {\n\n\tnodeID, err := s.GetCSINodeID(ctx)\n\tif (err) != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &csi.NodeGetInfoResponse{NodeId: nodeID}, nil\n}", "title": "" }, { "docid": "30f577fd59abe67537663a8e76d0b348", "score": "0.46278682", "text": "func (s *service) Update(ctx context.Context, req *taskAPI.UpdateTaskRequest) (*ptypes.Empty, error) {\n\tlog.G(ctx).WithField(\"id\", req.ID).Debug(\"update\")\n\tresp, err := s.agentClient.Update(ctx, req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn resp, nil\n}", "title": "" }, { "docid": "ea266aad7e499c2a36a37651f801aad2", "score": "0.46151924", "text": "func (t PrintingEventDelegate) NotifyUpdate(peer *memberlist.Node) {\n\tlog.Println(\"Update:\", peer.Name, peer.Addr.String(), int(peer.Port))\n}", "title": "" }, { "docid": "71dad433bd77a74428a9867089ac181b", "score": "0.46145558", "text": "func (m *MemoryStore) Update(ctx context.Context, info ccontent.Info, fieldpaths ...string) (ccontent.Info, error) {\n\tnewLabels, err := m.update(info.Digest, info.Labels)\n\tif err != nil {\n\t\treturn ccontent.Info{}, nil\n\t}\n\tinfo.Labels = newLabels\n\treturn info, nil\n}", "title": "" }, { "docid": "a99564957aaf14f09af4c3e8c6be79da", "score": "0.46143305", "text": "func (mr *MockManagerClientInterfaceMockRecorder) UpdateNode(arg0, arg1 interface{}) *gomock.Call {\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"UpdateNode\", reflect.TypeOf((*MockManagerClientInterface)(nil).UpdateNode), arg0, arg1)\n}", "title": "" }, { "docid": "4447c2b44136507c1cfbb9a659a826ab", "score": "0.46138585", "text": "func (n *Node) UpdatedNode(obj *v2.CiliumNode) {\n\tn.k8sObj = obj\n}", "title": "" }, { "docid": "735dbe26400abf37163e2eedb0d7c579", "score": "0.46099183", "text": "func (d BlockVolumeNodeDriver) NodeGetInfo(ctx context.Context, req *csi.NodeGetInfoRequest) (*csi.NodeGetInfoResponse, error) {\n\tad, err := d.util.LookupNodeAvailableDomain(d.KubeClient, d.nodeID)\n\n\tif err != nil {\n\t\td.logger.With(zap.Error(err)).With(\"nodeId\", d.nodeID, \"availableDomain\", ad).Error(\"Available domain of node missing.\")\n\t}\n\n\td.logger.With(\"nodeId\", d.nodeID, \"availableDomain\", ad).Info(\"Available domain of node identified.\")\n\treturn &csi.NodeGetInfoResponse{\n\t\tNodeId: d.nodeID,\n\t\tMaxVolumesPerNode: maxVolumesPerNode,\n\n\t\t// make sure that the driver works on this particular AD only\n\t\tAccessibleTopology: &csi.Topology{\n\t\t\tSegments: map[string]string{\n\t\t\t\tkubeAPI.LabelZoneFailureDomain: ad,\n\t\t\t},\n\t\t},\n\t}, nil\n}", "title": "" }, { "docid": "e08ece99ca6d80b0782db2891ad5cd8a", "score": "0.46030217", "text": "func (c *esClient) GetNodeInfo() (*NodeInfoOutput, error) {\n\turl := fmt.Sprintf(\"%s://%s:%s/%s\", c.scheme, c.host, c.port, 
nodeInfoEndpoint)\n\n\tvar nodeInfoOutput NodeInfoOutput\n\n\terr := c.fetchJSON(url, &nodeInfoOutput)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &nodeInfoOutput, nil\n}", "title": "" }, { "docid": "c9dc50a9e5104c907e20017c460e86cb", "score": "0.45981684", "text": "func UpdateName(name string, myemail string) {\n\tTables()\n\tif session == nil {\n\t\tlog.Println(\"session not available\")\n\t}\n\tlog.Println(\"session available\")\n\terr := session.Query(\"UPDATE register SET username = ? where useremail = ?;\", name, myemail).Exec()\n\tlog.Println(\"Name updated\")\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn\n\t}\n\ttime_output := session.Query(\"SELECT * FROM register;\").Iter()\n\tfmt.Println(\"output: \", time_output)\n}", "title": "" }, { "docid": "1951573c3ba2ca4e75914eef83936c50", "score": "0.45974064", "text": "func (jbobject *StorageBlockManagerMaster) UpdateBlockInfo(a StorageBlockManagerIdInterface, b StorageBlockIdInterface, c StorageStorageLevelInterface, d int64, e int64, f int64) bool {\n\tconv_a := javabind.NewGoToJavaCallable()\n\tconv_b := javabind.NewGoToJavaCallable()\n\tconv_c := javabind.NewGoToJavaCallable()\n\tif err := conv_a.Convert(a); err != nil {\n\t\tpanic(err)\n\t}\n\tif err := conv_b.Convert(b); err != nil {\n\t\tpanic(err)\n\t}\n\tif err := conv_c.Convert(c); err != nil {\n\t\tpanic(err)\n\t}\n\tjret, err := jbobject.CallMethod(javabind.GetEnv(), \"updateBlockInfo\", javabind.Boolean, conv_a.Value().Cast(\"org/apache/spark/storage/BlockManagerId\"), conv_b.Value().Cast(\"org/apache/spark/storage/BlockId\"), conv_c.Value().Cast(\"org/apache/spark/storage/StorageLevel\"), d, e, f)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tconv_a.CleanUp()\n\tconv_b.CleanUp()\n\tconv_c.CleanUp()\n\treturn jret.(bool)\n}", "title": "" }, { "docid": "3eeeecc1ab3234f36339afd2594f3d16", "score": "0.45972088", "text": "func GetNodeInfo() (*NodeInfo, error) {\n\treturn nil, nil\n}", "title": "" }, { "docid": "b381156caade591ed9d9d071663638ca", "score": "0.45910132", "text": "func (c *FakeGPUNodeInfos) Update(ctx context.Context, GPUNodeInfo *nvidiav1.GPUNodeInfo, opts v1.UpdateOptions) (result *nvidiav1.GPUNodeInfo, err error) {\n\tobj, err := c.Fake.\n\t\tInvokes(testing.NewUpdateAction(GPUNodeInfosResource, c.ns, GPUNodeInfo), &nvidiav1.GPUNodeInfo{})\n\n\tif obj == nil {\n\t\treturn nil, err\n\t}\n\treturn obj.(*nvidiav1.GPUNodeInfo), err\n}", "title": "" }, { "docid": "b10b0057e7dc2875d82efc25f6d81b1b", "score": "0.45878807", "text": "func UpdateNode(n *Node) error {\n\tdb, err := sql.Open(dbDriver, dbsource)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer db.Close()\n\tstmt, err := db.Prepare(\"UPDATE node SET devid=?, platformid=?, isprovider=?, inftype=? 
WHERE id=?\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer stmt.Close()\n\n\tres, err := stmt.Exec(n.DevID, n.PlatformID, n.IsProvider, n.InfType, n.ID)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif i, err := res.RowsAffected(); i != 1 {\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tlog.Printf(\"RowsAffected does not equals one: %v\\n\", i)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "589144fcaf73b2acb9e03fb1f86c6fa8", "score": "0.45817247", "text": "func (d *hostInfoOps) Update(\n\tctx context.Context,\n\thostname string,\n\tstate hostpb.HostState,\n\tgoalState hostpb.HostState,\n\tlabels map[string]string,\n) error {\n\tbytes, err := json.Marshal(&labels)\n\tif err != nil {\n\t\treturn err\n\t}\n\thostInfoObject := &HostInfoObject{\n\t\tHostname: base.NewOptionalString(hostname),\n\t\tState: state.String(),\n\t\tGoalState: goalState.String(),\n\t\tLabels: string(bytes),\n\t\tUpdateTime: time.Now(),\n\t}\n\tfieldsToUpdate := []string{\"State\", \"GoalState\", \"Labels\", \"UpdateTime\"}\n\tif err := d.store.oClient.Update(\n\t\tctx,\n\t\thostInfoObject,\n\t\tfieldsToUpdate...); err != nil {\n\t\td.store.metrics.OrmHostInfoMetrics.HostInfoUpdateFail.Inc(1)\n\t\treturn err\n\t}\n\td.store.metrics.OrmHostInfoMetrics.HostInfoUpdate.Inc(1)\n\treturn nil\n}", "title": "" }, { "docid": "72d5e8769f613502d7e4dae30cda0025", "score": "0.45809266", "text": "func (db *DB) UpdateStackAboutInfo(u *types.User) (*types.User, error) {\n\t// for simplicity, update the listing rather than updating single value\n\ttx, err := db.client.Begin()\n\tif err != nil {\n\t\treturn u, err\n\t}\n\n\t// create SQL statement for db update\n\tsqlStatement := `\n\t\tUPDATE \n\t\t\tcoindrop_stackoverflow\n\t\tSET \n\t\t\texchange_account_id = $1, \n\t\t\tdisplay_name = $2, \n\t\t\taccounts = $3\n\t\tWHERE\n\t\t\tuser_id = $4\n\t`\n\n\t// prepare statement\n\tstmt, err := db.client.Prepare(sqlStatement)\n\tif err != nil {\n\t\treturn u, err\n\t}\n\n\tdefer stmt.Close()\n\n\t// execute db write using unique reddit username as the identifier\n\t_, err = stmt.Exec(\n\t\tu.Social.StackOverflow.ExchangeAccountID,\n\t\tu.Social.StackOverflow.DisplayName,\n\t\tu.Social.StackOverflow.Accounts,\n\t\tu.UserID,\n\t)\n\tif err != nil {\n\t\t// rollback transaction if error thrown\n\t\ttx.Rollback()\n\t\treturn u, err\n\t}\n\n\t// commit db write\n\terr = tx.Commit()\n\tif err != nil {\n\t\t// rollback transaction if error thrown\n\t\ttx.Rollback()\n\t\treturn u, err\n\t}\n\n\treturn u, nil\n}", "title": "" }, { "docid": "8e4e21a9c733fab9e71877e9add4b289", "score": "0.45536408", "text": "func (c *Client) GetNodeInfo() (*NetInfo, error) {\n\turl := \"/net-info\"\n\tres := &NetInfo{}\n\treturn res, c.Request(url, nil, res)\n}", "title": "" }, { "docid": "f3feb49742ceadcbe939c288d52ad1d2", "score": "0.45369267", "text": "func (n *Nodes) Update(k string, v Node) {\n\tn.mapping.Store(k, v)\n}", "title": "" }, { "docid": "35d5b0dec71399ff445a0c3374593291", "score": "0.45355052", "text": "func (self *SimNodes) NodesInfo() *p2p.NodesInfo {\n\tserver := self.Server()\n\tif server == nil {\n\t\treturn &p2p.NodesInfo{\n\t\t\tID: self.ID.String(),\n\t\t\tENodes: self.Nodes().String(),\n\t\t}\n\t}\n\treturn server.NodesInfo()\n}", "title": "" }, { "docid": "e3876d2b51b1f2052c6be0ef0bb7ebb7", "score": "0.4522384", "text": "func UpdateNode(contextName string, node *corev1.Node) (*corev1.Node, error) {\n\n\tclientset, err := context.GetClientset(contextName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn 
connector.UpdateNode(clientset, node)\n}", "title": "" }, { "docid": "0b08b1d8286478f9ce57833dc8ac668f", "score": "0.45221493", "text": "func (_SimpleLoan *SimpleLoanSession) Update(name [32]byte, value *big.Int, offchain [32]byte) (*types.Transaction, error) {\n\treturn _SimpleLoan.Contract.Update(&_SimpleLoan.TransactOpts, name, value, offchain)\n}", "title": "" }, { "docid": "d0af23dcce2ae44be499e6da8bec590e", "score": "0.45199674", "text": "func UpdateUserInfo(userTableName string, userInfo UserInfo) (UserInfo, error) {\n\n\tregion := \"us-east-2\"\n\tawsSession, _ := session.NewSession(&aws.Config{\n\t\tRegion: aws.String(region)},\n\t)\n\n\tdynaClient := dynamodb.New(awsSession)\n\n\t//UserInfoUpdate model\n\ttype UserInfoUpdate struct {\n\t\tFirstName string `json:\":firstName,omitempty\"`\n\t\tLastName string `json:\":lastName,omitempty\"`\n\t}\n\n\t//RoleInfoKey model\n\ttype RoleInfoKey struct {\n\t\tRoleName string `json:\"roleName\"`\n\t}\n\n\t//UserInfoItemKey model\n\ttype UserInfoKey struct {\n\t\tUserID string `json:\"userId\"`\n\t}\n\n\tupdateUserInfo := UserInfoUpdate{\n\t\tFirstName: userInfo.FirstName,\n\t\tLastName: userInfo.LastName,\n\t}\n\n\tav, KeyErr := dynamodbattribute.MarshalMap(UserInfoKey{UserID: userInfo.UserId})\n\tif KeyErr != nil {\n\t\terrorString := \"FailedTableLookupError\" + \"[\" + KeyErr.Error() + \"]\"\n\t\tfmt.Println(errorString)\n\t\treturn userInfo, errors.New(errorString)\n\t}\n\n\tupdateDetails, errUpdateDetails := dynamodbattribute.MarshalMap(updateUserInfo)\n\tif errUpdateDetails != nil {\n\t\terrorString := \"FailedToCreateUpdateDetails\" + \"[\" + errUpdateDetails.Error() + \"]\"\n\t\tfmt.Println(errorString)\n\t\treturn userInfo, errors.New(errorString)\n\t}\n\n\tinput := &dynamodb.UpdateItemInput{\n\t\tKey: av,\n\t\tTableName: aws.String(userTableName),\n\t\t// ExpressionAttributeNames: map[string]*string{\"#role\": aws.String(\"role\")},\n\t\tUpdateExpression: aws.String(\"set firstName = :firstName, lastName = :lastName\"),\n\t\tExpressionAttributeValues: updateDetails,\n\t}\n\n\t_, errUpdateItem := dynaClient.UpdateItem(input)\n\tif errUpdateItem != nil {\n\t\terrorString := \"UpdateItemError\" + \"[\" + errUpdateItem.Error() + \"]\"\n\t\tfmt.Println(errorString)\n\t\treturn userInfo, errors.New(errorString)\n\t}\n\n\tfmt.Println(\"User : \" + userInfo.UserId + \" Details Updated Successfully\")\n\treturn userInfo, nil\n}", "title": "" }, { "docid": "a9f5ac5fb4ea4c686e85fc4d4b1c4c8b", "score": "0.45073253", "text": "func UpdateNodeState(uri string, executorID string, nodeState int) error {\n\tlog.Debugln(\"NotifyNodeState ENTER\")\n\tlog.Debugln(\"URI:\", uri)\n\tlog.Debugln(\"ExecutorID:\", executorID)\n\tlog.Debugln(\"State:\", nodeState)\n\n\turl := uri + \"/api/node/state\"\n\n\tstate := &types.UpdateNode{\n\t\tAcknowledged: false,\n\t\tExecutorID: executorID,\n\t\tState: nodeState,\n\t}\n\n\tresponse, err := json.MarshalIndent(state, \"\", \" \")\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to marshall state object:\", err)\n\t\tlog.Debugln(\"NotifyNodeState LEAVE\")\n\t\treturn err\n\t}\n\n\treq, err := http.NewRequest(\"POST\", url, bytes.NewBuffer(response))\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to create new HTTP request:\", err)\n\t\tlog.Debugln(\"NotifyNodeState LEAVE\")\n\t\treturn err\n\t}\n\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\n\tclient := &http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to make HTTP call:\", err)\n\t\tlog.Debugln(\"NotifyNodeState 
LEAVE\")\n\t\treturn err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tbody, err := ioutil.ReadAll(io.LimitReader(resp.Body, 1048576))\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to read the HTTP Body:\", err)\n\t\tlog.Debugln(\"NotifyNodeState LEAVE\")\n\t\treturn err\n\t}\n\n\tlog.Debugln(\"response Status:\", resp.Status)\n\tlog.Debugln(\"response Headers:\", resp.Header)\n\tlog.Debugln(\"response Body:\", string(body))\n\n\tvar newstate types.UpdateNode\n\terr = json.Unmarshal(body, &newstate)\n\tif err != nil {\n\t\tlog.Errorln(\"Failed to unmarshal the UpdateState object:\", err)\n\t\tlog.Debugln(\"NotifyNodeState LEAVE\")\n\t\treturn err\n\t}\n\n\tlog.Debugln(\"Acknowledged:\", newstate.Acknowledged)\n\tlog.Debugln(\"ExecutorID:\", newstate.ExecutorID)\n\tlog.Debugln(\"State:\", newstate.State)\n\n\tif !newstate.Acknowledged {\n\t\tlog.Errorln(\"Failed to receive an acknowledgement\")\n\t\tlog.Debugln(\"NotifyNodeState LEAVE\")\n\t\treturn ErrStateChangeNotAcknowledged\n\t}\n\n\tlog.Errorln(\"NotifyNodeState Succeeded\")\n\tlog.Debugln(\"NotifyNodeState LEAVE\")\n\treturn nil\n}", "title": "" }, { "docid": "58579ec86cc94baf5ec25a8b79b53b42", "score": "0.44985327", "text": "func (api *networkAPI) Update(obj *network.Network) error {\n\tif api.ct.resolver != nil {\n\t\tapicl, err := api.ct.apiClient()\n\t\tif err != nil {\n\t\t\tapi.ct.logger.Errorf(\"Error creating API server clent. Err: %v\", err)\n\t\t\treturn err\n\t\t}\n\n\t\t_, err = apicl.NetworkV1().Network().Update(context.Background(), obj)\n\t\treturn err\n\t}\n\n\tapi.ct.handleNetworkEvent(&kvstore.WatchEvent{Object: obj, Type: kvstore.Updated})\n\treturn nil\n}", "title": "" }, { "docid": "3a1cc76df9d755593b11940405bdc806", "score": "0.44865373", "text": "func (a Core_v1Api) ProxyPATCHNode(name string) (*string, *APIResponse, error) {\n\n\tvar localVarHttpMethod = strings.ToUpper(\"Patch\")\n\t// create path and map variables\n\tlocalVarPath := a.Configuration.BasePath + \"/api/v1/proxy/nodes/{name}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := make(map[string]string)\n\tvar localVarPostBody interface{}\n\tvar localVarFileName string\n\tvar localVarFileBytes []byte\n\t// authentication '(BearerToken)' required\n\t// set key with prefix in header\n\tlocalVarHeaderParams[\"authorization\"] = a.Configuration.GetAPIKeyWithPrefix(\"authorization\")\n\t// add default headers if any\n\tfor key := range a.Configuration.DefaultHeader {\n\t\tlocalVarHeaderParams[key] = a.Configuration.DefaultHeader[key]\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{ \"*/*\", }\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\n\t\t\"*/*\",\n\t\t}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tvar successPayload = new(string)\n\tlocalVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, 
localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\n\tvar localVarURL, _ = url.Parse(localVarPath)\n\tlocalVarURL.RawQuery = localVarQueryParams.Encode()\n\tvar localVarAPIResponse = &APIResponse{Operation: \"ProxyPATCHNode\", Method: localVarHttpMethod, RequestURL: localVarURL.String()}\n\tif localVarHttpResponse != nil {\n\t\tlocalVarAPIResponse.Response = localVarHttpResponse.RawResponse\n\t\tlocalVarAPIResponse.Payload = localVarHttpResponse.Body()\n\t}\n\n\tif err != nil {\n\t\treturn successPayload, localVarAPIResponse, err\n\t}\n\terr = json.Unmarshal(localVarHttpResponse.Body(), &successPayload)\n\treturn successPayload, localVarAPIResponse, err\n}", "title": "" }, { "docid": "8acce9683eb9abb50949922ae4fca9cf", "score": "0.44827178", "text": "func NewNodeInfo() (*NodeInfo, error) {\n\tku, err := kubelet.GetKubeUtil()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tnodeInfo := &NodeInfo{\n\t\tclient: ku,\n\t\tgetClusterAgentFunc: clusteragent.GetClusterAgentClient,\n\t\tapiserverNodeLabelsFunc: apiserverNodeLabels,\n\t}\n\n\treturn nodeInfo, nil\n}", "title": "" } ]
c265b3efd99d054e3c967a3175506b1d
safeArrayDestroyData destroys a SafeArray object. AKA: SafeArrayDestroyData in Windows API.
[ { "docid": "9f9d0a28a3c832e712f9a2c8607d2ebf", "score": "0.87788516", "text": "func safeArrayDestroyData(safearray *SafeArray) (err error) {\n\terr = convertHresultToError(procSafeArrayDestroyData.Call(uintptr(unsafe.Pointer(safearray))))\n\treturn\n}", "title": "" } ]
[ { "docid": "96fba861f5b8a47750109e3ee3e1c3e8", "score": "0.79386324", "text": "func safeArrayDestroy(safearray *SafeArray) (err error) {\n\terr = convertHresultToError(procSafeArrayDestroy.Call(uintptr(unsafe.Pointer(safearray))))\n\treturn\n}", "title": "" }, { "docid": "9c26ee68f10fa2fdd260ec5680acdbf1", "score": "0.7227296", "text": "func safeArrayDestroyDescriptor(safearray *SafeArray) (err error) {\n\terr = convertHresultToError(procSafeArrayDestroyDescriptor.Call(uintptr(unsafe.Pointer(safearray))))\n\treturn\n}", "title": "" }, { "docid": "cfdf309b20691e9f77817896da79fa98", "score": "0.69392604", "text": "func (sa *Array) DestroyData() error {\n\treturn DestroyData(sa.Array)\n}", "title": "" }, { "docid": "5bc6e1542e777f76fcca509e22ec9925", "score": "0.6898778", "text": "func DestroyData(array *types.COMArray) error {\n\treturn api.DestroyData(array)\n}", "title": "" }, { "docid": "edbfb92d46398b1cbf46097778cd71ff", "score": "0.68955034", "text": "func safeArrayUnaccessData(safearray *SafeArray) (err error) {\n\terr = convertHresultToError(procSafeArrayUnaccessData.Call(uintptr(unsafe.Pointer(safearray))))\n\treturn\n}", "title": "" }, { "docid": "2fb2518dc902da5427605f983969443b", "score": "0.6417736", "text": "func ArrayDestroy(pSelf []Array) {\n\tcpSelf, _ := unpackArgSArray(pSelf)\n\tC.godot_array_destroy(cpSelf)\n\tpackSArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "a277e87492df7a1255587e2886b2426b", "score": "0.6387943", "text": "func safeArrayAllocData(safearray *SafeArray) (err error) {\n\terr = convertHresultToError(procSafeArrayAllocData.Call(uintptr(unsafe.Pointer(safearray))))\n\treturn\n}", "title": "" }, { "docid": "0d86e62b6d6e40724badc66cf81bb89e", "score": "0.6246774", "text": "func safeArrayUnlock(safearray *SafeArray) (err error) {\n\terr = convertHresultToError(procSafeArrayUnlock.Call(uintptr(unsafe.Pointer(safearray))))\n\treturn\n}", "title": "" }, { "docid": "8a334ab596937a9fd87e6b0ba0f4d8e1", "score": "0.6242501", "text": "func PoolRealArrayDestroy(pSelf []PoolRealArray) {\n\tcpSelf, _ := unpackArgSPoolRealArray(pSelf)\n\tC.godot_pool_real_array_destroy(cpSelf)\n\tpackSPoolRealArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "fe7094715fde65984866371223e87b8e", "score": "0.60906893", "text": "func PoolRealArrayWriteAccessDestroy(pWrite []PoolRealArrayWriteAccess) {\n\tcpWrite, _ := unpackArgSPoolRealArrayWriteAccess(pWrite)\n\tC.godot_pool_real_array_write_access_destroy(cpWrite)\n\tpackSPoolRealArrayWriteAccess(pWrite, cpWrite)\n}", "title": "" }, { "docid": "8c5c318de16c31d08439eaf125f7b136", "score": "0.6032006", "text": "func safeArrayAccessData(safearray *SafeArray) (element uintptr, err error) {\n\terr = convertHresultToError(\n\t\tprocSafeArrayAccessData.Call(\n\t\t\tuintptr(unsafe.Pointer(safearray)),\n\t\t\tuintptr(unsafe.Pointer(&element))))\n\treturn\n}", "title": "" }, { "docid": "625e85ab57ceaf89d2c74f2c262ea3e4", "score": "0.59698236", "text": "func PoolByteArrayWriteAccessDestroy(pWrite []PoolByteArrayWriteAccess) {\n\tcpWrite, _ := unpackArgSPoolByteArrayWriteAccess(pWrite)\n\tC.godot_pool_byte_array_write_access_destroy(cpWrite)\n\tpackSPoolByteArrayWriteAccess(pWrite, cpWrite)\n}", "title": "" }, { "docid": "bebba53e53aae2e3bb7acd77c70cb917", "score": "0.59608823", "text": "func PoolVector2ArrayDestroy(pSelf []PoolVector2Array) {\n\tcpSelf, _ := unpackArgSPoolVector2Array(pSelf)\n\tC.godot_pool_vector2_array_destroy(cpSelf)\n\tpackSPoolVector2Array(pSelf, cpSelf)\n}", "title": "" }, { "docid": "fb0da238cb311e21960ed3aa8d719a13", 
"score": "0.5960104", "text": "func PoolVector3ArrayDestroy(pSelf []PoolVector3Array) {\n\tcpSelf, _ := unpackArgSPoolVector3Array(pSelf)\n\tC.godot_pool_vector3_array_destroy(cpSelf)\n\tpackSPoolVector3Array(pSelf, cpSelf)\n}", "title": "" }, { "docid": "7b3ebb2e6678e319a60fa328e0308cd5", "score": "0.5913242", "text": "func PoolRealArrayReadAccessDestroy(pRead []PoolRealArrayReadAccess) {\n\tcpRead, _ := unpackArgSPoolRealArrayReadAccess(pRead)\n\tC.godot_pool_real_array_read_access_destroy(cpRead)\n\tpackSPoolRealArrayReadAccess(pRead, cpRead)\n}", "title": "" }, { "docid": "c0760acd3d7fcd5a28e46db9ba9d3046", "score": "0.58859646", "text": "func PoolVector3ArrayWriteAccessDestroy(pWrite []PoolVector3ArrayWriteAccess) {\n\tcpWrite, _ := unpackArgSPoolVector3ArrayWriteAccess(pWrite)\n\tC.godot_pool_vector3_array_write_access_destroy(cpWrite)\n\tpackSPoolVector3ArrayWriteAccess(pWrite, cpWrite)\n}", "title": "" }, { "docid": "5f5735d39037a9ebd2fa1ce43ffee3c7", "score": "0.58536446", "text": "func PoolVector2ArrayWriteAccessDestroy(pWrite []PoolVector2ArrayWriteAccess) {\n\tcpWrite, _ := unpackArgSPoolVector2ArrayWriteAccess(pWrite)\n\tC.godot_pool_vector2_array_write_access_destroy(cpWrite)\n\tpackSPoolVector2ArrayWriteAccess(pWrite, cpWrite)\n}", "title": "" }, { "docid": "df44742a547b201d9e2b50fe87f22fb3", "score": "0.58183366", "text": "func PoolByteArrayReadAccessDestroy(pRead []PoolByteArrayReadAccess) {\n\tcpRead, _ := unpackArgSPoolByteArrayReadAccess(pRead)\n\tC.godot_pool_byte_array_read_access_destroy(cpRead)\n\tpackSPoolByteArrayReadAccess(pRead, cpRead)\n}", "title": "" }, { "docid": "1bcef6a7759cff35e383ab6a2ac5b83d", "score": "0.5756737", "text": "func PoolVector2ArrayReadAccessDestroy(pRead []PoolVector2ArrayReadAccess) {\n\tcpRead, _ := unpackArgSPoolVector2ArrayReadAccess(pRead)\n\tC.godot_pool_vector2_array_read_access_destroy(cpRead)\n\tpackSPoolVector2ArrayReadAccess(pRead, cpRead)\n}", "title": "" }, { "docid": "d92c1d5e2798ccb6a3871eb803d5b065", "score": "0.57253563", "text": "func PoolColorArrayWriteAccessDestroy(pWrite []PoolColorArrayWriteAccess) {\n\tcpWrite, _ := unpackArgSPoolColorArrayWriteAccess(pWrite)\n\tC.godot_pool_color_array_write_access_destroy(cpWrite)\n\tpackSPoolColorArrayWriteAccess(pWrite, cpWrite)\n}", "title": "" }, { "docid": "49881f19dfbc42cabfed18d718daa5ed", "score": "0.57155734", "text": "func PoolVector3ArrayReadAccessDestroy(pRead []PoolVector3ArrayReadAccess) {\n\tcpRead, _ := unpackArgSPoolVector3ArrayReadAccess(pRead)\n\tC.godot_pool_vector3_array_read_access_destroy(cpRead)\n\tpackSPoolVector3ArrayReadAccess(pRead, cpRead)\n}", "title": "" }, { "docid": "a9b6cbed9d71b163696f72ac05e5101f", "score": "0.57139164", "text": "func PoolByteArrayDestroy(pSelf []PoolByteArray) {\n\tcpSelf, _ := unpackArgSPoolByteArray(pSelf)\n\tC.godot_pool_byte_array_destroy(cpSelf)\n\tpackSPoolByteArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "a3df0f638ae614a7e5d03867f63d2f8d", "score": "0.5651097", "text": "func PoolColorArrayDestroy(pSelf []PoolColorArray) {\n\tcpSelf, _ := unpackArgSPoolColorArray(pSelf)\n\tC.godot_pool_color_array_destroy(cpSelf)\n\tpackSPoolColorArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "296108d12a866cb66b8259bfe4c47c09", "score": "0.55266637", "text": "func Destroy(array *types.COMArray) error {\n\treturn api.Destroy(array)\n}", "title": "" }, { "docid": "6f4c89d3e5442f4c73f0d3b9ec604646", "score": "0.5448987", "text": "func PoolStringArrayWriteAccessDestroy(pWrite []PoolStringArrayWriteAccess) {\n\tcpWrite, _ := 
unpackArgSPoolStringArrayWriteAccess(pWrite)\n\tC.godot_pool_string_array_write_access_destroy(cpWrite)\n\tpackSPoolStringArrayWriteAccess(pWrite, cpWrite)\n}", "title": "" }, { "docid": "56127a5056ada3439df70b5e1ef9c5d7", "score": "0.5399522", "text": "func PoolColorArrayReadAccessDestroy(pRead []PoolColorArrayReadAccess) {\n\tcpRead, _ := unpackArgSPoolColorArrayReadAccess(pRead)\n\tC.godot_pool_color_array_read_access_destroy(cpRead)\n\tpackSPoolColorArrayReadAccess(pRead, cpRead)\n}", "title": "" }, { "docid": "8dc8c420bf419df02502d21e448d85ab", "score": "0.53897643", "text": "func safeArrayCreate(variantType VT, dimensions uint32, bounds *SafeArrayBound) (safearray *SafeArray, err error) {\n\tsa, _, err := procSafeArrayCreate.Call(\n\t\tuintptr(variantType),\n\t\tuintptr(dimensions),\n\t\tuintptr(unsafe.Pointer(bounds)))\n\tsafearray = (*SafeArray)(unsafe.Pointer(&sa))\n\treturn\n}", "title": "" }, { "docid": "18561b6a517a441b88dd36986c194d4f", "score": "0.5368051", "text": "func (sa *Array) DestroyDescriptor() error {\n\treturn DestroyDescriptor(sa.Array)\n}", "title": "" }, { "docid": "d5c0bd12949f68cc5dd4dc0f69b185e1", "score": "0.53460145", "text": "func PoolStringArrayDestroy(pSelf []PoolStringArray) {\n\tcpSelf, _ := unpackArgSPoolStringArray(pSelf)\n\tC.godot_pool_string_array_destroy(cpSelf)\n\tpackSPoolStringArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "73c49f99117611f79f1f4fd66ba7ccba", "score": "0.5330583", "text": "func UnaccessData(array *types.COMArray) error {\n\treturn api.UnaccessData(array)\n}", "title": "" }, { "docid": "41d248bbdcac353cba41e0036ace186a", "score": "0.53105396", "text": "func ReleaseCArrowArray(arr *CArrowArray) { releaseArr(arr) }", "title": "" }, { "docid": "de6354c73d2d29e4cb9c72cf1c746e6e", "score": "0.5288424", "text": "func safeArrayCopyData(original *SafeArray, duplicate *SafeArray) (err error) {\n\terr = convertHresultToError(\n\t\tprocSafeArrayCopyData.Call(\n\t\t\tuintptr(unsafe.Pointer(original)),\n\t\t\tuintptr(unsafe.Pointer(duplicate))))\n\treturn\n}", "title": "" }, { "docid": "af218c20b51ca4b58101428ccfb4c226", "score": "0.5266251", "text": "func PoolStringArrayReadAccessDestroy(pRead []PoolStringArrayReadAccess) {\n\tcpRead, _ := unpackArgSPoolStringArrayReadAccess(pRead)\n\tC.godot_pool_string_array_read_access_destroy(cpRead)\n\tpackSPoolStringArrayReadAccess(pRead, cpRead)\n}", "title": "" }, { "docid": "56d42651e0ee288ce9e193e3c91bdc25", "score": "0.5224347", "text": "func PoolIntArrayWriteAccessDestroy(pWrite []PoolIntArrayWriteAccess) {\n\tcpWrite, _ := unpackArgSPoolIntArrayWriteAccess(pWrite)\n\tC.godot_pool_int_array_write_access_destroy(cpWrite)\n\tpackSPoolIntArrayWriteAccess(pWrite, cpWrite)\n}", "title": "" }, { "docid": "7ebf6006af26a4ca80b92ada428951f6", "score": "0.5205093", "text": "func ReleaseCFArray(array CFRef) {\n\tfor i := 0; i < CFArrayGetCount(array); i++ {\n\t\tref := CFArrayGetValueAtIndex(array, i)\n\t\tCFRelease(ref)\n\t}\n\tCFRelease(array)\n}", "title": "" }, { "docid": "1e0737b5575b308cb285fff29aab9731", "score": "0.52022094", "text": "func PoolIntArrayDestroy(pSelf []PoolIntArray) {\n\tcpSelf, _ := unpackArgSPoolIntArray(pSelf)\n\tC.godot_pool_int_array_destroy(cpSelf)\n\tpackSPoolIntArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "02f612ccaa4fa0dccde4103f35594295", "score": "0.51987106", "text": "func CreateSafeArray(rawBytes []byte) (unsafe.Pointer, error) {\n\n\tsaPtr, err := CreateEmptySafeArray(0x11, len(rawBytes)) // VT_UI1\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// now we 
need to use RtlCopyMemory to copy our bytes to the SafeArray\n\tmodNtDll := syscall.MustLoadDLL(\"ntdll.dll\")\n\tprocRtlCopyMemory := modNtDll.MustFindProc(\"RtlCopyMemory\")\n\tsa := (*SafeArray)(saPtr)\n\t_, _, err = procRtlCopyMemory.Call(\n\t\tsa.pvData,\n\t\tuintptr(unsafe.Pointer(&rawBytes[0])),\n\t\tuintptr(len(rawBytes)))\n\tif err != syscall.Errno(0) {\n\t\treturn nil, err\n\t}\n\treturn saPtr, nil\n\n}", "title": "" }, { "docid": "8d3b90f800999cad5e844ba87b5b7fe8", "score": "0.5156193", "text": "func CreateEmptySafeArray(arrayType int, size int) (unsafe.Pointer, error) {\n\tmodOleAuto := syscall.MustLoadDLL(\"OleAut32.dll\")\n\tprocSafeArrayCreate := modOleAuto.MustFindProc(\"SafeArrayCreate\")\n\n\tsab := SafeArrayBound{\n\t\tcElements: uint32(size),\n\t\tlLbound: 0,\n\t}\n\tvt := uint16(arrayType)\n\tret, _, err := procSafeArrayCreate.Call(\n\t\tuintptr(vt),\n\t\tuintptr(1),\n\t\tuintptr(unsafe.Pointer(&sab)))\n\n\tif err != syscall.Errno(0) {\n\t\treturn nil, err\n\t}\n\n\treturn unsafe.Pointer(ret), nil\n\n}", "title": "" }, { "docid": "34a2c9fc5bdd91e129564b3b6382b20e", "score": "0.5108652", "text": "func safeArrayCreateEx(variantType VT, dimensions uint32, bounds *SafeArrayBound, extra uintptr) (safearray *SafeArray, err error) {\n\tsa, _, err := procSafeArrayCreateEx.Call(\n\t\tuintptr(variantType),\n\t\tuintptr(dimensions),\n\t\tuintptr(unsafe.Pointer(bounds)),\n\t\textra)\n\tsafearray = (*SafeArray)(unsafe.Pointer(sa))\n\treturn\n}", "title": "" }, { "docid": "957d6abd38ab6f705c38f3896f6f4fa7", "score": "0.50735587", "text": "func DestroyDescriptor(array *types.COMArray) error {\n\treturn api.DestroyDescriptor(array)\n}", "title": "" }, { "docid": "8875144673d8ce55eee550e4e4384026", "score": "0.5027459", "text": "func PoolIntArrayReadAccessDestroy(pRead []PoolIntArrayReadAccess) {\n\tcpRead, _ := unpackArgSPoolIntArrayReadAccess(pRead)\n\tC.godot_pool_int_array_read_access_destroy(cpRead)\n\tpackSPoolIntArrayReadAccess(pRead, cpRead)\n}", "title": "" }, { "docid": "b49645a7bfd13998bf95f28417012f91", "score": "0.5006285", "text": "func safeArrayLock(safearray *SafeArray) (err error) {\n\terr = convertHresultToError(procSafeArrayLock.Call(uintptr(unsafe.Pointer(safearray))))\n\treturn\n}", "title": "" }, { "docid": "eeee480b3e7ff3ba8383d624903b52e1", "score": "0.49890372", "text": "func (a *Array) Free() {\n\tif a.tiledbArray != nil {\n\t\ta.Close()\n\t\tC.tiledb_array_free(&a.tiledbArray)\n\t}\n}", "title": "" }, { "docid": "91a6cd37f9e392a200c8d42941ef62eb", "score": "0.4948225", "text": "func (sb *streamBuffer) callRemoveDataSection(index uint64) {\n\tsb.mu.Lock()\n\tdefer sb.mu.Unlock()\n\n\t// Fetch the data section.\n\tdataSection, exists := sb.dataSections[index]\n\tif !exists {\n\t\tbuild.Critical(\"remove called on data section that does not exist\")\n\t}\n\t// Decrement the refcount.\n\tdataSection.refCount--\n\t// Delete the data section if the refcount has fallen to zero.\n\tif dataSection.refCount == 0 {\n\t\tdelete(sb.dataSections, index)\n\t}\n}", "title": "" }, { "docid": "71028f05f1d817ac5a3b9a491409ca47", "score": "0.49449402", "text": "func ArrayClear(pSelf []Array) {\n\tcpSelf, _ := unpackArgSArray(pSelf)\n\tC.godot_array_clear(cpSelf)\n\tpackSArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "16997ce60b58deba4230c330f77e71d7", "score": "0.4893255", "text": "func safeArrayCopy(original *SafeArray) (safearray *SafeArray, err error) {\n\terr = 
convertHresultToError(\n\t\tprocSafeArrayCopy.Call(\n\t\t\tuintptr(unsafe.Pointer(original)),\n\t\t\tuintptr(unsafe.Pointer(&safearray))))\n\treturn\n}", "title": "" }, { "docid": "06c1e1ff4acc02cf2b5a5e4efc80dc47", "score": "0.48771647", "text": "func safeArrayAllocDescriptorEx(variantType VT, dimensions uint32) (safearray *SafeArray, err error) {\n\terr = convertHresultToError(\n\t\tprocSafeArrayAllocDescriptorEx.Call(\n\t\t\tuintptr(variantType),\n\t\t\tuintptr(dimensions),\n\t\t\tuintptr(unsafe.Pointer(&safearray))))\n\treturn\n}", "title": "" }, { "docid": "f9384da347238e39b0b8a3bf79c2ed5a", "score": "0.48738962", "text": "func safeArrayAllocDescriptor(dimensions uint32) (safearray *SafeArray, err error) {\n\terr = convertHresultToError(\n\t\tprocSafeArrayAllocDescriptor.Call(uintptr(dimensions), uintptr(unsafe.Pointer(&safearray))))\n\treturn\n}", "title": "" }, { "docid": "c2ea8cae98d328512eadb5eee49cd999", "score": "0.48672935", "text": "func (a *Array) Release() {\n\ta.d.Release()\n}", "title": "" }, { "docid": "08c84e9bb3d7cef0441e61db623e376f", "score": "0.48620364", "text": "func ArrayRemove(pSelf []Array, pIdx Int) {\n\tcpSelf, _ := unpackArgSArray(pSelf)\n\tcpIdx, _ := (C.godot_int)(pIdx), cgoAllocsUnknown\n\tC.godot_array_remove(cpSelf, cpIdx)\n\tpackSArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "e2c99019fa166ec132d49e1cf5fd41f1", "score": "0.48410648", "text": "func safeArrayCreateVectorEx(variantType VT, lowerBound int32, length uint32, extra uintptr) (safearray *SafeArray, err error) {\n\tsa, _, err := procSafeArrayCreateVectorEx.Call(\n\t\tuintptr(variantType),\n\t\tuintptr(lowerBound),\n\t\tuintptr(length),\n\t\textra)\n\tsafearray = (*SafeArray)(unsafe.Pointer(sa))\n\treturn\n}", "title": "" }, { "docid": "34e5c1ed194065835e253b2ac68d5c02", "score": "0.48100403", "text": "func PoolRealArrayRemove(pSelf []PoolRealArray, pIdx Int) {\n\tcpSelf, _ := unpackArgSPoolRealArray(pSelf)\n\tcpIdx, _ := (C.godot_int)(pIdx), cgoAllocsUnknown\n\tC.godot_pool_real_array_remove(cpSelf, cpIdx)\n\tpackSPoolRealArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "0a993f3d436a326fef235f592fbbc2a1", "score": "0.48031917", "text": "func safeArrayCreateVector(variantType VT, lowerBound int32, length uint32) (safearray *SafeArray, err error) {\n\tsa, _, err := procSafeArrayCreateVector.Call(\n\t\tuintptr(variantType),\n\t\tuintptr(lowerBound),\n\t\tuintptr(length))\n\tsafearray = (*SafeArray)(unsafe.Pointer(sa))\n\treturn\n}", "title": "" }, { "docid": "df5a3a16fb3169d4e7deda5cae1465bd", "score": "0.47621602", "text": "func (s *Slice) Free() {\n\tif !s.freed && s.sdata == nil {\n\t\tC.free(unsafe.Pointer(s.data))\n\t\ts.freed = true\n\t}\n}", "title": "" }, { "docid": "635bdf3803ae36526c4e1e074638cabd", "score": "0.47545677", "text": "func (j *Judy1) Free() uint64 {\n\treturn uint64(C.Judy1FreeArray(C.PPvoid_t(&j.array), nil))\n}", "title": "" }, { "docid": "99d1645bc275c0c360e9dbfaba92ca45", "score": "0.472935", "text": "func (this *ShmPool) Destroy() error {\n\tif this == nil {\n\t\treturn errors.New(\"object is nil\")\n\t}\n\tif this.c.Err != nil {\n\t\treturn errors.Wrap(this.c.Err, \"global wayland error\")\n\t}\n\tthis.c.mu.Lock()\n\tdefer this.c.mu.Unlock()\n\t_, exists := this.c.obj[this.i]\n\tif !exists {\n\t\treturn errors.New(\"object has been deleted\")\n\t}\n\tthis.c.buf.Reset()\n\tvar tmp int32\n\t_ = tmp\n\tvar oob []byte\n\tbinary.Write(this.c.buf, hostByteOrder, this.i)\n\tbinary.Write(this.c.buf, hostByteOrder, 
uint32(0))\n\thostByteOrder.PutUint32(this.c.buf.Bytes()[4:8], uint32(this.c.buf.Len())<<16|opCodeShmPoolDestroy)\n\n\tfmt.Println(\"Sending ShmPool -> Destroy\")\n\tfmt.Println(hex.Dump(this.c.buf.Bytes()))\n\tthis.c.c.WriteMsgUnix(this.c.buf.Bytes(), oob, nil)\n\treturn nil\n}", "title": "" }, { "docid": "a5dc2fc1d29717da32088c6c2cfdc5ac", "score": "0.47230375", "text": "func (this *DataOffer) Destroy() error {\n\tif this == nil {\n\t\treturn errors.New(\"object is nil\")\n\t}\n\tif this.c.Err != nil {\n\t\treturn errors.Wrap(this.c.Err, \"global wayland error\")\n\t}\n\tthis.c.mu.Lock()\n\tdefer this.c.mu.Unlock()\n\t_, exists := this.c.obj[this.i]\n\tif !exists {\n\t\treturn errors.New(\"object has been deleted\")\n\t}\n\tthis.c.buf.Reset()\n\tvar tmp int32\n\t_ = tmp\n\tvar oob []byte\n\tbinary.Write(this.c.buf, hostByteOrder, this.i)\n\tbinary.Write(this.c.buf, hostByteOrder, uint32(0))\n\thostByteOrder.PutUint32(this.c.buf.Bytes()[4:8], uint32(this.c.buf.Len())<<16|opCodeDataOfferDestroy)\n\n\tfmt.Println(\"Sending DataOffer -> Destroy\")\n\tfmt.Println(hex.Dump(this.c.buf.Bytes()))\n\tthis.c.c.WriteMsgUnix(this.c.buf.Bytes(), oob, nil)\n\treturn nil\n}", "title": "" }, { "docid": "d95714cb95fd8e33e7dd3c792f415a98", "score": "0.4660915", "text": "func safeArrayGetElementSize(safearray *SafeArray) (length *uint32, err error) {\n\tl, _, err := procSafeArrayGetElemsize.Call(uintptr(unsafe.Pointer(safearray)))\n\tlength = (*uint32)(unsafe.Pointer(l))\n\treturn\n}", "title": "" }, { "docid": "d159f48cca5dc60440b3b008596b4429", "score": "0.46512336", "text": "func (j *JudyL) Free() uint64 {\n\treturn uint64(C.JudyLFreeArray(C.PPvoid_t(&j.array), nil))\n}", "title": "" }, { "docid": "336065ab5138409a4f1a0f3ec8d8171b", "score": "0.46424618", "text": "func MemCleanse(v []byte) {\n\tC.OQS_MEM_cleanse(unsafe.Pointer(&v[0]), C.size_t(len(v)))\n}", "title": "" }, { "docid": "804408990196d9d937ce0debe643e78d", "score": "0.46320742", "text": "func DeleteCacheRenderViaData(iData []byte) (err error) {\n\tvar has bool\n\tvar _CacheRender = &CacheRender{Data: iData}\n\tif has, err = Engine.Get(_CacheRender); (has == true) && (err == nil) {\n\t\tif row, err := Engine.Where(\"data = ?\", iData).Delete(new(CacheRender)); (err != nil) || (row <= 0) {\n\t\t\treturn err\n\t\t} else {\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn\n}", "title": "" }, { "docid": "3bfe294c967c342216d8ac8bdb78da8d", "score": "0.46313983", "text": "func (this *Subcompositor) Destroy() error {\n\tif this == nil {\n\t\treturn errors.New(\"object is nil\")\n\t}\n\tif this.c.Err != nil {\n\t\treturn errors.Wrap(this.c.Err, \"global wayland error\")\n\t}\n\tthis.c.mu.Lock()\n\tdefer this.c.mu.Unlock()\n\t_, exists := this.c.obj[this.i]\n\tif !exists {\n\t\treturn errors.New(\"object has been deleted\")\n\t}\n\tthis.c.buf.Reset()\n\tvar tmp int32\n\t_ = tmp\n\tvar oob []byte\n\tbinary.Write(this.c.buf, hostByteOrder, this.i)\n\tbinary.Write(this.c.buf, hostByteOrder, uint32(0))\n\thostByteOrder.PutUint32(this.c.buf.Bytes()[4:8], uint32(this.c.buf.Len())<<16|opCodeSubcompositorDestroy)\n\n\tfmt.Println(\"Sending Subcompositor -> Destroy\")\n\tfmt.Println(hex.Dump(this.c.buf.Bytes()))\n\tthis.c.c.WriteMsgUnix(this.c.buf.Bytes(), oob, nil)\n\treturn nil\n}", "title": "" }, { "docid": "e0a4150c50065f2d9ec6fb8283e2cf66", "score": "0.46296188", "text": "func (this *DataDevice) Release() error {\n\tif this == nil {\n\t\treturn errors.New(\"object is nil\")\n\t}\n\tif this.c.Err != nil {\n\t\treturn errors.Wrap(this.c.Err, \"global wayland 
error\")\n\t}\n\tthis.c.mu.Lock()\n\tdefer this.c.mu.Unlock()\n\t_, exists := this.c.obj[this.i]\n\tif !exists {\n\t\treturn errors.New(\"object has been deleted\")\n\t}\n\tthis.c.buf.Reset()\n\tvar tmp int32\n\t_ = tmp\n\tvar oob []byte\n\tbinary.Write(this.c.buf, hostByteOrder, this.i)\n\tbinary.Write(this.c.buf, hostByteOrder, uint32(0))\n\thostByteOrder.PutUint32(this.c.buf.Bytes()[4:8], uint32(this.c.buf.Len())<<16|opCodeDataDeviceRelease)\n\n\tfmt.Println(\"Sending DataDevice -> Release\")\n\tfmt.Println(hex.Dump(this.c.buf.Bytes()))\n\tthis.c.c.WriteMsgUnix(this.c.buf.Bytes(), oob, nil)\n\treturn nil\n}", "title": "" }, { "docid": "633d5054ec862bd70e405f790a0b8140", "score": "0.4615772", "text": "func (this *ZxdgShellV6) Destroy() error {\n\tif this == nil {\n\t\treturn errors.New(\"object is nil\")\n\t}\n\tif this.c.Err != nil {\n\t\treturn errors.Wrap(this.c.Err, \"global wayland error\")\n\t}\n\tthis.c.mu.Lock()\n\tdefer this.c.mu.Unlock()\n\t_, exists := this.c.obj[this.i]\n\tif !exists {\n\t\treturn errors.New(\"object has been deleted\")\n\t}\n\tthis.c.buf.Reset()\n\tvar tmp int32\n\t_ = tmp\n\tvar oob []byte\n\tbinary.Write(this.c.buf, hostByteOrder, this.i)\n\tbinary.Write(this.c.buf, hostByteOrder, uint32(0))\n\thostByteOrder.PutUint32(this.c.buf.Bytes()[4:8], uint32(this.c.buf.Len())<<16|opCodeZxdgShellV6Destroy)\n\n\tfmt.Println(\"Sending ZxdgShellV6 -> Destroy\")\n\tfmt.Println(hex.Dump(this.c.buf.Bytes()))\n\tthis.c.c.WriteMsgUnix(this.c.buf.Bytes(), oob, nil)\n\treturn nil\n}", "title": "" }, { "docid": "ab1a3d8d58f60feae1857932d3bd1918", "score": "0.460601", "text": "func (this *ZxdgSurfaceV6) Destroy() error {\n\tif this == nil {\n\t\treturn errors.New(\"object is nil\")\n\t}\n\tif this.c.Err != nil {\n\t\treturn errors.Wrap(this.c.Err, \"global wayland error\")\n\t}\n\tthis.c.mu.Lock()\n\tdefer this.c.mu.Unlock()\n\t_, exists := this.c.obj[this.i]\n\tif !exists {\n\t\treturn errors.New(\"object has been deleted\")\n\t}\n\tthis.c.buf.Reset()\n\tvar tmp int32\n\t_ = tmp\n\tvar oob []byte\n\tbinary.Write(this.c.buf, hostByteOrder, this.i)\n\tbinary.Write(this.c.buf, hostByteOrder, uint32(0))\n\thostByteOrder.PutUint32(this.c.buf.Bytes()[4:8], uint32(this.c.buf.Len())<<16|opCodeZxdgSurfaceV6Destroy)\n\n\tfmt.Println(\"Sending ZxdgSurfaceV6 -> Destroy\")\n\tfmt.Println(hex.Dump(this.c.buf.Bytes()))\n\tthis.c.c.WriteMsgUnix(this.c.buf.Bytes(), oob, nil)\n\treturn nil\n}", "title": "" }, { "docid": "0e391aae4ae3d24c9847b0f231ecd88e", "score": "0.4604298", "text": "func safeArraySetRecordInfo(safearray *SafeArray, recordInfo interface{}) (err error) {\n\terr = convertHresultToError(\n\t\tprocSafeArraySetRecordInfo.Call(\n\t\t\tuintptr(unsafe.Pointer(safearray)),\n\t\t\tuintptr(unsafe.Pointer(&recordInfo))))\n\treturn\n}", "title": "" }, { "docid": "83198623c4d53b0d6dd2063477139b4d", "score": "0.4600878", "text": "func (this *Subsurface) Destroy() error {\n\tif this == nil {\n\t\treturn errors.New(\"object is nil\")\n\t}\n\tif this.c.Err != nil {\n\t\treturn errors.Wrap(this.c.Err, \"global wayland error\")\n\t}\n\tthis.c.mu.Lock()\n\tdefer this.c.mu.Unlock()\n\t_, exists := this.c.obj[this.i]\n\tif !exists {\n\t\treturn errors.New(\"object has been deleted\")\n\t}\n\tthis.c.buf.Reset()\n\tvar tmp int32\n\t_ = tmp\n\tvar oob []byte\n\tbinary.Write(this.c.buf, hostByteOrder, this.i)\n\tbinary.Write(this.c.buf, hostByteOrder, uint32(0))\n\thostByteOrder.PutUint32(this.c.buf.Bytes()[4:8], uint32(this.c.buf.Len())<<16|opCodeSubsurfaceDestroy)\n\n\tfmt.Println(\"Sending 
Subsurface -> Destroy\")\n\tfmt.Println(hex.Dump(this.c.buf.Bytes()))\n\tthis.c.c.WriteMsgUnix(this.c.buf.Bytes(), oob, nil)\n\treturn nil\n}", "title": "" }, { "docid": "794a99a3ba00f757cd481dd25dcc6da7", "score": "0.45886087", "text": "func typedArray(maxLen uint32, slice interface{}) js.Value {\n\tgetter := js.FuncOf(func(this js.Value, args []js.Value) interface{} {\n\t\tta := js.TypedArrayOf(slice)\n\t\tvar release js.Func\n\t\trelease = js.FuncOf(func(this js.Value, args []js.Value) interface{} {\n\t\t\tta.Release()\n\t\t\trelease.Release()\n\t\t\treturn nil\n\t\t})\n\t\treturn map[string]interface{}{\"ta\": ta, \"release\": release}\n\t})\n\treturn js.Global().Get(\"GoBridge\").Get(\"Bytes\").New(getter, maxLen)\n}", "title": "" }, { "docid": "40378a4ecd2ab58b5c1b3794e967decc", "score": "0.45459405", "text": "func ArrayErase(pSelf []Array, pValue []Variant) {\n\tcpSelf, _ := unpackArgSArray(pSelf)\n\tcpValue, _ := unpackArgSVariant(pValue)\n\tC.godot_array_erase(cpSelf, cpValue)\n\tpackSVariant(pValue, cpValue)\n\tpackSArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "172d5405ddee3c96d9cc37876760d172", "score": "0.45080656", "text": "func (this *DataSource) Destroy() error {\n\tif this == nil {\n\t\treturn errors.New(\"object is nil\")\n\t}\n\tif this.c.Err != nil {\n\t\treturn errors.Wrap(this.c.Err, \"global wayland error\")\n\t}\n\tthis.c.mu.Lock()\n\tdefer this.c.mu.Unlock()\n\t_, exists := this.c.obj[this.i]\n\tif !exists {\n\t\treturn errors.New(\"object has been deleted\")\n\t}\n\tthis.c.buf.Reset()\n\tvar tmp int32\n\t_ = tmp\n\tvar oob []byte\n\tbinary.Write(this.c.buf, hostByteOrder, this.i)\n\tbinary.Write(this.c.buf, hostByteOrder, uint32(0))\n\thostByteOrder.PutUint32(this.c.buf.Bytes()[4:8], uint32(this.c.buf.Len())<<16|opCodeDataSourceDestroy)\n\n\tfmt.Println(\"Sending DataSource -> Destroy\")\n\tfmt.Println(hex.Dump(this.c.buf.Bytes()))\n\tthis.c.c.WriteMsgUnix(this.c.buf.Bytes(), oob, nil)\n\treturn nil\n}", "title": "" }, { "docid": "efad9d36abc1de35a2eb88ec36ec173a", "score": "0.45062807", "text": "func free_cvector(v []uint8, nl int32, nh int32) {\n\t_ = ((*(*[1000000000]uint8)(unsafe.Pointer(uintptr(unsafe.Pointer(&v[0+nl:][0])) - (uintptr)(1)*unsafe.Sizeof(v[0+nl:][0]))))[:])\n\t// free an unsigned char vector allocated with cvector()\n}", "title": "" }, { "docid": "9d4cec232ea6a538fb94200438e22aa0", "score": "0.4477109", "text": "func zeroArray(a *[scalarSize]byte) {\n\tfor i := 0; i < scalarSize; i++ {\n\t\ta[i] = 0x00\n\t}\n}", "title": "" }, { "docid": "f49d9f126a7379f6bf91806a5de544da", "score": "0.4476343", "text": "func (n nativeObject) Destroy() {}", "title": "" }, { "docid": "af4773e3832aa60ca4a78472d46aebbf", "score": "0.44736412", "text": "func Unlock(array *types.COMArray) error {\n\treturn api.Unlock(array)\n}", "title": "" }, { "docid": "d6ab8671ed696b9bb1fdf640a4df6716", "score": "0.44630843", "text": "func (msg *Message) ClearData() {\n\tmsg.Arguments = msg.Arguments[len(msg.Arguments):]\n}", "title": "" }, { "docid": "74f6e9fd001e25f633becdcd8439f803", "score": "0.44602102", "text": "func (this *Surface) Destroy() error {\n\tif this == nil {\n\t\treturn errors.New(\"object is nil\")\n\t}\n\tif this.c.Err != nil {\n\t\treturn errors.Wrap(this.c.Err, \"global wayland error\")\n\t}\n\tthis.c.mu.Lock()\n\tdefer this.c.mu.Unlock()\n\t_, exists := this.c.obj[this.i]\n\tif !exists {\n\t\treturn errors.New(\"object has been deleted\")\n\t}\n\tthis.c.buf.Reset()\n\tvar tmp int32\n\t_ = tmp\n\tvar oob []byte\n\tbinary.Write(this.c.buf, hostByteOrder, 
this.i)\n\tbinary.Write(this.c.buf, hostByteOrder, uint32(0))\n\thostByteOrder.PutUint32(this.c.buf.Bytes()[4:8], uint32(this.c.buf.Len())<<16|opCodeSurfaceDestroy)\n\n\tfmt.Println(\"Sending Surface -> Destroy\")\n\tfmt.Println(hex.Dump(this.c.buf.Bytes()))\n\tthis.c.c.WriteMsgUnix(this.c.buf.Bytes(), oob, nil)\n\treturn nil\n}", "title": "" }, { "docid": "6356f9bcc79e00281b09f65ac9ee63a9", "score": "0.44500497", "text": "func PoolVector2ArrayRemove(pSelf []PoolVector2Array, pIdx Int) {\n\tcpSelf, _ := unpackArgSPoolVector2Array(pSelf)\n\tcpIdx, _ := (C.godot_int)(pIdx), cgoAllocsUnknown\n\tC.godot_pool_vector2_array_remove(cpSelf, cpIdx)\n\tpackSPoolVector2Array(pSelf, cpSelf)\n}", "title": "" }, { "docid": "c3188d85a0ebf3bd4524c8b2849733fe", "score": "0.441621", "text": "func (n *NoOpAuditor) CleanData() (err error) { return err }", "title": "" }, { "docid": "c3188d85a0ebf3bd4524c8b2849733fe", "score": "0.441621", "text": "func (n *NoOpAuditor) CleanData() (err error) { return err }", "title": "" }, { "docid": "2c89e1a2974be835e4cc1bc3819ebbe6", "score": "0.44113746", "text": "func AccessData(array *types.COMArray) (uintptr, error) {\n\treturn api.AccessData(array)\n}", "title": "" }, { "docid": "477fd4f6830393d831f7bc91a675dd24", "score": "0.43908596", "text": "func (tds TableDataSlice) Release() {\n\tif tds.table != nil {\n\t\ttds.table.refc--\n\t}\n}", "title": "" }, { "docid": "0a4a59fe5ca26bb663c19a2f17771dd9", "score": "0.4385086", "text": "func (this *Buffer) Destroy() error {\n\tif this == nil {\n\t\treturn errors.New(\"object is nil\")\n\t}\n\tif this.c.Err != nil {\n\t\treturn errors.Wrap(this.c.Err, \"global wayland error\")\n\t}\n\tthis.c.mu.Lock()\n\tdefer this.c.mu.Unlock()\n\t_, exists := this.c.obj[this.i]\n\tif !exists {\n\t\treturn errors.New(\"object has been deleted\")\n\t}\n\tthis.c.buf.Reset()\n\tvar tmp int32\n\t_ = tmp\n\tvar oob []byte\n\tbinary.Write(this.c.buf, hostByteOrder, this.i)\n\tbinary.Write(this.c.buf, hostByteOrder, uint32(0))\n\thostByteOrder.PutUint32(this.c.buf.Bytes()[4:8], uint32(this.c.buf.Len())<<16|opCodeBufferDestroy)\n\n\tfmt.Println(\"Sending Buffer -> Destroy\")\n\tfmt.Println(hex.Dump(this.c.buf.Bytes()))\n\tthis.c.c.WriteMsgUnix(this.c.buf.Bytes(), oob, nil)\n\treturn nil\n}", "title": "" }, { "docid": "30a57afcdd44477e55b51bc1c05dfa25", "score": "0.43754774", "text": "func PoolColorArrayRemove(pSelf []PoolColorArray, pIdx Int) {\n\tcpSelf, _ := unpackArgSPoolColorArray(pSelf)\n\tcpIdx, _ := (C.godot_int)(pIdx), cgoAllocsUnknown\n\tC.godot_pool_color_array_remove(cpSelf, cpIdx)\n\tpackSPoolColorArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "296aad46d7e320dff7880fe3aa2ae409", "score": "0.43563777", "text": "func ClearByteArray(data []byte) {\n\tif data == nil {\n\t\treturn\n\t}\n\tfor i := 0; i < len(data); i++ {\n\t\tdata[i] = 0\n\t}\n}", "title": "" }, { "docid": "863d2191fcaa69abb34529722e28c851", "score": "0.4353779", "text": "func TMQFreeJsonMeta(jsonMeta unsafe.Pointer) {\n\tC.tmq_free_json_meta((*C.char)(jsonMeta))\n}", "title": "" }, { "docid": "f926739d0ef2b451f679cb90fb81acf7", "score": "0.43535912", "text": "func PurgeData() error {\n\tsupervisorURI, err := GetSupervisorURI()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tsupervisorAPIKey, err := GetSupervisorAPIKey()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tpurgeRequest := ForceRequest{Force: true}\n\n\tjsonBody, err := json.Marshal(purgeRequest)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tgo func() {\n\t\t_, err = 
http.Post(supervisorURI+\"/v2/applications/\"+appID+\"/purge?apikey=\"+supervisorAPIKey,\n\t\t\t\"application/json\", bytes.NewBuffer(jsonBody))\n\t}()\n\n\treturn nil\n}", "title": "" }, { "docid": "95ec54689a707268c938b9c583ceb239", "score": "0.43454626", "text": "func NewSegmentCleanupData_List(s *capnp.Segment, sz int32) (SegmentCleanupData_List, error) {\n\tl, err := capnp.NewCompositeList(s, capnp.ObjectSize{DataSize: 0, PointerCount: 1}, sz)\n\treturn SegmentCleanupData_List{l}, err\n}", "title": "" }, { "docid": "fb5ad20b40d3bfed99d35044bcc0dae8", "score": "0.43419826", "text": "func (a *DynamicArray) Clear() {\n\tfor i := 0; i < a.size; i++ {\n\t\ta.arr[i] = nil\n\t}\n\n\ta.size = 0\n}", "title": "" }, { "docid": "e638b42138f43fb1a532bfdc4d5da713", "score": "0.43338457", "text": "func (sa *Array) Unlock() error {\n\treturn Unlock(sa.Array)\n}", "title": "" }, { "docid": "0a507dfb33497a20460fa35c24cb5282", "score": "0.43334773", "text": "func BenchmarkClearArray(b *testing.B) {\n\tb.Run(\"BenchmarkCA1\", func(b *testing.B) {\n\t\tarr := [arraySize]MyInt{}\n\t\tfor i := 0; i < b.N; i++ {\n\t\t\tfor i := range arr {\n\t\t\t\tarr[i] = 0\n\t\t\t}\n\t\t}\n\t})\n\n\tb.Run(\"BenchmarkCA2\", func(b *testing.B) {\n\t\tarr := [arraySize]MyInt{}\n\t\tb.StopTimer()\n\t\tsarr := arr[:]\n\t\tb.StartTimer()\n\t\tfor i := 0; i < b.N; i++ {\n\t\t\tfor i := range sarr {\n\t\t\t\tsarr[i] = 0\n\t\t\t}\n\t\t}\n\t})\n\n\tb.Run(\"BenchmarkCA3\", func(b *testing.B) {\n\t\tarr := [arraySize]MyInt{}\n\t\tparr := &arr\n\t\tfor i := 0; i < b.N; i++ {\n\t\t\tfor i := range parr {\n\t\t\t\tparr[i] = 0\n\t\t\t}\n\t\t}\n\t})\n}", "title": "" }, { "docid": "194971a66ad07170f0c16e924314a8b3", "score": "0.43316945", "text": "func (t TriMeshData) Destroy() {\n\tdelete(vertexListMap, int(t))\n\tdelete(indexListMap, int(t))\n\tC.dGeomTriMeshDataDestroy(t.c())\n}", "title": "" }, { "docid": "ef51e944200d8bd8b0f3e5da695373df", "score": "0.43311098", "text": "func (e *ExtensionArrayBase) Release() {\n\te.array.Release()\n\te.storage.Release()\n}", "title": "" }, { "docid": "7c3a71852d6e2aea92b1deb1c6bb6f34", "score": "0.43272173", "text": "func PoolByteArrayRemove(pSelf []PoolByteArray, pIdx Int) {\n\tcpSelf, _ := unpackArgSPoolByteArray(pSelf)\n\tcpIdx, _ := (C.godot_int)(pIdx), cgoAllocsUnknown\n\tC.godot_pool_byte_array_remove(cpSelf, cpIdx)\n\tpackSPoolByteArray(pSelf, cpSelf)\n}", "title": "" }, { "docid": "ff2d1944ef494ed48da58ebfaa9ee2ab", "score": "0.4326945", "text": "func (*Array) MarshalZerologArray(*Array) {\n}", "title": "" }, { "docid": "780a280084841727bc76a75af89b2ef9", "score": "0.43000278", "text": "func (a *DataApiService) DestroyDataExecute(r ApiDestroyDataRequest) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodDelete\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"DataApiService.DestroyData\")\n\tif err != nil {\n\t\treturn nil, GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/data/destroy\"\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\tif r.objects == nil {\n\t\treturn nil, reportError(\"objects is required and must be specified\")\n\t}\n\n\tlocalVarQueryParams.Add(\"objects\", parameterToString(*r.objects, \"\"))\n\t// to determine the Content-Type 
header\n\tlocalVarHTTPContentTypes := []string{}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = _ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "title": "" }, { "docid": "7316e708ed80ff15eef52b59e6ac2823", "score": "0.42878458", "text": "func (p *Primitive) Destroy() error {\n\n\terr := p.Meta() // p is now filled out\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor i := 0; i < p.Chunks; i++ {\n\t\tdelReq := &proto.DeleteRequest{}\n\t\tdelReq.Key = proto.Key(fmt.Sprintf(\"%s%s:%10d\", pdb, p.Id, i))\n\t\tdelResp := &proto.DeleteResponse{}\n\t\terr := kvClient.Call(proto.Delete, delReq, delResp)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn p.DestroyMeta()\n}", "title": "" }, { "docid": "0e85fb916b0b9c947273093b98e6c805", "score": "0.4283422", "text": "func PoolVector3ArrayRemove(pSelf []PoolVector3Array, pIdx Int) {\n\tcpSelf, _ := unpackArgSPoolVector3Array(pSelf)\n\tcpIdx, _ := (C.godot_int)(pIdx), cgoAllocsUnknown\n\tC.godot_pool_vector3_array_remove(cpSelf, cpIdx)\n\tpackSPoolVector3Array(pSelf, cpSelf)\n}", "title": "" }, { "docid": "f8caf18922c16e6cc899b41769d5a9d1", "score": "0.4282535", "text": "func (a *DataApiService) DestroyData(ctx _context.Context) ApiDestroyDataRequest {\n\treturn ApiDestroyDataRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "title": "" }, { "docid": "0ec5bed43ec31fb71df933ad014e9bbf", "score": "0.42797863", "text": "func safeArrayGetRecordInfo(safearray *SafeArray) (recordInfo interface{}, err error) {\n\terr = convertHresultToError(\n\t\tprocSafeArrayGetRecordInfo.Call(\n\t\t\tuintptr(unsafe.Pointer(safearray)),\n\t\t\tuintptr(unsafe.Pointer(&recordInfo))))\n\treturn\n}", "title": "" }, { "docid": "a6c63f2c333896832e9fe259833a59af", "score": "0.42781335", "text": "func freeDecriptor(p unsafe.Pointer, dtype C.ub4) {\r\n\ttptr := *(*unsafe.Pointer)(p)\r\n\tC.OCIDescriptorFree(unsafe.Pointer(tptr), dtype)\r\n}", "title": "" } ]
52a01726743ee8375ce1a46f26785cb4
The Organization associated with the Audit Entry.
[ { "docid": "8a8ee21d0ca28014783a272eea4cead3", "score": "0.6993289", "text": "func (this OrgEnableTwoFactorRequirementAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" } ]
[ { "docid": "99edfd471c30c6978fc79cccd62a7400", "score": "0.8278499", "text": "func (this RepoAccessAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "bece059b5052c617c59faf167c94fe5f", "score": "0.81008947", "text": "func (this RepoArchivedAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "c58bff9238d98fb4ae945899add38da7", "score": "0.8086913", "text": "func (this RepoCreateAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "76bda632688c3129b62cdae0f8658d36", "score": "0.79992795", "text": "func (this OrgCreateAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "8a1a6fd968874d12f49310f9e705dfcb", "score": "0.7907162", "text": "func (this OrgEnableSamlAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "b4bc00efd479dc8b177520a90db16c1f", "score": "0.78673524", "text": "func (this TeamAddRepositoryAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "b1736e00e18d4b0bd6553a908450c19c", "score": "0.7821159", "text": "func (this RepoDestroyAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "c51f29c7f689fb32916bc2318fef9009", "score": "0.770792", "text": "func (this RepoAddMemberAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "d2234707294e69bd621ca00f40473837", "score": "0.76937", "text": "func (this OrgAddBillingManagerAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "b8a9dbeaf5fb67f71315699ab0ab6263", "score": "0.76463264", "text": "func (this OrgInviteToBusinessAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "9258f991f591257a3fce374f84468d8a", "score": "0.75705355", "text": "func (this RepoAddTopicAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "d107ea74be8454dd81fff9cb93fad31d", "score": "0.75697416", "text": "func (this OrgAddMemberAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "ee0f115ec1f921781bd6004816d8f23d", "score": "0.755626", "text": "func (this TeamAddMemberAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "faf65fe0b3db9df3664ade601060cbbb", "score": "0.7538143", "text": "func (this OrgOauthAppAccessApprovedAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "85963c3a495adcaea99aaa3eff69d60b", "score": "0.7529373", "text": "func (this OrgDisableSamlAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "e80f4300bbfee81cf833597dd0070c93", "score": "0.7527155", "text": "func (this TeamRemoveRepositoryAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "eee44ebac17d6db1a4777367d9e91045", "score": "0.7510452", "text": "func (this OrgOauthAppAccessRequestedAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "4d846b4fb705a4452ed640e49d727484", "score": "0.7477526", "text": "func (this TeamChangeParentTeamAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": 
"8ee526a40a8c43be3bd22576f160358b", "score": "0.74562854", "text": "func (c *CA) Organization() *organization.Organization {\n\treturn c.organization\n}", "title": "" }, { "docid": "fa0657c012b85c7936292f0ec1e86cbf", "score": "0.74527675", "text": "func (this OrgBlockUserAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "726c6f1d34e2e47aa0712f19cce71844", "score": "0.74420285", "text": "func (this OrgUpdateMemberAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "8e89a6f0d392a51cdeb83e8ff304433f", "score": "0.74377155", "text": "func (this OrgInviteMemberAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "311606236774c6f6775d328f8bd30c4d", "score": "0.7422237", "text": "func (this OrgRestoreMemberAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "31e535fb05b5964bffaae5befc989529", "score": "0.73685753", "text": "func (this RepoAccessAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "ddf2a0de05f427d8d5b698036db24b24", "score": "0.7358179", "text": "func (this RepoConfigLockAnonymousGitAccessAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "85588da0dea966becfc2c5ec7bcb848a", "score": "0.7298709", "text": "func (this RepoConfigEnableAnonymousGitAccessAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "1329816fd4c3283b8d97eb321cd16d1c", "score": "0.7294959", "text": "func (this RepoConfigEnableCollaboratorsOnlyAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "9837177b42778c5a10f8384ee1c8b18e", "score": "0.72745454", "text": "func (this RepoConfigEnableContributorsOnlyAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "d4a7b4a79fe5693b2ac147b113c1a44e", "score": "0.72699547", "text": "func (this RepoRemoveMemberAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "d23ea4b9bfff1dd71a3d98ca5fadf578", "score": "0.72685266", "text": "func (this OrgEnableOauthAppRestrictionsAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "a8ff1e85653dc3a187ce3a370c3dea6f", "score": "0.724553", "text": "func (this RepoRemoveTopicAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "5916044d349fbe73902ed35e300bffd0", "score": "0.72421455", "text": "func (this OrgConfigEnableCollaboratorsOnlyAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "333dce6a4e77fec0baebd2be31509650", "score": "0.72346133", "text": "func (this OauthApplicationCreateAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "c7bd9ba485e0fe715de171502c133bc6", "score": "0.72126687", "text": "func (this OrgUnblockUserAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "f0b6c605d126f9ef539503c373062d39", "score": "0.7203285", "text": "func (this RepoCreateAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "988aa2692985537298127638ffd2dfc7", "score": "0.7143839", "text": "func (this RepositoryVisibilityChangeEnableAuditEntry) 
GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "5cf67142d00911539d47ee19ce7a8703", "score": "0.7143142", "text": "func (this RepoConfigDisableAnonymousGitAccessAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "3f6e2481cc2286218bf3d4bc97383bd4", "score": "0.7138438", "text": "func (this RepoArchivedAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "7423e2cb4b62cee27b5d8dcc149385cc", "score": "0.7130555", "text": "func (this RepoConfigUnlockAnonymousGitAccessAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "95959db1502c8c1edc192d7708b7ebc6", "score": "0.71121085", "text": "func (this RepoConfigDisableCollaboratorsOnlyAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "a542495853b80704712c5b132014a978", "score": "0.7093661", "text": "func (this TeamAddRepositoryAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "462e3401d9ed6648ee6731fb3b9ce4b8", "score": "0.7085408", "text": "func (this RepoConfigDisableContributorsOnlyAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "d74ce783a7c4fbce0a76c63080bc1d94", "score": "0.70822126", "text": "func (this RepoChangeMergeSettingAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "3f6557f4acc8b53bdf5d90ec57668b8d", "score": "0.7072549", "text": "func (this OrgRemoveMemberAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "f7e72a535576ed069abde465f23464e9", "score": "0.70611745", "text": "func (this TeamRemoveMemberAuditEntry) GetOrganization() *Organization { return this.Organization }", "title": "" }, { "docid": "6e0465dfd9ea0720962f99b9e19f50d3", "score": "0.70490944", "text": "func (this OrgConfigDisableCollaboratorsOnlyAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "2170145f19ceaf8e0476c443be660b64", "score": "0.7031423", "text": "func (this OrgDisableOauthAppRestrictionsAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "2f1927595c551658884610279ea8c501", "score": "0.70281047", "text": "func (this OrgOauthAppAccessDeniedAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "7fb782d0dc91d92143fbb4ecbb763f52", "score": "0.7027173", "text": "func (this RepositoryVisibilityChangeDisableAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "ca6d166563dfdd5d55027fb052a217a6", "score": "0.70046866", "text": "func (this RepoAccessAuditEntry) GetOrganizationURL() *string { return this.OrganizationURL }", "title": "" }, { "docid": "677ca3a5745ef1db78b10ee98cc2a6c3", "score": "0.6996883", "text": "func (this PrivateRepositoryForkingEnableAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "c1846052e79ce65c59757dd5a7406f00", "score": "0.6923863", "text": "func (this OrgEnableSamlAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "d08bee42358e5da72eaeb247caf07b74", "score": "0.69192827", "text": "func (this RepoAddMemberAuditEntry) GetOrganizationName() *string { return 
this.OrganizationName }", "title": "" }, { "docid": "d357fba43e9959e01657b4e7acb07ac5", "score": "0.6904566", "text": "func (this OrgCreateAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "f152f993536f40243d725244bfd91983", "score": "0.687735", "text": "func (this RepoDestroyAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "a9e8a6750e99a90055aedaa07a397d1e", "score": "0.6865779", "text": "func (this OrgUpdateDefaultRepositoryPermissionAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "f1d000a19cd48381b9c27d7d5d4ac01e", "score": "0.68618464", "text": "func (this RepoConfigEnableSockpuppetDisallowedAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "a7ce9fe05690d35bb53938081702ec7a", "score": "0.68603873", "text": "func (this RepoCreateAuditEntry) GetOrganizationURL() *string { return this.OrganizationURL }", "title": "" }, { "docid": "7912e7cfbdc3a3b60c41b6c85922164f", "score": "0.68403643", "text": "func (this OrgRemoveOutsideCollaboratorAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "285a7d05cd1db8047bee104c43e156a5", "score": "0.68236256", "text": "func (this RepoArchivedAuditEntry) GetOrganizationURL() *string { return this.OrganizationURL }", "title": "" }, { "docid": "2ff90319b89d86e9faa40988bfa02132", "score": "0.68110967", "text": "func (this RepoConfigDisableSockpuppetDisallowedAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "bb909f679b47aaf2321f729d4b44537d", "score": "0.67849594", "text": "func (this PrivateRepositoryForkingDisableAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "532c5d4c47c20a6afc7a95b9ee4ff8f1", "score": "0.6761758", "text": "func (o SourceIamPolicyOutput) Organization() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *SourceIamPolicy) pulumi.StringOutput { return v.Organization }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "ff0a92b3d69dd8cfcbd799d4a5c6e821", "score": "0.67485505", "text": "func (this TeamAddMemberAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "c6636d6fe81cdd10e1b01c4fae20ab6e", "score": "0.67366475", "text": "func (this RepoAddTopicAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "078de86065d1d7c56cc61152570517f6", "score": "0.67301005", "text": "func (this MembersCanDeleteReposEnableAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "4ae3af8500c0d7c32e99f6c13b92d24f", "score": "0.67264664", "text": "func (this MembersCanDeleteReposClearAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "1c3609e910452e8a36b062805cad4178", "score": "0.6716161", "text": "func (this OrgInviteToBusinessAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "ea581a67388ddf24c0f3e84e376081a1", "score": "0.66875887", "text": "func (this TeamAddRepositoryAuditEntry) GetOrganizationURL() *string { return this.OrganizationURL }", "title": "" }, { "docid": "d7d0653db12fb6e2408550f50a1ac8ce", "score": "0.66634864", "text": "func (this OrgDisableTwoFactorRequirementAuditEntry) GetOrganization() *Organization {\n\treturn 
this.Organization\n}", "title": "" }, { "docid": "f0f1f551832b950770d786d798aafb60", "score": "0.66589266", "text": "func (this OrgAddMemberAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "988479f07fa86e5617e254d6e1f38b78", "score": "0.66270167", "text": "func (this OrgRestoreMemberMembershipOrganizationAuditEntryData) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "c3ba97e5cf21a84ef7fa749d9b7634a6", "score": "0.662155", "text": "func (this OrgCreateAuditEntry) GetOrganizationURL() *string { return this.OrganizationURL }", "title": "" }, { "docid": "57178c6713d4a07af2db80e13af1976f", "score": "0.66054976", "text": "func (this MembersCanDeleteReposDisableAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "8c1352b597eedb9003f92b74d19d68c0", "score": "0.65740263", "text": "func (this OrgDisableSamlAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "66ded867ef5a97f85359a39101c05ec6", "score": "0.6573969", "text": "func (this RepoAddMemberAuditEntry) GetOrganizationURL() *string { return this.OrganizationURL }", "title": "" }, { "docid": "3eb597d8092f91183c0263fe984fc125", "score": "0.6559889", "text": "func (this OrgUpdateMemberAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "f7272a742d02329465bf42b7bf239402", "score": "0.6554113", "text": "func (this OrgUpdateMemberRepositoryCreationPermissionAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "b6f39993b86c16d6c18d276935e23d70", "score": "0.6545086", "text": "func (this *Organization) GetOrganization() string {\n\treturn this.organization\n}", "title": "" }, { "docid": "d4f815f66dc7570838ccd861f75360a7", "score": "0.65431476", "text": "func (this OrgRemoveBillingManagerAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "3434ad6336717224fef926a5799b5c4b", "score": "0.65413016", "text": "func (this RepoDestroyAuditEntry) GetOrganizationURL() *string { return this.OrganizationURL }", "title": "" }, { "docid": "1a4c6e5d33dd53934147e265cb5785e8", "score": "0.6514841", "text": "func (this OrgEnableSamlAuditEntry) GetOrganizationURL() *string { return this.OrganizationURL }", "title": "" }, { "docid": "767edf4cdf8bd411fc532dc6a629116a", "score": "0.65075177", "text": "func (this RepoConfigEnableContributorsOnlyAuditEntry) GetOrganizationName() *string {\n\treturn this.OrganizationName\n}", "title": "" }, { "docid": "d7f8a544bfb7555a693fdd5e32f7f7dd", "score": "0.649183", "text": "func (this RepoConfigLockAnonymousGitAccessAuditEntry) GetOrganizationURL() *string {\n\treturn this.OrganizationURL\n}", "title": "" }, { "docid": "9d70bbc02ab677a1bd3ad60db46bf46f", "score": "0.64908093", "text": "func (this OrgInviteMemberAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "2183c7fe67eeb5f6218ab6a5d0424e28", "score": "0.6479502", "text": "func (this OrgRestoreMemberAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "890590f1d603f15ee610b6bc15109f00", "score": "0.64788043", "text": "func (this OrgBlockUserAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "e921f46ba51be9378d982fbfd805cffc", "score": "0.6475355", "text": "func (this 
RepoRemoveMemberAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "c4d3a847808094fa3150f1fab6c2a4d0", "score": "0.64699644", "text": "func (this OrgUpdateMemberRepositoryInvitationPermissionAuditEntry) GetOrganization() *Organization {\n\treturn this.Organization\n}", "title": "" }, { "docid": "9cb55116998ac0a6904e696dedd7857f", "score": "0.6466293", "text": "func (o NotificationConfigOutput) Organization() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *NotificationConfig) pulumi.StringOutput { return v.Organization }).(pulumi.StringOutput)\n}", "title": "" }, { "docid": "f420f5e33a502f44df49632d9d6021f4", "score": "0.6457685", "text": "func (this RepoConfigLockAnonymousGitAccessAuditEntry) GetOrganizationName() *string {\n\treturn this.OrganizationName\n}", "title": "" }, { "docid": "737f6f81027537677f4983367d714b7a", "score": "0.64299107", "text": "func (this RepoConfigEnableAnonymousGitAccessAuditEntry) GetOrganizationURL() *string {\n\treturn this.OrganizationURL\n}", "title": "" }, { "docid": "785944998f3f7e31e284875ad476a975", "score": "0.6426253", "text": "func (this OrgOauthAppAccessRequestedAuditEntry) GetOrganizationURL() *string {\n\treturn this.OrganizationURL\n}", "title": "" }, { "docid": "b5ca057fad30d7f4503f5dce0cf37765", "score": "0.6421476", "text": "func (this RepoRemoveTopicAuditEntry) GetOrganizationName() *string { return this.OrganizationName }", "title": "" }, { "docid": "e7376183cdb2ee355883c559ec701f2d", "score": "0.64193225", "text": "func (this RepoConfigEnableAnonymousGitAccessAuditEntry) GetOrganizationName() *string {\n\treturn this.OrganizationName\n}", "title": "" }, { "docid": "47bc14ef79b28de56e012dd703a8485f", "score": "0.6414525", "text": "func (this OrgAddBillingManagerAuditEntry) GetOrganizationURL() *string { return this.OrganizationURL }", "title": "" }, { "docid": "6400e9d17d3ba51f804aa90a3fbe3cd9", "score": "0.6405199", "text": "func (this RepoChangeMergeSettingAuditEntry) GetOrganizationName() *string {\n\treturn this.OrganizationName\n}", "title": "" }, { "docid": "463ba702da43aa2ece308ca542330429", "score": "0.63980967", "text": "func (this RepoConfigEnableContributorsOnlyAuditEntry) GetOrganizationURL() *string {\n\treturn this.OrganizationURL\n}", "title": "" }, { "docid": "247395ff6ff5fe9ffb2651d2ac4447d8", "score": "0.6397428", "text": "func (this RepoConfigEnableCollaboratorsOnlyAuditEntry) GetOrganizationName() *string {\n\treturn this.OrganizationName\n}", "title": "" }, { "docid": "20a0e15d065eb6181c5a82f54cc4b98b", "score": "0.6394885", "text": "func (this TeamAddMemberAuditEntry) GetOrganizationURL() *string { return this.OrganizationURL }", "title": "" } ]
e8285966e394c076ed39f00201d602cd
Fields allows partial responses to be retrieved. See for more information.
[ { "docid": "6dfe4c9ff968cd4f5466405b6227d38d", "score": "0.0", "text": "func (c *ProjectsLocationsDatasetsFhirStoresGetFHIRStoreMetricsCall) Fields(s ...googleapi.Field) *ProjectsLocationsDatasetsFhirStoresGetFHIRStoreMetricsCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" } ]
[ { "docid": "485ebcd5d3f62391f9e607b8b1da9cf4", "score": "0.69866556", "text": "func (APIResponse) Fields() []ent.Field {\n\treturn []ent.Field{\n\t\tfield.Int(\"code\").\n\t\t\tPositive(),\n\t\tfield.String(\"type\").\n\t\t\tDefault(\"unknown\"),\n\t\tfield.String(\"message\").\n\t\t\tDefault(\"unknown\"),\n\t}\n}", "title": "" }, { "docid": "394887e30d4a92f3b224f6dc820f943f", "score": "0.6658595", "text": "func (a *WordsApiService) GetFields(ctx context.Context, data *models.GetFieldsRequest) (models.FieldsResponse, *http.Response, error) {\n var (\n successPayload models.FieldsResponse\n )\n\n requestData, err := data.CreateRequestData();\n if err != nil {\n return successPayload, nil, err\n }\n\n requestData.Path = a.client.cfg.BaseUrl + requestData.Path;\n\n r, err := a.client.prepareRequest(ctx, requestData)\n if err != nil {\n return successPayload, nil, err\n }\n\n response, err := a.client.callAPI(r)\n defer response.Body.Close()\n\n if err != nil || response == nil {\n return successPayload, response, err\n }\n if response.StatusCode == 401 {\n return successPayload, nil, errors.New(\"Access is denied\")\n }\n if response.StatusCode >= 300 {\n var apiError models.WordsApiErrorResponse;\n\n if err = json.NewDecoder(response.Body).Decode(&apiError); err != nil {\n return successPayload, response, err\n }\n\n return successPayload, response, &apiError\n }\n if err = json.NewDecoder(response.Body).Decode(&successPayload); err != nil {\n return successPayload, response, err\n }\n\n return successPayload, response, err\n}", "title": "" }, { "docid": "4d69c3599ba7ff89f7e24031fa05aa9c", "score": "0.64851075", "text": "func (c *Client) getFields(end endpoint) ([]string, error) {\n\treq, err := c.request(end + \"meta\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar f []string\n\n\tif err = c.send(req, &f); err != nil && err != ErrNoResults {\n\t\treturn nil, err\n\t}\n\n\treturn f, nil\n}", "title": "" }, { "docid": "f2139fd7e9d5944410a050f0062210e3", "score": "0.63830054", "text": "func (e GetResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "b5575f262f6a928dee8bcf121d6fc958", "score": "0.6222895", "text": "func (e Marks_GetResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "67383bc24c86c0d5a9669b9e35310f13", "score": "0.6196849", "text": "func (c *MobileapppanelsGetCall) Fields(s ...googleapi.Field) *MobileapppanelsGetCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "6a0ad8f18045444d1866948c856c4e52", "score": "0.6182445", "text": "func (e PropertyResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "f7d6912aec6e9bdf31aba2720996d39b", "score": "0.61542946", "text": "func (c *ServerDetailsCall) Fields(s ...googleapi.Field) *ServerDetailsCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "35bf78fbcf6780c439f1474d46eecd0a", "score": "0.6148201", "text": "func GetRespField(j *simplejson.Json, keys []string) (actuals []*simplejson.Json) {\r\n\tfor _, k := range keys {\r\n\t\tv := j.Get(k)\r\n\t\tactuals = append(actuals, v)\r\n\t}\r\n\treturn actuals\r\n}", "title": "" }, { "docid": "36a5c5fee7d814648f7e5e4a1d25c800", "score": "0.61457837", "text": "func (e RetrieveManyResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "8b1596f80f155c3335f476bada7e109b", "score": "0.61403096", "text": "func (a *WordsApiService) 
GetFormFields(ctx context.Context, data *models.GetFormFieldsRequest) (models.FormFieldsResponse, *http.Response, error) {\n var (\n successPayload models.FormFieldsResponse\n )\n\n requestData, err := data.CreateRequestData();\n if err != nil {\n return successPayload, nil, err\n }\n\n requestData.Path = a.client.cfg.BaseUrl + requestData.Path;\n\n r, err := a.client.prepareRequest(ctx, requestData)\n if err != nil {\n return successPayload, nil, err\n }\n\n response, err := a.client.callAPI(r)\n defer response.Body.Close()\n\n if err != nil || response == nil {\n return successPayload, response, err\n }\n if response.StatusCode == 401 {\n return successPayload, nil, errors.New(\"Access is denied\")\n }\n if response.StatusCode >= 300 {\n var apiError models.WordsApiErrorResponse;\n\n if err = json.NewDecoder(response.Body).Decode(&apiError); err != nil {\n return successPayload, response, err\n }\n\n return successPayload, response, &apiError\n }\n if err = json.NewDecoder(response.Body).Decode(&successPayload); err != nil {\n return successPayload, response, err\n }\n\n return successPayload, response, err\n}", "title": "" }, { "docid": "6ec47f8fc74a1584381dadd1cc37ee05", "score": "0.6116269", "text": "func (e ThingResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "2187bd08807f04a1a5f2dff60624aead", "score": "0.6110415", "text": "func (e SomeStringResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "c4b10291f54a1523db379c8001436adc", "score": "0.6107665", "text": "func (m *SearchRequest) GetFields()([]string) {\n val, err := m.GetBackingStore().Get(\"fields\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.([]string)\n }\n return nil\n}", "title": "" }, { "docid": "bab5e89c606df6667e1b330cb9939ec1", "score": "0.6100833", "text": "func (e GetUsersResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "df086c1f1c06f292a958698997503573", "score": "0.60716283", "text": "func (c *RetrieveCall) Fields(s ...googleapi.Field) *RetrieveCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "a65a631800b43db972dea964bf2a44ee", "score": "0.6065498", "text": "func (h *Http) getFields() []string {\n\tvar f Fields\n\tfields := make([]string, 0)\n\n\tif h.Request.Method != \"\" {\n\t\tf.method = h.Request.Method\n\t\tfields = append(fields, f.method)\n\t}\n\n\tif h.Response.StatusCode != 0 {\n\t\tf.code = h.Response.StatusCode\n\t\tfields = append(fields, strconv.Itoa(f.code))\n\t}\n\n\tif h.Request.Proto != \"\" {\n\t\tf.protocol = h.Request.Proto\n\t\tfields = append(fields, f.protocol)\n\t}\n\n\tif h.Request.URL.Host != \"\" {\n\t\tf.path = h.Request.URL.Path\n\t\tfields = append(fields, f.path)\n\t}\n\n\tif h.Request.Host != \"\" {\n\t\tf.host = h.Request.Host\n\t\tfields = append(fields, f.host)\n\t}\n\n\treturn fields\n}", "title": "" }, { "docid": "0a0fe450a1fa924060d2aa027a87d61e", "score": "0.6049475", "text": "func (e UsersResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "9fd3fc2846d348e5ff0f2a2d31a9d927", "score": "0.60461026", "text": "func (e ApplicationResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "dc5ef6f2c5e8762d260c0faa3d298cd5", "score": "0.6044642", "text": "func (e InferTomeAndFolioResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "24fba2ca2221c7b8b6eefc5af7f5bfd4", "score": "0.60434955", 
"text": "func (e AuthenticationResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "50405c781c95fc43d55971287c8a8668", "score": "0.60432905", "text": "func (e UserResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "50405c781c95fc43d55971287c8a8668", "score": "0.60432905", "text": "func (e UserResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "d7eb19927557170d79db87901d00be3b", "score": "0.6028078", "text": "func (e ValidateResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "003fca2ac61c7e309a8f9ef3472581c6", "score": "0.60267466", "text": "func (e RequestProxyResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "2fb03c44b569085cc10bdb19065db175", "score": "0.6023246", "text": "func (e DiscoveryResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "437ae00fc85e174a818bf3a8023f1b2b", "score": "0.6009606", "text": "func (Opt) Fields() []ent.Field {\n\treturn []ent.Field{\n\t\tfield.UUID(\"id\", uuid.UUID{}).Default(uuid.New),\n\t\tfield.String(\"body\"),\n\t\tfield.Float(\"weight\"),\n\t}\n}", "title": "" }, { "docid": "22aeb7993e9ff2cf1a7867b316e50198", "score": "0.60088754", "text": "func (r *SearchMvt) Fields(fields ...string) *SearchMvt {\n\tr.req.Fields = fields\n\n\treturn r\n}", "title": "" }, { "docid": "0d6867a374702b8c0def8000cddd508c", "score": "0.5995647", "text": "func (f *fakeResultReader) Fields() []*querypb.Field {\n\treturn f.fields\n}", "title": "" }, { "docid": "2a1660267b0981ec956e6b949ad5ca7d", "score": "0.59930015", "text": "func (c *ResultsGetCall) Fields(s ...googleapi.Field) *ResultsGetCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "1135fcd87edef4243952224f4a1f63d2", "score": "0.5990293", "text": "func (c *DirectDealsGetCall) Fields(s ...googleapi.Field) *DirectDealsGetCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "b39187e2e54332046a188d22ffe12051", "score": "0.5989228", "text": "func (e DataResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "f908713531697f41d0e92813243e4541", "score": "0.5965869", "text": "func (e ListResourcesResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "a5adaff7ab7cec520165a5566083db1d", "score": "0.596416", "text": "func (c *V1GetIamPolicyCall) Fields(s ...googleapi.Field) *V1GetIamPolicyCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "a7a6a448d5ac4c0c1fd13108dbac2a22", "score": "0.59617054", "text": "func (o ClusteringResponseOutput) Fields() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v ClusteringResponse) []string { return v.Fields }).(pulumi.StringArrayOutput)\n}", "title": "" }, { "docid": "1dc72f4b6aa85321a6d6762035bd8aa5", "score": "0.59572834", "text": "func (e GetBusinessResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "fb59920ac1dd872802152503c24b15d2", "score": "0.5944213", "text": "func (b Metadata) Fields() []string {\n\treturn []string{serviceHeader}\n}", "title": "" }, { "docid": "1d1ea91dad36ed8ae84f187f3cc090ac", "score": "0.59397167", "text": "func (e GetUserResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "ad378735139bb050b6e4328d23d5c896", "score": "0.59296715", "text": "func 
(e InferLicenseResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "fcfad910e6ed2eb27cd5239738346a8a", "score": "0.59191054", "text": "func (e GetCloudResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "da37861ec5c5b754a2eeabd8680d53d7", "score": "0.5915748", "text": "func (c *BillingInfoGetCall) Fields(s ...googleapi.Field) *BillingInfoGetCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "27c5c91346886a65fc2876e9441d423f", "score": "0.59143835", "text": "func (e StartDiscoveryResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "754bf697f4b88d334b1125506d010743", "score": "0.59134597", "text": "func (e GetVPCCidrResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "dbe2f843b4a682d404cb7df343cd2515", "score": "0.5900355", "text": "func (o ClusteringResponsePtrOutput) Fields() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v *ClusteringResponse) []string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Fields\n\t}).(pulumi.StringArrayOutput)\n}", "title": "" }, { "docid": "7e4f36ba9c68d580b7e47d071b5fbc47", "score": "0.5900316", "text": "func (e ProductResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "57587632c414fce8fc136acd0f1b0ea8", "score": "0.5892315", "text": "func (CoveredPerson) Fields() []ent.Field {\n return []ent.Field{\n \n }\n }", "title": "" }, { "docid": "6ce8321af79ad609014608d430e20566", "score": "0.5882958", "text": "func (e ReadProfileResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "8b6ffb00d311c977219d070348511bf7", "score": "0.5877198", "text": "func (e GetAccountResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "f6585116ce7c7d5ef5307ecf7d828704", "score": "0.58761096", "text": "func (a *WordsApiService) GetField(ctx context.Context, data *models.GetFieldRequest) (models.FieldResponse, *http.Response, error) {\n var (\n successPayload models.FieldResponse\n )\n\n requestData, err := data.CreateRequestData();\n if err != nil {\n return successPayload, nil, err\n }\n\n requestData.Path = a.client.cfg.BaseUrl + requestData.Path;\n\n r, err := a.client.prepareRequest(ctx, requestData)\n if err != nil {\n return successPayload, nil, err\n }\n\n response, err := a.client.callAPI(r)\n defer response.Body.Close()\n\n if err != nil || response == nil {\n return successPayload, response, err\n }\n if response.StatusCode == 401 {\n return successPayload, nil, errors.New(\"Access is denied\")\n }\n if response.StatusCode >= 300 {\n var apiError models.WordsApiErrorResponse;\n\n if err = json.NewDecoder(response.Body).Decode(&apiError); err != nil {\n return successPayload, response, err\n }\n\n return successPayload, response, &apiError\n }\n if err = json.NewDecoder(response.Body).Decode(&successPayload); err != nil {\n return successPayload, response, err\n }\n\n return successPayload, response, err\n}", "title": "" }, { "docid": "4b73039f3c9f6d711541d9cecd67d2bb", "score": "0.58652616", "text": "func (e ExternalIDPResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "9766b3321204f44cd4505acba42a52ec", "score": "0.58639586", "text": "func (e VersionResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "3fccc6c5806bcef8f769d2e3090654fc", "score": "0.586351", "text": "func (c 
*OperationsGetCall) Fields(s ...googleapi.Field) *OperationsGetCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "b27e62ea4ae79ac088e3df986e0a9198", "score": "0.58624136", "text": "func (m *RawMapper) Fields() []string { return append(m.selectFields, m.selectTags...) }", "title": "" }, { "docid": "66abc8bdd7384044cb1e456962dbfeaa", "score": "0.5859054", "text": "func (e GetConfigDataResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "62d48f8f81d2a5d48040179e4db5a83c", "score": "0.5856515", "text": "func (e RequestInfoValidationError) Field() string { return e.field }", "title": "" }, { "docid": "cd8af44c882c987ad4857e2742a166ee", "score": "0.58499384", "text": "func (rb *partialRequestBodyFields) getattr() reflect.Value {\n\treturn reflect.ValueOf(rb.Fields)\n}", "title": "" }, { "docid": "095b390e6a90c1a1f55b0e9b8645fcb9", "score": "0.58429617", "text": "func (e PolygonParkAnalytics_ResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "519a277f7aa259d7d3940403faa1a235", "score": "0.58394766", "text": "func (e PolygonAnalytics_ResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "1b312041242b5955daaa33866f69a32c", "score": "0.58299416", "text": "func (e PolygonSubwayAnalytics_ResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "f4f1a28a11dcf65481f540624b7f869a", "score": "0.5800895", "text": "func (e ExternalIDPSearchResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "1610f2f3f0e785edefb5de3caf0152b3", "score": "0.5795434", "text": "func (e PolygonPollutionAnalytics_ResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "28255fc5e34004eb7c384197de50671f", "score": "0.5792847", "text": "func (e GetVehicleResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "2b6fe4fd9345abc24f2533577147130c", "score": "0.57921517", "text": "func (e PolygonAnalyticsDashboard_ResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "f92985202a11de857b5e9a9dc0ba6115", "score": "0.57788354", "text": "func (e ListEquipmentMetadataResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "9bf0452b4784c4af8c55c8759cc9cc5d", "score": "0.5777988", "text": "func (Robber) Fields() []ent.Field {\n\treturn nil\n}", "title": "" }, { "docid": "f66e5952f49bc0fc96c53d37e43eb5e4", "score": "0.5777707", "text": "func (u User) Fields() map[string]interface{} {\n\tfields := map[string]interface{}{\n\t\t\"hash\": u.Hash,\n\t\t\"username\": u.Username,\n\t\t\"private_id\": u.PrivateID,\n\t\t\"public_id\": u.PublicID,\n\t\t\"totp\": u.TOTP,\n\t\t\"use_twofactor\": u.UseTwoFactor,\n\t\t\"seen_twofactor\": u.SeenTwoFactor,\n\t}\n\n\treturn fields\n}", "title": "" }, { "docid": "24847fd03516c51a8030fc5a1e20f026", "score": "0.5776574", "text": "func (e ConnectResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "5f26f309cd5eea863856dce95868a1ae", "score": "0.57762796", "text": "func (e GetEquipmentResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "45f905b74fc5c128f1e9107f4662861a", "score": "0.577466", "text": "func (e RegisterResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "43809ff0b11a0575d065b9fb18578f32", "score": "0.5769884", "text": "func (e 
ShareResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "4611fd3e8a40712fef8b3f80f8839de6", "score": "0.5768802", "text": "func (api *API) Fields(ctx context.Context, indexName string, frameName string) ([]*Field, error) {\n\tif err := api.validate(apiFields); err != nil {\n\t\treturn nil, errors.Wrap(err, \"validate api method\")\n\t}\n\n\tindex := api.Holder.index(indexName)\n\tif index == nil {\n\t\treturn nil, ErrIndexNotFound\n\t}\n\n\tframe := index.frame(frameName)\n\tif frame == nil {\n\t\treturn nil, ErrFrameNotFound\n\t}\n\n\treturn frame.GetFields()\n}", "title": "" }, { "docid": "0c0b68082188b089b05681626d8b8c6a", "score": "0.576195", "text": "func (e ValidateLicenseResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "f0ea1ccced826ef606d0f37cb0fa489a", "score": "0.57617825", "text": "func (e ReadContactResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "b450223f796840abd3b5932458363797", "score": "0.5760644", "text": "func (e BuyBusinessResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "589be393e92f272ae0222a3745886e8a", "score": "0.5760453", "text": "func (rs *ReviewVideoService) Fields() ([]string, error) {\n\tf, err := rs.client.getFields(rs.end)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"cannot get ReviewVideo fields\")\n\t}\n\n\treturn f, nil\n}", "title": "" }, { "docid": "0c91c41ee100cee4e87ddce7c6458bc8", "score": "0.5758449", "text": "func (e UnCordonNodeResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "8c5f8f86725d65613e9d87e716d61d84", "score": "0.57574636", "text": "func (e PayResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "ebbb62608da1f5b628bd29a8eb504760", "score": "0.57503873", "text": "func (e ListPoliciesResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "f8ed19907556a2ec20c48b8944b4944f", "score": "0.57467353", "text": "func (e ListUsersResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "28f257269def99dce2193363adf133b1", "score": "0.574665", "text": "func (e CreateResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "5743faeebcdc0ae8b7aa82bd6303da24", "score": "0.57444096", "text": "func (e ListCloudResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "b8380374fead039161712ad0652ee7a5", "score": "0.5739782", "text": "func (e WatchBusinessResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "7f30eb1b7f5521f5e2273766671f9022", "score": "0.57390416", "text": "func (e ScopeListResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "1f7f3d04b3ae717e81d06fc6fa227ece", "score": "0.5737777", "text": "func (e Marks_GetRequestValidationError) Field() string { return e.field }", "title": "" }, { "docid": "1098bc84c16c106767202b4d480cc46f", "score": "0.57358855", "text": "func (c *ProjectsLocationsDatasetsFhirStoresFhirPatientEverythingCall) Fields(s ...googleapi.Field) *ProjectsLocationsDatasetsFhirStoresFhirPatientEverythingCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "9ac024778ea82140779fc54c567d76e2", "score": "0.5734768", "text": "func (c *ProjectsLocationsDatasetsFhirStoresFhirReadCall) Fields(s ...googleapi.Field) *ProjectsLocationsDatasetsFhirStoresFhirReadCall 
{\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "7cfbf1881b665a8593a076cfc24ec103", "score": "0.5734079", "text": "func (e SearchResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "3bb1067420ba8024a6fdd9aeb63ebd6a", "score": "0.57292765", "text": "func (e EquipmentTypesResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "6de270fe6ddbe8a62075bc30658262dc", "score": "0.5728835", "text": "func (c *MobileapppanelsListCall) Fields(s ...googleapi.Field) *MobileapppanelsListCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "d832ad0d2bd1d39211e94387bd53b819", "score": "0.57285976", "text": "func (e DeltaDiscoveryResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "3c61ee40b9ab114173924e5c18b7fc07", "score": "0.5727241", "text": "func (e ReadRequestValidationError) Field() string { return e.field }", "title": "" }, { "docid": "2d2d891bb5f2bf57466ae702c94f2244", "score": "0.57138616", "text": "func (e EmptyRequestValidationError) Field() string { return e.field }", "title": "" }, { "docid": "15f6d49766fe28b0e1542222167e2152", "score": "0.56994987", "text": "func (e CsvResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "f1fda52eecbcafd57bf798497afd339d", "score": "0.56851494", "text": "func (e ListProductsResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "db4e9299d77f1308eac88b32bb9f68ee", "score": "0.5684029", "text": "func (e TrainingsResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "5ae5a2b9afd25cec12e0a2832e6f7557", "score": "0.5678841", "text": "func (e CordonNodeResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "7b018d8e263195f5cd4f2d949941b563", "score": "0.5669305", "text": "func (e GetProjectResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "665bd6b3b56b0076476595695ec56ba5", "score": "0.5669276", "text": "func (e ReadFeedArticleDetailsResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "e065d886818086a9b885bb5d2dcdc785", "score": "0.56636655", "text": "func (e ListWhitelistsResponseValidationError) Field() string { return e.field }", "title": "" }, { "docid": "3addec489fc4ced8b2b4bc816e7ab3a8", "score": "0.5659628", "text": "func (c *ProjectsLocationsDatasetsFhirStoresFhirVreadCall) Fields(s ...googleapi.Field) *ProjectsLocationsDatasetsFhirStoresFhirVreadCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" }, { "docid": "e0032b5f0d180cdf1af05959a7a59a9c", "score": "0.565835", "text": "func (c *GoogleServiceAccountsGetCall) Fields(s ...googleapi.Field) *GoogleServiceAccountsGetCall {\n\tc.urlParams_.Set(\"fields\", googleapi.CombineFields(s))\n\treturn c\n}", "title": "" } ]
391df15c761a0440c000db0979f7d1d5
SetHeaders : Allow user to set Headers
[ { "docid": "7e00412ef5d69773651754ce074f19d2", "score": "0.0", "text": "func (options *CreatePageRuleOptions) SetHeaders(param map[string]string) *CreatePageRuleOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" } ]
[ { "docid": "44e99838470a496a54b73db6a4524b01", "score": "0.8047731", "text": "func (ac challenge) SetHeaders(w http.ResponseWriter) {\n\n}", "title": "" }, { "docid": "93c76e84ad44b98096f0999a1a1bfe37", "score": "0.8020761", "text": "func (client *Client) setHeaders(req *http.Request, httpVerb, path, contentType, content string) error {\n\tif client.creds == nil {\n\t\treturn ErrNoCredentials\n\t}\n\tnow := time.Now().UTC().Format(\"2006-01-02T15:04:05.000000Z07:00\")\n\n\tif len(contentType) > 0 {\n\t\treq.Header.Set(\"Content-Type\", contentType)\n\t}\n\n\treq.Header.Set(\"Accept\", \"application/json\")\n\treq.Header.Set(\"Dragonchain\", client.creds.GetDragonchainID())\n\treq.Header.Set(\"Timestamp\", fmt.Sprintf(\"%s\", now))\n\treq.Header.Set(\"Authorization\", client.creds.GetAuthorization(httpVerb, path, now, contentType, content))\n\treturn nil\n}", "title": "" }, { "docid": "8031e451edf045774a67a524df95db8a", "score": "0.78400195", "text": "func setHeaders(request *http.Request, headers Vals) {\n\tfor _, v := range headers {\n\t\trequest.Header.Set(v.Name, v.Value)\n\t}\n}", "title": "" }, { "docid": "daf801ae3d26ee9f94bd87f0060b1a62", "score": "0.78021115", "text": "func setHeaders(headers *http.Header, options RegistryOptions) {\n\tt := options.Telemetry\n\tif t.User != \"\" {\n\t\theaders.Add(\"User\", t.User)\n\t}\n\tif t.Client != \"\" {\n\t\theaders.Add(\"Client\", t.Client)\n\t}\n\tif t.Locale != \"\" {\n\t\theaders.Add(\"Locale\", t.Locale)\n\t}\n}", "title": "" }, { "docid": "16752fd6c3dd314c59c3788617cd54ea", "score": "0.77077115", "text": "func setHTTPHeaders(req *http.Request, headers []map[string]string) {\n\t//req.Header.Set(\"Authorization\", \"Bearer access-token\")\n\tfor _, header := range headers {\n\t\tfor k, v := range header {\n\t\t\treq.Header.Set(k, v)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "0219d1e0552444a8ce55d6bb668c4861", "score": "0.76975876", "text": "func (r *Router) SetHeaders(\n\tctx context.Context, w http.ResponseWriter, h map[string][]string,\n) context.Context {\n\theader := w.Header()\n\tfor k, v := range h {\n\t\theader[k] = v\n\t}\n\n\treturn context.WithValue(ctx, HeadersSetKey, true)\n}", "title": "" }, { "docid": "942376a7f47d4f88e17748949ec51d74", "score": "0.7689848", "text": "func setHeaders(w http.ResponseWriter, resp *SendResponse) {\n\t// Set header values\n\tcopyHeader(w.Header(), resp.Response.Header)\n\tif resp.CacheMeta != nil {\n\t\txCacheVal := \"MISS\"\n\n\t\tif resp.CacheMeta.Hit {\n\t\t\txCacheVal = \"HIT\"\n\n\t\t\t// If this is a cache hit, we also set the Age header\n\t\t\tage := fmt.Sprintf(\"%.0f\", resp.CacheMeta.Age.Seconds())\n\t\t\tw.Header().Set(\"Age\", age)\n\n\t\t\t// Update the date value\n\t\t\tw.Header().Set(\"Date\", time.Now().Format(http.TimeFormat))\n\t\t}\n\n\t\tw.Header().Set(\"X-Cache\", xCacheVal)\n\t}\n\n\t// Set status code\n\tw.WriteHeader(resp.Response.StatusCode)\n}", "title": "" }, { "docid": "117d35587752eb87151ad1c0427c900f", "score": "0.7590747", "text": "func SetHeaders(w http.ResponseWriter) {\n\tw.Header().Set(\"X-Frame-Options\", \"SAMEORIGIN\")\n\tw.Header().Set(\"Content-Type\", \"text/html; charset=utf-8\")\n\tw.Header().Set(\"Strict-Transport-Security\", \"max-age=63072000; includeSubDomains; preload\")\n\tw.Header().Set(\"Allow\", \"OPTIONS, GET, HEAD, POST\")\n\tw.Header().Set(\"Content-Security-Policy\", \"default-src 'none'; font-src 'https://fonts.googleapis.com'; img-src 'self' https://i.imgur.com; object-src 'none'; script-src 'self'; style-src 'self'; frame-ancestors 
'self'; base-uri 'self'; form-action 'self'; 'stric-dynamic'\")\n\tw.Header().Set(\"Set-Cookie\", \"__Host-BMOSESSIONID=YnVnemlsbGE=; Max-Age=2592000; Path=/; Secure; HttpOnly; SameSite=Strict\")\n\tw.Header().Set(\"Referrer-Policy\", \"no-referrer, strict-origin-when-cross-origin\")\n\tw.Header().Set(\"X-Content-Type-Options\",\"nosniff\")\n\tw.Header().Set(\"X-XSS-Protection\", \"1; mode=block\")\n\n}", "title": "" }, { "docid": "2da7b3dde13b6943a7abfd5faf3a9f6d", "score": "0.75798017", "text": "func setHeaders(resp http.ResponseWriter, headers map[string]string) {\n\tfor field, value := range headers {\n\t\tresp.Header().Set(http.CanonicalHeaderKey(field), value)\n\t}\n}", "title": "" }, { "docid": "e13363c1fffe150d8e708cbe306b9e1c", "score": "0.75763303", "text": "func setHeaders(req *http.Request, contentType string) {\n\treq.Header.Add(\"Authorization\", \"Bearer \"+APIKey)\n\treq.Header.Set(\"Content-Type\", contentType)\n\treq.Header.Set(\"Accept\", \"application/json\")\n}", "title": "" }, { "docid": "ac086efb65defb455d1748256e8ad99b", "score": "0.7372083", "text": "func SetHeaders(w http.ResponseWriter, headerMap map[string]string) {\n\tfor k, v := range headerMap {\n\t\tw.Header().Set(k, v)\n\t}\n}", "title": "" }, { "docid": "b324e412201f873d0886565d52fb644d", "score": "0.73351634", "text": "func setAccessControlHeaders(w http.ResponseWriter) {\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Accept, Authorization, Content-Type, Origin\")\n\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Date\")\n}", "title": "" }, { "docid": "fcf8742fc41ce8b187b41c76eb34336c", "score": "0.7329545", "text": "func setHeaders(w http.ResponseWriter, h http.Header, u *users.User) {\n\tw.Header().Set(\"Docker-Distribution-Api-Version\", \"registry/2.0\")\n\tfor k, v := range h {\n\t\tif strings.ToLower(k) == \"content-length\" {\n\t\t\tcontinue\n\t\t}\n\t\tw.Header().Set(k, strings.Join(v, \",\"))\n\t}\n}", "title": "" }, { "docid": "9889a82a8542901a4966c861eee8c958", "score": "0.7329386", "text": "func (ch challenge) SetHeaders(r *http.Request, w http.ResponseWriter) {\n\tw.Header().Set(\"WWW-Authenticate\", fmt.Sprintf(\"Basic realm=%q\", ch.realm))\n}", "title": "" }, { "docid": "9889a82a8542901a4966c861eee8c958", "score": "0.7329386", "text": "func (ch challenge) SetHeaders(r *http.Request, w http.ResponseWriter) {\n\tw.Header().Set(\"WWW-Authenticate\", fmt.Sprintf(\"Basic realm=%q\", ch.realm))\n}", "title": "" }, { "docid": "88437e94cd2c4020e8c80d250310e9e2", "score": "0.72968507", "text": "func (handler *Handler) SetHeaders(headers map[string]interface{}) *Handler {\n\n\tfor k, v := range headers {\n\t\thandler.Headers[k] = v\n\t}\n\n\thandler.updateSpecHeaders()\n\n\treturn handler\n}", "title": "" }, { "docid": "b8bd168b1851dce7899198477bbb142f", "score": "0.72793496", "text": "func (c *Client) SetHeaders(headers http.Header) {\n\tc.modifyLock.Lock()\n\tdefer c.modifyLock.Unlock()\n\tc.headers = headers\n}", "title": "" }, { "docid": "beffc3b55bbb659b253f8e39d31e107d", "score": "0.72709775", "text": "func SetHeaders(headersList []string, w http.ResponseWriter) error {\n\tif headersList == nil {\n\t\treturn errors.New(\"headers list not provided\")\n\t} else if len(headersList)%2 != 0 {\n\t\treturn errors.New(\"headers are not a key-value list\")\n\t}\n\n\tfor i := 0; i < len(headersList); i += 2 {\n\t\tw.Header().Set(headersList[i], 
headersList[i+1])\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "1e6113b83cfaf35abaaa93f36aa44335", "score": "0.7255374", "text": "func SetHeader(w http.ResponseWriter, key, value string) {\n\tw.Header().Set(key, value)\n}", "title": "" }, { "docid": "3d8e103188167f5ed5d26314e3ce5724", "score": "0.7250635", "text": "func SetRequestHeaders(req *http.Request) {\n\tauthorization := os.Getenv(\"authorization\")\n\n\treq.Header.Set(\"X-FromAppId\", con.XFromAppId)\n\treq.Header.Set(\"Content-Type\", con.ContentType)\n\treq.Header.Set(\"Accept\", con.Accept)\n\treq.Header.Set(\"X-TransactionId\", con.XTransactionId)\n\treq.Header.Set(\"Authorization\", authorization)\n\n}", "title": "" }, { "docid": "66e181019e6132c53f0aa524d5cab655", "score": "0.71611524", "text": "func (ctx *HijackResponse) SetHeader(pairs ...string) {\n\tfor i := 0; i < len(pairs); i += 2 {\n\t\tctx.payload.ResponseHeaders = append(ctx.payload.ResponseHeaders, &proto.FetchHeaderEntry{\n\t\t\tName: pairs[i],\n\t\t\tValue: pairs[i+1],\n\t\t})\n\t}\n}", "title": "" }, { "docid": "c507f9b167aaba21e12b3270830ab359", "score": "0.709343", "text": "func setHeaders(w http.ResponseWriter, contentLength int, signatureFileName string) {\n\tconst userAgent = \"signature-development-utility/2.0 (https://github.com/exponential-decay/signature-development-utility; by @beet_keeper\"\n\n\tconst headerUserAgent = \"User-agent\"\n\tconst headerDisposition = \"Content-Disposition\"\n\tconst headerContentType = \"Content-Type\"\n\tconst headerContentLength = \"Content-Length\"\n\n\tconst attachment = \"attachment; filename=%s.xml\"\n\tconst mime = \"application/xml\"\n\n\tdisposition := fmt.Sprintf(attachment, signatureFileName)\n\n\tw.Header().Set(headerUserAgent, userAgent)\n\tw.Header().Set(headerDisposition, disposition)\n\tw.Header().Set(headerContentType, mime)\n\tw.Header().Set(headerContentLength, strconv.Itoa(contentLength))\n}", "title": "" }, { "docid": "9a161a887e4e7367c4a572f19370f45b", "score": "0.70685434", "text": "func (v *Vault) setHeaders() {\n\theaders := map[string][]string{\"X-Vault-Token\": {v.token}}\n\tv.client.SetHeaders(headers)\n\tv.client.SetToken(v.token)\n}", "title": "" }, { "docid": "048adc33fd7d2f95cf344d64cba0fc70", "score": "0.7057168", "text": "func SetHeader(req *http.Request) {\n\treq.Header.Set(\"Accept\", \"Application/json\")\n\treq.Header.Set(\"Content-Type\", \"Application/json\")\n\treq.Header.Set(\"Charset\", \"utf-8\")\n}", "title": "" }, { "docid": "19740be2233e031b032f731342d12a0d", "score": "0.7048974", "text": "func SetHeadersMiddleware(headers http.Header) httpclient.Middleware {\n\treturn httpclient.NamedMiddlewareFunc(SetHeadersMiddlewareName, func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper {\n\t\tif len(headers) == 0 {\n\t\t\treturn next\n\t\t}\n\n\t\treturn httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {\n\t\t\tfor k, v := range headers {\n\t\t\t\tcanonicalKey := textproto.CanonicalMIMEHeaderKey(k)\n\t\t\t\treq.Header[canonicalKey] = v\n\t\t\t}\n\n\t\t\treturn next.RoundTrip(req)\n\t\t})\n\t})\n}", "title": "" }, { "docid": "6159d9e57d4d6d3343c21f825ec382e6", "score": "0.7038457", "text": "func SetResponseHeaders(headers map[string]string) func(next http.Handler) http.Handler {\n\treturn func(next http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tctx, span := trace.StartSpan(r.Context(), \"proxy.SetResponseHeaders\")\n\t\t\tdefer span.End()\n\t\t\tfor key, val := range 
headers {\n\t\t\t\tr.Header.Set(key, val)\n\t\t\t}\n\t\t\tnext.ServeHTTP(w, r.WithContext(ctx))\n\t\t})\n\t}\n}", "title": "" }, { "docid": "9ba6c0efeddf50ff4ea79b9e4e6f795a", "score": "0.70378315", "text": "func SetHeader() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tc.Header(\"Access-Control-Allow-Origin\", \"*\")\n\t\tc.Header(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE, UPDATE\")\n\t\tc.Header(\"Access-Control-Allow-Headers\", \"Origin, X-Requested-With, Content-Type, Accept, Authorization\")\n\t\tc.Header(\"Access-Control-Expose-Headers\", \"Content-Length, Access-Control-Allow-Origin, Access-Control-Allow-Headers, Cache-Control, Content-Language, Content-Type\")\n\t\tc.Header(\"Access-Control-Allow-Credentials\", \"true\")\n\n\t\tif c.Request.Method == \"OPTIONS\" {\n\t\t\tc.AbortWithStatus(204)\n\t\t\treturn\n\t\t}\n\n\t\tc.Next()\n\t}\n}", "title": "" }, { "docid": "a8e164c7797c65a84844f480be73888b", "score": "0.7034614", "text": "func (rm *REKTManager) SetHeader(key, value string) {\n\trm.headers.Set(key, value)\n}", "title": "" }, { "docid": "39dfca0fb5a0c4a1eefb5793f4057bab", "score": "0.7020734", "text": "func (options *CreateSourcesOptions) SetHeaders(param map[string]string) *CreateSourcesOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "7c612bf557110d64874ef07e55d947de", "score": "0.7011777", "text": "func SetHeaders(w http.ResponseWriter, nonce string) {\n\t// Content security policy\n\tcsp := []string{\n\t\tfmt.Sprintf(\"script-src '%s' 'self'\", nonce),\n\t\t\"worker-src 'self' blob:\", // No single quotes around blob:\n\t}\n\tw.Header().Set(\"Content-Security-Policy\", strings.Join(csp, \"; \"))\n}", "title": "" }, { "docid": "9fa7bb44c29ca34355af0e6b27c00549", "score": "0.7008446", "text": "func (options *GetDevelopmentModeOptions) SetHeaders(param map[string]string) *GetDevelopmentModeOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "a2c950175a9db92289f41a3ea3ab3662", "score": "0.70035136", "text": "func (zr *ZRequest) SetHeaders(headers map[string]string) *ZRequest {\n\tif zr.ended {\n\t\treturn zr\n\t}\n\tfor key, value := range headers {\n\t\tzr.headers.Set(key, value)\n\t}\n\treturn zr\n}", "title": "" }, { "docid": "867d7f51f42d996104c90ff9e627c1d6", "score": "0.69796807", "text": "func (i *ICoreWebView2HttpRequestHeaders) SetHeader(name, value string) error {\n\t_name, err := windows.UTF16PtrFromString(name)\n\tif err != nil {\n\t\treturn nil\n\t}\n\n\t_value, err := windows.UTF16PtrFromString(value)\n\tif err != nil {\n\t\treturn nil\n\t}\n\n\tres, _, err := i.vtbl.SetHeader.Call(\n\t\tuintptr(unsafe.Pointer(i)),\n\t\tuintptr(unsafe.Pointer(_name)),\n\t\tuintptr(unsafe.Pointer(_value)),\n\t)\n\tif err != windows.ERROR_SUCCESS {\n\t\treturn err\n\t}\n\tif windows.Handle(res) != windows.S_OK {\n\t\treturn syscall.Errno(res)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "6494c3e1b5298794a4ba759a1b577706", "score": "0.69615453", "text": "func (r *Router) SetHeader(w http.ResponseWriter, k, v string) {\n\tw.Header().Set(k, v)\n}", "title": "" }, { "docid": "5c8c4fb1a52964f7b88b81db5c95dffe", "score": "0.6959256", "text": "func SetHeader(out io.Writer, key, value string) {\n\tif resp, ok := out.(http.ResponseWriter); ok {\n\t\tresp.Header().Set(key, value)\n\t}\n}", "title": "" }, { "docid": "563241b655dae559d754adad615f7a0a", "score": "0.69575226", "text": "func (l *EchoLogrus) SetHeader(string) {}", "title": "" }, { "docid": 
"9d5900acb8149d7c9de8746ed11c0848", "score": "0.6951425", "text": "func (resp *ClientResponse) setHeaders(headers headerFieldArray) {\n\tresp.Headers = headers\n\tresp.Status = headers.GetStatus()\n}", "title": "" }, { "docid": "8480fc42364899d77d4378cd64e59d84", "score": "0.6927999", "text": "func (h headers) Set(value string) error {\n\tparts := strings.SplitN(value, \":\", 2)\n\tif len(parts) != 2 {\n\t\treturn fmt.Errorf(\"header '%s' has a wrong format\", value)\n\t}\n\tkey, val := strings.TrimSpace(parts[0]), strings.TrimSpace(parts[1])\n\tif key == \"\" || val == \"\" {\n\t\treturn fmt.Errorf(\"header '%s' has a wrong format\", value)\n\t}\n\t// Add key/value directly to the http.Header (map[string][]string).\n\t// http.Header.Add() canonicalizes keys but vegeta is used\n\t// to test systems that require case-sensitive headers.\n\th.Header[key] = append(h.Header[key], val)\n\treturn nil\n}", "title": "" }, { "docid": "ae3fa8c40f91fe8be997edf1a4b9e8b0", "score": "0.69264716", "text": "func (options *GetTemplateActivityLogOptions) SetHeaders(param map[string]string) *GetTemplateActivityLogOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "fcd530f76ae7048b832c145e151d44bc", "score": "0.69226074", "text": "func (options *UpdateDevelopmentModeOptions) SetHeaders(param map[string]string) *UpdateDevelopmentModeOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "5fef3a910fd634ae77a54bf50c1f5910", "score": "0.69168633", "text": "func (r *StandardResponse) SetHeaders(headers http.Header) {\n\tr.headers = headers\n}", "title": "" }, { "docid": "cd6c7db5fc8076da751db259571d4037", "score": "0.6904254", "text": "func (options *CreateProviderGatewayOptions) SetHeaders(param map[string]string) *CreateProviderGatewayOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "f0e45eef0fbb11c418b1014e8d466318", "score": "0.68900734", "text": "func (self *Response) SetHeaders(responseHeaders interface{}) error {\n\tswitch responseHeaders.(type) {\n\tcase func(string, http.Request) map[string]string:\n\t\t//\n\t\t// Mock Response should have a Body Generator function registered?\n\t\t//\n\n\t\theadersGenerator := responseHeaders.(func(string, http.Request) map[string]string)\n\t\tself.headersGenerator = &headersGenerator\n\n\tcase map[string]string:\n\t\t//\n\t\t// Mock Response should have a static body registered?\n\t\t//\n\n\t\tself.headers = responseHeaders.(map[string]string)\n\n\tdefault:\n\t\treturn fmt.Errorf(\"unsupported headers type. 
SetHeaders() can receive either a static headers map (map[string]string) or a Headers Generator Callback with the signature 'func(string, http.Request) map[string]string', for a more detailed description kindly check Go Mock Yourself documentation\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "629e3f9dfbbbdc18ec303df348918b15", "score": "0.6887552", "text": "func (options *GetWorkspaceActivityOptions) SetHeaders(param map[string]string) *GetWorkspaceActivityOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "2e6f9cc8e4596f0d11d5d3e7d0956379", "score": "0.68816894", "text": "func (options *ListSharedDatasetsOptions) SetHeaders(param map[string]string) *ListSharedDatasetsOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "06e9f6864d24aa9078386988f77cdce6", "score": "0.6881359", "text": "func (h *HTTP) SetHeader(key, value string) {\n\th.headers[key] = value\n}", "title": "" }, { "docid": "e81298e28379fa8a79973e3bb4e24ea1", "score": "0.68796194", "text": "func (options *GetKmsSettingsOptions) SetHeaders(param map[string]string) *GetKmsSettingsOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "86192345d214ad8973eb420f1c443a81", "score": "0.68704236", "text": "func (options *PlanWorkspaceCommandOptions) SetHeaders(param map[string]string) *PlanWorkspaceCommandOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "fc256a66bccf7475a62a600c1683d1c1", "score": "0.68545324", "text": "func SetCallHeaders(ctx context.Context, outgoing *http.Request) {\n\tincoming := GetRequest(ctx)\n\tif incoming == nil {\n\t\treturn\n\t}\n\tif outgoing.Header == nil {\n\t\toutgoing.Header = http.Header{}\n\t}\n\toutgoing.Header.Set(\"X-Logjam-Caller-Id\", incoming.id)\n\toutgoing.Header.Set(\"X-Logjam-Action\", incoming.action)\n}", "title": "" }, { "docid": "0bdb5e2f67788b6192ac070b9c4e002d", "score": "0.6851189", "text": "func (api *Client) SetHeader(key, value string) *Client {\n\tapi.mu.Lock()\n\tdefer api.mu.Unlock()\n\tapi.headers[key] = value\n\treturn api\n}", "title": "" }, { "docid": "4f3557135726ca111ac509acc7b72290", "score": "0.6847197", "text": "func (options *ConfigureLoggingOptions) SetHeaders(param map[string]string) *ConfigureLoggingOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "1758b3de724caf4723119d5d3db1eb70", "score": "0.6845591", "text": "func (options *ReplaceKmsSettingsOptions) SetHeaders(param map[string]string) *ReplaceKmsSettingsOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "33fa5f644a274c4fbb19b8a81e7191be", "score": "0.6844381", "text": "func optHeaders(headers map[string]string) func(*fasthttp.Request, *fasthttp.Response) {\n\treturn func(request *fasthttp.Request, response *fasthttp.Response) {\n\t\tfor key, value := range headers {\n\t\t\trequest.Header.Set(key, value)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "a1dea57901894d3a0d075bc8aebdebcf", "score": "0.68345606", "text": "func SetAccessControlHeaders(c *gin.Context) {\n\tc.Header(\"Access-Control-Allow-Headers\", \"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n\n\to := c.GetHeader(\"Origin\")\n\tif o != config.Get().PanelLocation {\n\t\tfor _, origin := range config.Get().AllowedOrigins {\n\t\t\tif origin != \"*\" && o != origin {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tc.Header(\"Access-Control-Allow-Origin\", 
origin)\n\t\t\tc.Next()\n\t\t\treturn\n\t\t}\n\t}\n\n\tc.Header(\"Access-Control-Allow-Origin\", config.Get().PanelLocation)\n\tc.Next()\n}", "title": "" }, { "docid": "812fd1bd5f0acaf5ac5756d6040b1a97", "score": "0.68316084", "text": "func (options *ApplyWorkspaceCommandOptions) SetHeaders(param map[string]string) *ApplyWorkspaceCommandOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "0a748a2d2e93078fee3d1cabcdeda67a", "score": "0.6820863", "text": "func (r *Request) SetHeader(k, v string) *Request {\n\tpanic(\"TODO\")\n\treturn r\n}", "title": "" }, { "docid": "4c67910cf1321091e7f3cfdb918f5c88", "score": "0.6820032", "text": "func (options *ListSourcesOptions) SetHeaders(param map[string]string) *ListSourcesOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "9584e6448c934e07771161244fba4cb4", "score": "0.68185306", "text": "func (options *UpdateProviderGatewayOptions) SetHeaders(param map[string]string) *UpdateProviderGatewayOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "4cbb90a734cbe7a1fde391b083171884", "score": "0.6818516", "text": "func (options *GetProviderPortOptions) SetHeaders(param map[string]string) *GetProviderPortOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "c2dc94bab937adbd39186b2a6c0385d5", "score": "0.68157333", "text": "func (options *GetSharedDatasetOptions) SetHeaders(param map[string]string) *GetSharedDatasetOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "1e019a749ef072c2ef6ba2516e415f1c", "score": "0.6811205", "text": "func SetContextHeaders(r *http.Request) {\n\tif r == nil {\n\t\treturn\n\t}\n\tif headers, ok := r.Context().Value(headersClientContextKey).(http.Header); ok && len(headers) > 0 {\n\t\tif r.Header == nil {\n\t\t\tr.Header = http.Header{}\n\t\t}\n\t\tCopyHeader(headers, r.Header)\n\t}\n}", "title": "" }, { "docid": "674078dddf488527d45ea5cf61bf87f8", "score": "0.68071115", "text": "func (service *Manager) SetHeader(header interface{}) {\n\tservice.client.AddHeader(header)\n}", "title": "" }, { "docid": "5889cc99d6bd1d8b02253efe604ea250", "score": "0.6805987", "text": "func (r *Request) SetHeader(key, val string) {\n\tr.Headers[key] = val\n}", "title": "" }, { "docid": "3a3ea1fe7958108d190adf79796556d7", "score": "0.68039095", "text": "func (options *CreateSharedDatasetOptions) SetHeaders(param map[string]string) *CreateSharedDatasetOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "3754ecdc0f2857034c00a7cf7f65b9f4", "score": "0.67949104", "text": "func (router Router) setUpHeaders(w http.ResponseWriter, r *http.Request) bool {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", router.Host)\n\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Header().Set(\"Access-Control-Max-Age\", \"120\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type, Access-Control-Allow-Headers, Authorization, X-Requested-With\")\n\tif r.Method == http.MethodOptions {\n\t\tw.WriteHeader(200)\n\t\treturn false\n\t}\n\treturn true\n}", "title": "" }, { "docid": "45308c1585bb4e8b6acc72dff7786624", "score": "0.67895806", "text": "func (options *RefreshWorkspaceCommandOptions) SetHeaders(param map[string]string) *RefreshWorkspaceCommandOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "9558ebec6a7ffa479c35d010dbdad923", "score": 
"0.6786303", "text": "func (options *RunWorkspaceCommandsOptions) SetHeaders(param map[string]string) *RunWorkspaceCommandsOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "d1229a23b1a9eb09351ff6e94f298679", "score": "0.67854834", "text": "func (options *GetConfigOptions) SetHeaders(param map[string]string) *GetConfigOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "d5045b2107668680d387991ed0a64fc1", "score": "0.67834467", "text": "func (options *GetSourceOptions) SetHeaders(param map[string]string) *GetSourceOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "4f89c5649842e8251c5f127c9b7a9dfb", "score": "0.67826164", "text": "func (req *PatchJSONRequest) SetHeader(k, v string) {\n\treq.req.Header.Set(k, v)\n}", "title": "" }, { "docid": "31a8b7ea1ee56e0022c00f739675fd2a", "score": "0.6774983", "text": "func (options *UpdateSourceOptions) SetHeaders(param map[string]string) *UpdateSourceOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "37621c622973da6c2c81bd4e06cc10a3", "score": "0.677233", "text": "func (c *Context) SetHeader(key, value string) {\n\tc.W.Header().Set(key, value)\n}", "title": "" }, { "docid": "677e64c70957a9e1b78259bc94db4640", "score": "0.67701024", "text": "func (h *HTTPX) SetCustomHeaders(r *retryablehttp.Request, headers map[string]string) {\n\tfor name, value := range headers {\n\t\tr.Header.Set(name, value)\n\t\t// host header is particular\n\t\tif strings.EqualFold(name, \"host\") {\n\t\t\tr.Host = value\n\t\t}\n\t}\n}", "title": "" }, { "docid": "e519fd54320eba8ea8308651315ff130", "score": "0.676807", "text": "func (c *Context) setHead(key, value string) {\n\tc.f.Response.Header.Set(key, value)\n}", "title": "" }, { "docid": "82dce1e05a218d46e952497c55eb902e", "score": "0.6767906", "text": "func (options *GetProviderGatewayOptions) SetHeaders(param map[string]string) *GetProviderGatewayOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "64b4854cedfc3d50248640fd4111fa06", "score": "0.6761674", "text": "func (options *GetNotificationsRegistrationTestOptions) SetHeaders(param map[string]string) *GetNotificationsRegistrationTestOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "e06e8bc60f6fab7d1eb611d5bc2f60a6", "score": "0.67578405", "text": "func (this *SIPMessage) SetHeaders(headers *list.List) {\n\tfor listIterator := headers.Front(); listIterator != nil; listIterator = listIterator.Next() {\n\t\tsipHeader := listIterator.Value.(header.Header)\n\t\tthis.AttachHeader2(sipHeader, false)\n\t}\n}", "title": "" }, { "docid": "31b0d964e70d1cd020ab4098bf96b13d", "score": "0.67525774", "text": "func (options *CreateFilterOptions) SetHeaders(param map[string]string) *CreateFilterOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "8677b23a487601dd665ff1b8e3f033a5", "score": "0.6741195", "text": "func (this *Context) SetHeader(hdr string, val string, unique bool) {\n\tif unique {\n\t\tthis.Header().Set(hdr, val)\n\t} else {\n\t\tthis.Header().Add(hdr, val)\n\t}\n}", "title": "" }, { "docid": "5b1ba6c136ecf230dc779373e647e1a9", "score": "0.6737054", "text": "func (options *GetWorkspaceActivityLogsOptions) SetHeaders(param map[string]string) *GetWorkspaceActivityLogsOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "11791ed554ebb4ffb01a0e3e8d5a6fb5", "score": "0.6732543", "text": "func (options 
*ToneChatOptions) SetHeaders(param map[string]string) *ToneChatOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "cf6bf010ece4502b6559a13ca71f576d", "score": "0.6732388", "text": "func setHeader(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Server\", \"WebServer\")\n\tw.Header().Set(\"Content-Type\", \"text/html\")\n\tw.Header().Set(\"Cache-Control\", \"no-cache, private, max-age=0\")\n\tw.Header().Set(\"Expires\", time.Unix(0, 0).Format(http.TimeFormat))\n\tw.Header().Set(\"Pragma\", \"no-cache\")\n\tw.Header().Set(\"X-Accel-Expires\", \"0\")\n}", "title": "" }, { "docid": "a5461d06aba678494963c988206520bb", "score": "0.67269784", "text": "func (c *Client) SetHeader(key, value string) {\n\tc.Headers[key] = value\n}", "title": "" }, { "docid": "7268eae0ccc07539837804ac3a986a16", "score": "0.6719003", "text": "func (options *GetEventNotificationsIntegrationOptions) SetHeaders(param map[string]string) *GetEventNotificationsIntegrationOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "1a83abf420cb4cdedc40f4b919b6e840", "score": "0.6710401", "text": "func (options *CreateActionOptions) SetHeaders(param map[string]string) *CreateActionOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "6eb6904c213ed5c47f5944098586f7ac", "score": "0.67088187", "text": "func (l *Lambda) ResponseHeaderSet(header, value string) {\n\tl.w.Header().Set(header, value)\n}", "title": "" }, { "docid": "66b4800bfbe492caaba16351573e416a", "score": "0.6701626", "text": "func (c *Context) SetHeader(key string, value string) {\n\tc.Writer.Header().Set(key, value)\n}", "title": "" }, { "docid": "e8807b27a478a4479c230f5470aaf872", "score": "0.6698927", "text": "func (options *TestNotificationChannelOptions) SetHeaders(param map[string]string) *TestNotificationChannelOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "9322adb8098ed5d91382d8647aaf3b8f", "score": "0.66970056", "text": "func setAccessControlResponseHeaders (w http.ResponseWriter, req *http.Request) {\n\tif origin := req.Header.Get(\"Origin\"); origin != \"\" {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\",\n\t\t\t\"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n\t}\n}", "title": "" }, { "docid": "a60906b1be1726e0c29bc80735dc4849", "score": "0.66968024", "text": "func (options *ListKubeconfigOptions) SetHeaders(param map[string]string) *ListKubeconfigOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "8735becfe50855f8006dcfed5936cdde", "score": "0.66934085", "text": "func (options *GetWorkspaceReadmeOptions) SetHeaders(param map[string]string) *GetWorkspaceReadmeOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "ee878580e81283f3276a7bd1286ea4ae", "score": "0.6693247", "text": "func (options *UpdatePrivateEndpointWhitelistOptions) SetHeaders(param map[string]string) *UpdatePrivateEndpointWhitelistOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "0b7156f8c1c55ae3b9f96825c533ff15", "score": "0.66908556", "text": "func (m *MailYak) SetHeader(name, value string) {\n\tm.headers[m.trimRegex.ReplaceAllString(name, \"\")] = []string{mime.QEncoding.Encode(\"UTF-8\", m.trimRegex.ReplaceAllString(value, \"\"))}\n}", "title": "" }, { "docid": 
"0e43030c7766ad1e7560792859065918", "score": "0.6688021", "text": "func (options *PostTestEventNotificationOptions) SetHeaders(param map[string]string) *PostTestEventNotificationOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "b0083411400b82e3d12caef0d5668234", "score": "0.66869855", "text": "func (options *GetSchematicsJobOptions) SetHeaders(param map[string]string) *GetSchematicsJobOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "71a25551469cfd07576939822694ea3d", "score": "0.6685865", "text": "func (options *ListEnterprisesOptions) SetHeaders(param map[string]string) *ListEnterprisesOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "18e67f4c518ae2180a98cdb1e3199880", "score": "0.6684581", "text": "func (b binder) setFromHeaders() HTTPError {\n\tfor k, values := range b.req.Header {\n\t\tk = strings.ToLower(k)\n\t\tfor _, v := range values {\n\t\t\tif err := b.setField(k, v, ParamSourceHeader); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "3cc28673f284a063630f682d69d3c765", "score": "0.6683813", "text": "func (options *InstallConfigOptions) SetHeaders(param map[string]string) *InstallConfigOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "237e3cbe20bcfc7724350497c6499ce0", "score": "0.66830605", "text": "func (options *CreateWorkspaceOptions) SetHeaders(param map[string]string) *CreateWorkspaceOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "c992c9dfff35d3d06e5d2f7bc749f066", "score": "0.66824776", "text": "func (options *ListProviderPortsOptions) SetHeaders(param map[string]string) *ListProviderPortsOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" }, { "docid": "6e6315d274151f2513fdec59da250ba7", "score": "0.6682445", "text": "func (options *ReplaceSharedDatasetOptions) SetHeaders(param map[string]string) *ReplaceSharedDatasetOptions {\n\toptions.Headers = param\n\treturn options\n}", "title": "" } ]
2c782a4adf1fd0602b7b20604ce6fbec
Stop stops the middleware and returns any error that occurred, nil otherwise
[ { "docid": "632253f10f7103152bf63ca517504899", "score": "0.76039684", "text": "func (m *Middleware) Stop() error {\n\tm.Started = false\n\treturn nil\n}", "title": "" } ]
[ { "docid": "d649f133161c6450b3b5ddd599fa5d35", "score": "0.75422335", "text": "func (f UnaryInboundMiddlewareFunc) Stop(testing.TB) error { return nil }", "title": "" }, { "docid": "5341a71a09d4353f825ba5d444279ff1", "score": "0.7278031", "text": "func (r *Router) Stop(_ context.Context) error { return nil }", "title": "" }, { "docid": "0288ec5f26630be28f1e14d1e90c4d3b", "score": "0.6730158", "text": "func Stop(err error) error {\n\treturn stop{err}\n}", "title": "" }, { "docid": "e03c502322d4f398b679e9499637ad96", "score": "0.65740496", "text": "func (c GenNumMiddleware) Stop() {\n\tc.downstreamCache.Stop()\n}", "title": "" }, { "docid": "e03c502322d4f398b679e9499637ad96", "score": "0.65740496", "text": "func (c GenNumMiddleware) Stop() {\n\tc.downstreamCache.Stop()\n}", "title": "" }, { "docid": "2d89e6ba2ddefb9a2f1061a9081bace6", "score": "0.6551074", "text": "func (fn ServerFunc) Stop(error) {}", "title": "" }, { "docid": "73a60b1632a566f053f93f6478a453a1", "score": "0.6402632", "text": "func (f UnaryHandlerFunc) Stop(testing.TB) error { return nil }", "title": "" }, { "docid": "012793ecc22e14733bb2d03713668d57", "score": "0.628334", "text": "func Stop() error {\n\treturn nil\n}", "title": "" }, { "docid": "1966c009e61ef508dda32f178fedfb98", "score": "0.6271251", "text": "func (rest *REST) Stop() error {\n\trest.cancelFunc()\n\treturn rest.srv.Shutdown(rest.ctx)\n}", "title": "" }, { "docid": "61c3121c645332ac36f41b46bd2e3e68", "score": "0.6250109", "text": "func (s *ContextRouter) Stop() {\n\tif s.router != nil {\n\t\ts.cancelfunc()\n\t}\n\ts.Lock()\n\ts.context, s.cancelfunc = context.WithCancel(context.Background())\n\ts.Unlock()\n}", "title": "" }, { "docid": "592e0b1edd5a22858d2465e8dc28c150", "score": "0.623876", "text": "func Stop() {\n\ts.httpSrv.Shutdown(context.Background())\n\n}", "title": "" }, { "docid": "82145ee74efe35b11d3b23ef7ffb6b28", "score": "0.62247866", "text": "func (d *diagHandler) Stop() {}", "title": "" }, { "docid": "5b843c3bfa194325a40adef9d1e91d12", "score": "0.61868656", "text": "func (receiver *WebReceiver) Stop(int) {\n\tgo func() {\n\t\treceiver.log.Info(\"Shutting down HTTP server\")\n\t\treceiver.stopping = true\n\t\treceiver.server.Shutdown(nil)\n\t\treceiver.stopping = false\n\t\treceiver.stopSignal.Broadcast()\n\t\treceiver.log.Info(\"HTTP server safely shutdown\")\n\t}()\n\treturn\n}", "title": "" }, { "docid": "83c18c6c0614be76559f8de1ff6c7f22", "score": "0.61775404", "text": "func Stop(err error) error {\n\treturn terminalError{err}\n}", "title": "" }, { "docid": "8f7e8735b8a37ec6bf13ed2c53ba0c01", "score": "0.61219907", "text": "func (c *Client) Stop() goa.Endpoint {\n\tvar (\n\t\tdecodeResponse = DecodeStopResponse(c.decoder, c.RestoreResponseBody)\n\t)\n\treturn func(ctx context.Context, v interface{}) (interface{}, error) {\n\t\treq, err := c.BuildStopRequest(ctx, v)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresp, err := c.StopDoer.Do(req)\n\n\t\tif err != nil {\n\t\t\treturn nil, goahttp.ErrRequestError(\"stress\", \"stop\", err)\n\t\t}\n\t\treturn decodeResponse(resp)\n\t}\n}", "title": "" }, { "docid": "163bc7f6748a90be39dc186f51b15346", "score": "0.6104591", "text": "func (r *Router) Stop(ctx context.Context) error {\n\tatomic.StoreInt32(r.shutdownInvoked, 1)\n\treturn r.r.Shutdown(ctx)\n}", "title": "" }, { "docid": "71a4f5df3ae3dc81722718ee3dec2fe9", "score": "0.6102506", "text": "func (m *Middleware) Stop(cw management.CommandWriter) error {\n\t_, err := cw.SingleLineCommand(\"bytecount %v\", 0)\n\treturn err\n}", 
"title": "" }, { "docid": "00f579c55be0262f474a0b43116b2bc9", "score": "0.6099206", "text": "func (p *Provider) Stop(context.Context) error {\n\treturn p.err\n}", "title": "" }, { "docid": "4a30951311e2ab972afad03216ebc40c", "score": "0.60882473", "text": "func (r *Router) Stop(ray xray.Ray) error {\n\tclose(r.Delivery)\n\treturn nil\n}", "title": "" }, { "docid": "9082317d0c00cf07ec8e829a1ba91e6f", "score": "0.60875994", "text": "func (s *T) Stop() {\n\ts.httpServer.Shutdown(context.Background())\n\ts.wg.Wait()\n\tclose(s.errorCh)\n}", "title": "" }, { "docid": "f79b16a309f37af92777773c09401a19", "score": "0.60754925", "text": "func (s *WebService) Stop() error {\n\t// This is not crucial so errors are ignored\n\ts.insecureServer.Close()\n\n\treturn s.server.Shutdown(context.Background())\n}", "title": "" }, { "docid": "bc9f3f2719859b57551800246104b482", "score": "0.60734123", "text": "func Stop() (err error) {\n\terr = stop()\n\treturn\n}", "title": "" }, { "docid": "1eabc595a7e6d09d990f1ba45b2e3cd1", "score": "0.6041416", "text": "func (h *JsonHttpHandler) Stop() {\n\treturn\n}", "title": "" }, { "docid": "d13277b5f336fc167a8977539b8e2bb2", "score": "0.60266644", "text": "func (e *FunctionWorker) Stop(ctx context.Context) error {\n\tif e.StopFunc == nil {\n\t\treturn nil\n\t}\n\treturn e.StopFunc(ctx)\n}", "title": "" }, { "docid": "3dd7795e75e70f850330245d2f1cdc78", "score": "0.60125774", "text": "func (r *Runner) Stop(ctx context.Context) error {\n\treturn r.stop(ctx, nil)\n}", "title": "" }, { "docid": "b635d4735b93ce09ab1dd99a44520e91", "score": "0.6003778", "text": "func (e Endpoints) Stop(ctx context.Context, request StopRequest) (string, error) {\n\tresponse, err := e.StopEndpoint(ctx, request)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tresp := response.(StopResponse)\n\treturn \"\", resp.Err\n}", "title": "" }, { "docid": "8fa75a5ef25e0b8be798158e9f44e4bd", "score": "0.60001576", "text": "func (server *httpsserver) Stop() error {\n\tlog.Println(\"Closing...\")\n\treturn server.srv.Close()\n}", "title": "" }, { "docid": "7e9c7cc58d104123165ddc52f305b82d", "score": "0.5992635", "text": "func (t *jsTracer) Stop(err error) {\n\tt.vm.Interrupt(err)\n}", "title": "" }, { "docid": "d196a6b6addeb1fdfd57b61a92b83a9b", "score": "0.5977072", "text": "func (d *customActionHandler) Stop() {}", "title": "" }, { "docid": "68a0c9a06cd0cdf1480e2a488b4d8fee", "score": "0.59752464", "text": "func (bm *Manager) Stop() error {\n\tbm.cancelHanlders()\n\treturn nil\n}", "title": "" }, { "docid": "8a0c9b5223edf463d371343a5e06297b", "score": "0.5970512", "text": "func (instance *Instance) Stop() {\n\tinstance.Logger.Info(\"Shutting down the HTTP Server\")\n}", "title": "" }, { "docid": "7f37e41774e0ade68c57dd17659e58de", "score": "0.5964044", "text": "func (a *appsec) stop() {\n\tif a.started {\n\t\ta.started = false\n\t\ta.unregisterWAF()\n\t\ta.limiter.Stop()\n\t}\n}", "title": "" }, { "docid": "baae824ef7b70e46bb0e9d4cd1af0759", "score": "0.5956622", "text": "func (con *HTTPConnector) Stop(ctx context.Context) error {\n\treturn con.httpServer.Close()\n}", "title": "" }, { "docid": "aa8a0e4ad6cb6da436decc9800589f0b", "score": "0.5951905", "text": "func (lv *Liveview) Stop() {\n\tlv.HTTPResponse.Body.Close()\n\tlv.Camera.StopLiveview()\n}", "title": "" }, { "docid": "b3a56d2f533618ff8b34e10fabc222ad", "score": "0.5951568", "text": "func (w *WebServer) Stop() {\n\tmanners.Close()\n\t<-w.stopped\n}", "title": "" }, { "docid": "82dc050ff72f1169858684395845d507", "score": "0.5950842", "text": "func 
(p *Provider) Stop() error {\n\treturn p.network.StopHandlingRequests()\n}", "title": "" }, { "docid": "fdf2ac10914dbb8af19df5d21fe89f60", "score": "0.59476584", "text": "func (m *resInjectorHandler) Stop() {\n\tclose(m.agent.Responses)\n}", "title": "" }, { "docid": "1593c9d270c0d77d7e6260a527522228", "score": "0.594108", "text": "func (r *ReverseProxy) Stop() {\n\tmanners.Close()\n}", "title": "" }, { "docid": "1e64f8894848270de39c4392e1501626", "score": "0.5940833", "text": "func Stop() {\n\tdefer srv.httpServer.Shutdown(context.Background())\n}", "title": "" }, { "docid": "40633b0667ff50074ae45f39410492e6", "score": "0.5938816", "text": "func Stop(ctx context.Context) {\n\tif err := pipeline.Stop(ctx); err != nil {\n\t\tlog.Println(err)\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "85b8214741cc4f8af6bc7ee40b929591", "score": "0.591602", "text": "func Stop() (result Result, err error) {\n\tresult = stopResult()\n\treturn\n}", "title": "" }, { "docid": "fe003a573ad09a6add0f4d03696042e3", "score": "0.59129417", "text": "func (g *Gateway) Stop() {\n\t_ = g.server.Close()\n\t_ = g.logClient.Close()\n}", "title": "" }, { "docid": "de1446aa31c06e8fe134d54a20df0f11", "score": "0.59025556", "text": "func (r *REST) Stop() {\n\tlogrus.Warn(\"Stopping REST server..\")\n\terr := r.server.Shutdown(context.TODO())\n\tif err != nil {\n\t\tlogrus.Error(\"Error stopping server: \", err)\n\t}\n}", "title": "" }, { "docid": "a8280aeac2ab3e994a035db32cd7c26c", "score": "0.5899464", "text": "func (h *Handler) Stop() {\n\tclose(h.agent.Responses)\n}", "title": "" }, { "docid": "7c606f01f11218ec51af0c189b6e6e00", "score": "0.5894124", "text": "func (rec *RoundTripRecorder) Stop() {\n\tatomic.StoreInt32(&rec.stopped, 1)\n}", "title": "" }, { "docid": "c394020ae81b0f284a2785a9e5331a7d", "score": "0.5893425", "text": "func (ws *WebService) Stop() error {\n\treturn ws.server.Shutdown(context.Background())\n}", "title": "" }, { "docid": "88b63b6e85eb02409ad2871090280877", "score": "0.5885147", "text": "func (Timer) Stop(*error) {}", "title": "" }, { "docid": "ea272d1bab6a9872e5b96530b6c1be5b", "score": "0.5884885", "text": "func (_e *MockController_Expecter) Stop() *MockController_Stop_Call {\n\treturn &MockController_Stop_Call{Call: _e.mock.On(\"Stop\")}\n}", "title": "" }, { "docid": "7dd8aae58377abcf8a797ee502ded602", "score": "0.58757246", "text": "func (r *REST) Stop() error {\n\tvar err error // error holder\n\tctxShutDown, cancel := context.WithTimeout(context.Background(), 25*time.Second)\n\tdefer func() {\n\t\tcancel()\n\t}()\n\n\tif err = r.server.Shutdown(ctxShutDown); err != nil {\n\t\tlog.Error().Err(err).Msg(\"server shutdown failed\")\n\n\t\treturn err\n\t}\n\n\tlog.Info().Msg(\"Server stopped properly\")\n\tif err == http.ErrServerClosed {\n\t\terr = nil\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "da355d1f54e6b57eacc3029cc00043ba", "score": "0.58751595", "text": "func (r *Router) Stop() {\n\tr.closeChnl <- true\n\tr.running = false\n}", "title": "" }, { "docid": "fd4e932cf470a81feed2c412455d498d", "score": "0.5870649", "text": "func (h *htlcSuccessResolver) Stop() {\n\tclose(h.quit)\n}", "title": "" }, { "docid": "1a75052ee242b42cf7b3180974f9bc77", "score": "0.58610404", "text": "func (s *Server) Stop() (err error) {\n\tctx, _ := context.WithTimeout(context.Background(), 60*time.Second)\n\tlog.Debugf(\"Stopping frontend with ctx: %s\", ctx)\n\tvar returnErr error\n\terr = s.server.Shutdown(ctx)\n\tif err != nil {\n\t\ts.sentry.CaptureErrorAndWait(err, map[string]string{\"stopping\": 
\"frontend server\"})\n\t\treturnErr = err\n\t}\n\terr = s.database.Close()\n\tif err != nil {\n\t\ts.sentry.CaptureErrorAndWait(err, map[string]string{\"stopping\": \"frontend database\"})\n\t\treturnErr = err\n\t}\n\treturn returnErr\n}", "title": "" }, { "docid": "9564c7f541399c86600d9c2b0dd9e3a3", "score": "0.5829383", "text": "func (module *HTTPNotifier) Stop() error {\n\treturn nil\n}", "title": "" }, { "docid": "18bd61041be93986592c42ebc88a40f9", "score": "0.58216757", "text": "func Stop() error {\n\tif server == nil {\n\t\treturn nil\n\t}\n\n\treturn server.Shutdown()\n}", "title": "" }, { "docid": "9d301bae66e58db23f1a0040c578a3c6", "score": "0.5811385", "text": "func stopRequestHandler(w http.ResponseWriter, r *http.Request) {\n\tif r.Body == nil {\n\t\thttp.Error(w, \"invalid request, no content\", 500)\n\t\treturn\n\t}\n\n\tvar msg Message\n\terr := json.NewDecoder(r.Body).Decode(&msg)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), 500)\n\t\treturn\n\t}\n\n\tflowName := msg.FlowName\n\trequestID := msg.RequestID\n\n\terr = stopRequest(flowName, requestID)\n\tif err != nil {\n\t\tlog.Printf(\"failed to stop request %s for %s, error: %v\",\n\t\t\trequestID, flowName, err)\n\t\thttp.Error(w, fmt.Sprintf(\"failed to stop request %s for %s, error: %v\",\n\t\t\trequestID, flowName, err), http.StatusInternalServerError)\n\t}\n\n\tw.Write([]byte(\"\"))\n\treturn\n}", "title": "" }, { "docid": "b4fd233441deff7d3e63a0f32a9e673f", "score": "0.5806082", "text": "func Stop(w rest.ResponseWriter, r *rest.Request) {\n\tdefer close(QuitChain)\n\tdefer os.Exit(0)\n\t//test only\n\tAPI.Stop()\n\tw.Header().Set(\"Content-Type\", \"text/plain\")\n\t_, err := w.(http.ResponseWriter).Write([]byte(\"ok\"))\n\tif err != nil {\n\t\tlog.Warn(fmt.Sprintf(\"writejson err %s\", err))\n\t}\n}", "title": "" }, { "docid": "32a982ba7493a5f9ed12cf6b609d4e83", "score": "0.5804498", "text": "func (e *ToutLoop) Stop() {\n\tclose(e.requests)\n\te.wg.Wait()\n}", "title": "" }, { "docid": "340c6557aa7f5ce45269bc71645231af", "score": "0.58018243", "text": "func (a *Application) Stop() {\n\tvar err error\n\n\tctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)\n\tdefer cancel()\n\n\tif err = a.HTTPServer.Shutdown(ctx); err != nil {\n\t\ta.Logger.WithError(err).Errorf(\"There was an error shutting down the server\")\n\t}\n}", "title": "" }, { "docid": "e8315de05d514ee7472b131c2e98d5ad", "score": "0.5799349", "text": "func (p *provider) Stop(ctx context.Context) {\n\tif p.server == nil {\n\t\treturn\n\t}\n\n\tp.Lock()\n\tdefer p.Unlock()\n\tlog.WithField(\"name\", services.SpName).Info(\".Stop\")\n\tp.server.Stop()\n\tp.closed = true\n}", "title": "" }, { "docid": "c98598c07d9ea4451d55640e9c83d142", "score": "0.5797931", "text": "func (api *Api) Stop() error {\n\tprintln(\"Stopping the API server...\")\n\treturn nil\n}", "title": "" }, { "docid": "87f356155624c059294e623b1e5e8484", "score": "0.5783207", "text": "func (app *App) Stop() error {\n\treturn app.sever.Close()\n}", "title": "" }, { "docid": "020d23385d3b3572b36a787be9afb805", "score": "0.57796735", "text": "func (fwdr *StreamForwarder) Stop() error {\n\tfwdr.stream.Close()\n\tfwdr.conn.Close()\n\treturn nil\n}", "title": "" }, { "docid": "dcde75b4541054d62bac9b6df898af7d", "score": "0.5778061", "text": "func (s *Server) Stop() error {\n\treturn nil\n}", "title": "" }, { "docid": "adf8cb6c396cb227adb7f038d0c0a7e5", "score": "0.57759196", "text": "func (s *Server) Stop() error {\n\t// We only close the reader because we want the writer to 
be available if\n\t// Stop() was called by a request handler which still needs to send back a\n\t// response (goodbye()). The writer will be closed automatically when the\n\t// loop exits.\n\tif s.running.Swap(false) {\n\t\treturn s.r.Close()\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "e4eab901a161aa7cfd3804b48d067948", "score": "0.57652736", "text": "func (s *Server) Stop() error {\n\t// The context is used to inform the server it has 5 seconds to finish\n\t// the request it is currently handling\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\n\tif err := s.server.Shutdown(ctx); err != nil {\n\t\treturn fmt.Errorf(\"server forced to shutdown: %v\", err)\n\t}\n\n\tlog.Info(\"Server Exiting\")\n\n\treturn nil\n}", "title": "" }, { "docid": "fe8238ec5d473ff80cedbd2e14df6391", "score": "0.57607716", "text": "func (p *Pipeline) Stop() {\n\tC.gstreamer_recordwebm_stop_pipeline(p.Pipeline)\n}", "title": "" }, { "docid": "2758d4c28914cd66fc4d523a940c5953", "score": "0.5755783", "text": "func Stop() {}", "title": "" }, { "docid": "6bf34dd2dcf50a292b951c4022b995e6", "score": "0.5755703", "text": "func (core *coreService) Stop(_ context.Context) error {\n\treturn core.chainListener.Stop()\n}", "title": "" }, { "docid": "2a6ea14db169d27bccb4d3b2308dedee", "score": "0.57549876", "text": "func (p *pipeline) Stop() {\n\tp.stop <- struct{}{}\n}", "title": "" }, { "docid": "a40d1bc059b70d78d5730488eedec550", "score": "0.5751071", "text": "func (d *mockProvider) Stop() {}", "title": "" }, { "docid": "1cf70f0aa77ed3188d74950763b11ad4", "score": "0.5747629", "text": "func stopWebserver(w http.ResponseWriter, r *http.Request) {\n startStop <- \"stop\"\n}", "title": "" }, { "docid": "41bfc9589b138ad0c915b4f95aed5c94", "score": "0.574756", "text": "func (a *App) Stop() error {\n\n\t// stop the http server\n\terr := a.Server.Stop()\n\tif err != nil {\n\t\tizap.Logger.Fatal(\"Backend http server stop\", zap.Error(err))\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "9c003f5af6ebbde18d17e15035cf4f5e", "score": "0.57443845", "text": "func (f *FakeServer) Stop() {}", "title": "" }, { "docid": "9ea5c9525deb5551c6e01a8dd55dc335", "score": "0.5737714", "text": "func (s server) Stop() error {\n\tfmt.Println(\"Stopping\")\n\treturn s.server.Shutdown(context.TODO())\n}", "title": "" }, { "docid": "f331a4a4257d4c5a066877ad597e20e2", "score": "0.57356924", "text": "func (_m *MockController) Stop() {\n\t_m.Called()\n}", "title": "" }, { "docid": "2d0b028505f0f501d869933f63f06e19", "score": "0.5734734", "text": "func (r *Recorder) Stop() error {\n\tr.mu.Lock()\n\tdefer r.mu.Unlock()\n\n\tif r.sw == nil {\n\t\treturn nil\n\t}\n\n\t// Finalize our recorded file.\n\terr := r.sw.Close()\n\tr.sw = nil\n\n\t// Propagate our receive error, if Close didn't return an error.\n\tif err == nil {\n\t\terr = r.recvErr\n\t}\n\tr.recvErr = nil\n\n\trecorderRecordingGauge.Dec()\n\treturn err\n}", "title": "" }, { "docid": "53a1825d833a855143ea68352073f1b3", "score": "0.57329243", "text": "func (pipeline *Pipeline) Stop() {\n\tendpoints := pipeline.source.Endpoints()\n\tpipeline.source.Stop()\n\n\t// pipeline has stopped, emit one last round of metrics and send the exit event\n\tclose(pipeline.done)\n\tpipeline.emitMetrics()\n\tpipeline.source.pipe.Event <- events.NewExitEvent(time.Now().UnixNano(), pipeline.version, endpoints)\n\tpipeline.emitter.Stop()\n\tclose(pipeline.source.pipe.Err)\n}", "title": "" }, { "docid": "639406be9a8ed9df7d69bc042c44dce7", "score": "0.5731897", "text": 
"func Stop() {\n\terr := srv.httpServer.Shutdown(context.Background())\n\tif err != nil {\n\t\tlog.Errorf(\"Unexpected error while shutting down HTTP server - %s\", err)\n\t}\n\tdefer runCancel()\n}", "title": "" }, { "docid": "639406be9a8ed9df7d69bc042c44dce7", "score": "0.5731897", "text": "func Stop() {\n\terr := srv.httpServer.Shutdown(context.Background())\n\tif err != nil {\n\t\tlog.Errorf(\"Unexpected error while shutting down HTTP server - %s\", err)\n\t}\n\tdefer runCancel()\n}", "title": "" }, { "docid": "220d5c9c0606ae755e0032b6a6cbd7c0", "score": "0.5731813", "text": "func (wh *Webhook) Stop() {\n\tif err := wh.Server.Close(); err != nil {\n\t\tlogger.Sugar().Warnf(\"[Webhook] Stop webhook cp-server failed. %v\", err.Error())\n\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "0c4d6f08033d0a2abe3162fc15f4398c", "score": "0.5730444", "text": "func (h *Consumer) Stop() error {\n\tlogger.Logf(\"HTTP consumer on port %d stopping\", h.port)\n\treturn h.listener.Close()\n}", "title": "" }, { "docid": "d84511330f93530d6698834af6d22288", "score": "0.57301223", "text": "func (p *ProcessLogger) Stop() error {\n\tp.cancel()\n\n\tselect {\n\tcase err := <-p.errors:\n\t\treturn err\n\tdefault:\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "5d0fae84023830fbf0a7dda28d1cbb8c", "score": "0.57267225", "text": "func (chain *BlockChain) Stop() {\n\tchain.proc.Close()\n}", "title": "" }, { "docid": "7a99c3bdc6e4874b080d067abcc495a8", "score": "0.5721864", "text": "func (con *BaseModuleController) Stop(c *Context) {\n\tif con.Next != nil {\n\t\tcon.Next.Stop(c)\n\t}\n}", "title": "" }, { "docid": "83d084cec56c47ddf586fdce7b11634e", "score": "0.57217646", "text": "func (t *Trace) Stop() error {\n\tif t.err != nil {\n\t\treturn t.err\n\t}\n\n\tclose(t.stop)\n\terr := <-t.stopped\n\t// TODO(fg) does the trace format support writing error messages? 
if yes,\n\t// we should probably attempt to write the error to the file as well.\n\tif finishErr := t.enc.Finish(); finishErr != nil && err == nil {\n\t\terr = finishErr\n\t}\n\n\tif err != nil {\n\t\tt.err = err\n\t} else {\n\t\t// To be returned if Stop() is called more than once.\n\t\tt.err = errors.New(\"tracer is already stopped\")\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "4d49e25dd00dc87b0f4f5a1d2c5be609", "score": "0.5721229", "text": "func (s *Server) Stop(ctx context.Context) error {\n\treturn s.srv.Stop(ctx)\n}", "title": "" }, { "docid": "4d49e25dd00dc87b0f4f5a1d2c5be609", "score": "0.5721229", "text": "func (s *Server) Stop(ctx context.Context) error {\n\treturn s.srv.Stop(ctx)\n}", "title": "" }, { "docid": "2516258f8e20994e0b8f8cdc3c57a653", "score": "0.57153094", "text": "func (r *runner) Stop(context.Context) error {\n\tclose(r.quit)\n\treturn nil\n}", "title": "" }, { "docid": "c51209cac07ac45fc1298c4416b5eefb", "score": "0.5713569", "text": "func (c *RaftController) Stop() {\n\tif !c.running {\n\t\treturn\n\t}\n\tc.grpcServer.Stop()\n}", "title": "" }, { "docid": "b09f928972aa12e0d075e59b2f4edf3e", "score": "0.57135355", "text": "func (s *Service) Stop(ray xray.Ray) error {\n\ts.lock.Lock()\n\tdefer s.lock.Unlock()\n\n\tif s.http == nil {\n\t\treturn nil\n\t}\n\n\tray = ray.WithLogger(\"http-service\")\n\tray.Debug(\"Starting service :name\", args.Name(s.GetName()))\n\n\ts.http.Close()\n\ts.tcp.Close()\n\ts.http = nil\n\ts.tcp = nil\n\treturn nil\n}", "title": "" }, { "docid": "227916a74be37834324a8af6c9e3f77a", "score": "0.5706933", "text": "func (dao *blockDAO) Stop(ctx context.Context) error { return dao.lifecycle.OnStop(ctx) }", "title": "" }, { "docid": "9f38c525d8c7b4b856f5b7a4f190160b", "score": "0.57033837", "text": "func (bot *Engine) Stop() {\n\tbot.cancelFunc()\n}", "title": "" }, { "docid": "d433a7576983020db363066b8bf9ca9e", "score": "0.5699871", "text": "func (d *Diagnosis) Stop() {\n\tif d.server != nil {\n\t\t_ = d.server.Close()\n\t\td.log.Info(\"aah go diagnosis server shutdown successfully\")\n\t}\n\t// stop the profilers for file mode and close the file descriptors\n}", "title": "" }, { "docid": "7ddb488be6eb59df86bcdd7d36367e3e", "score": "0.56960523", "text": "func (s *Application) Stop() {\n\tif s.cfg.ExternalServer {\n\t\treturn\n\t}\n\ts.server.Stop()\n}", "title": "" }, { "docid": "7ddb488be6eb59df86bcdd7d36367e3e", "score": "0.56960523", "text": "func (s *Application) Stop() {\n\tif s.cfg.ExternalServer {\n\t\treturn\n\t}\n\ts.server.Stop()\n}", "title": "" }, { "docid": "0b21475fea7674f21808a0947c38f922", "score": "0.56953114", "text": "func (m *Stream) Stop(_ context.Context) error {\n\tm.hk.RemoveHook(hook.C2SStreamElementReceived, m.onElementRecv)\n\tm.hk.RemoveHook(hook.C2SStreamElementSent, m.onElementSent)\n\tm.hk.RemoveHook(hook.C2SStreamDisconnected, m.onDisconnect)\n\tm.hk.RemoveHook(hook.C2SStreamTerminated, m.onTerminate)\n\n\tlog.Infow(\"Stopped stream module\", \"xep\", XEPNumber)\n\treturn nil\n}", "title": "" }, { "docid": "0a0405c4c565e90f462a16192dc0b54f", "score": "0.5695018", "text": "func (s *TankSession) stop() (err error) {\n\tif s.Tank.Url == \"\" || s.Tank == nil {\n\t\terr = errors.New(\"session needs to have a tank\")\n\t\tlog.Println(err)\n\t\ts.setFailed([]string{err.Error()})\n\t\treturn\n\t}\n\tif s.Name == \"\" {\n\t\terr = errors.New(\"session has to have a name to stop\")\n\t\tlog.Println(err)\n\t\ts.setFailed([]string{err.Error()})\n\t\treturn\n\t}\n\tresp, err := 
http.Get(fmt.Sprintf(\"%v/stop?session=%v\", s.Tank.Url, s.Name))\n\tif err != nil {\n\t\terr = errors.New(fmt.Sprintf(\"http.POST failed: %v\", err))\n\t\tlog.Println(err)\n\t\ts.setFailed([]string{err.Error()})\n\t\treturn\n\t}\n\tdefer resp.Body.Close()\n\t_, err = checkResponseCode(*resp)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\ts.setFailed([]string{err.Error()})\n\t\treturn\n\t}\n\t//wait for session to reach \"finished\" stage\n\tfailed, failures := s.isFailed()\n\tif failed {\n\t\ts.setFailed(failures)\n\t\treturn errors.New(fmt.Sprintf(\"stopping session %v@%v failed %v\", s.Name, s.Tank.Url, s.Failures))\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "eb69575ae642d65ea0b1589a47f0b472", "score": "0.5694507", "text": "func (r *Reactor) OnStop() {}", "title": "" }, { "docid": "ba6eb8b1d73af13b67c940e20759665b", "score": "0.5690764", "text": "func (_ *testServer) Stop() {}", "title": "" }, { "docid": "3e991289cab1dc35422fda1c5bf78d5e", "score": "0.5690479", "text": "func (server *Server) Stop() error {\n\treturn nil\n}", "title": "" } ]
896aff33c72dd819a46cedcd609e4786
Deprecated: Do not use.
[ { "docid": "f0d21ca8808322d9c9cae02fdc943509", "score": "0.0", "text": "func (m *HttpConnectionManager) GetIdleTimeout() *types.Duration {\n\tif m != nil {\n\t\treturn m.IdleTimeout\n\t}\n\treturn nil\n}", "title": "" } ]
[ { "docid": "6b8a9170e289754ab6680d551a766247", "score": "0.57517385", "text": "func Exposed() {}", "title": "" }, { "docid": "c20851ef58fc10ac74f73b76d5dd41c3", "score": "0.56714225", "text": "func DeprecatedFunction() {\n}", "title": "" }, { "docid": "b57653ce8be0f247febab5cb40fe1805", "score": "0.5210844", "text": "func CGBCompatibility() {}", "title": "" }, { "docid": "0351c435cdb59a58645628a8526fadea", "score": "0.5139657", "text": "func Example_implementing() {}", "title": "" }, { "docid": "8f9f5a9ed8de66457d455e11a0c6b666", "score": "0.51286066", "text": "func ExamplePuffer() {}", "title": "" }, { "docid": "fc0a410804fff27646c946f8f298464f", "score": "0.4985175", "text": "func DeprecatedFeature(err error) *pq.Error {\n\tif pqerr, ok := err.(*pq.Error); ok &&\n\t\tpqerr.Code == \"01P01\" {\n\t\treturn pqerr\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.4940295", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.4940295", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.4940295", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.4940295", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.4940295", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.4940295", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.4940295", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.4940295", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.4940295", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.4940295", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "cd23a79484d5e707cb981c14979bd1a7", "score": "0.4940295", "text": "func unused(value interface{}) {\n\t// TODO remove this method\n}", "title": "" }, { "docid": "75a29c2405396739c10e6cbe7e63194f", "score": "0.48960584", "text": "func (_Ubt *UbtCaller) Deprecated(opts *bind.CallOpts) (bool, error) {\n\tvar out []interface{}\n\terr := _Ubt.contract.Call(opts, &out, \"deprecated\")\n\n\tif err != nil {\n\t\treturn *new(bool), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(bool)).(*bool)\n\n\treturn out0, err\n\n}", "title": "" }, { "docid": "5350d9688cac07646d3bc86791731a55", "score": "0.48841557", "text": "func fixBrokerDeprecated(in *Broker) *Broker {\n\tin.Spec.ChannelTemplate = nil\n\tin.Status.TriggerChannel = nil\n\treturn in\n}", "title": "" }, { "docid": "3a62e584e8ae2b4399e6702eb4666855", "score": "0.48237896", "text": "func (d Deprecated) DeprecatedOp() int {\n\treturn 1\n}", "title": "" }, { "docid": "2bcae435ec72470e56257b4b46f78f5f", "score": "0.47959423", "text": "func (els *elements) attr(name string) string 
{\n\tpanic(\"unimplemented\")\n}", "title": "" }, { "docid": "9a01b23cc156f963ea392589c9f28d36", "score": "0.47672948", "text": "func Example() {}", "title": "" }, { "docid": "ecbbc07d4326761aac3312dbb3e98fd6", "score": "0.47172046", "text": "func (l *link) Deprecation() string {\n\treturn l.deprecation\n}", "title": "" }, { "docid": "785d09de8c1fe61b015f284dfe449e50", "score": "0.46978003", "text": "func (_) Error() string { return \"\" }", "title": "" }, { "docid": "16453c521af11ce96a63707e02e4c7ac", "score": "0.46948987", "text": "func DontCognize(interface{}) {}", "title": "" }, { "docid": "5707747ed1a2c7921baf912a30146a55", "score": "0.46923864", "text": "func (a *Args) PromoteDeprecated() {\n\t// We don't have any deprecated fields right now.\n}", "title": "" }, { "docid": "59a9b2d1412efd3f921755be5db9ba7f", "score": "0.46918195", "text": "func OldAPI2(x string) {\n\tfmt.Println(x)\n}", "title": "" }, { "docid": "9de4e4920e7e5e27a518a45001f27c79", "score": "0.46743214", "text": "func isDeprecated(mf *modfile.File) (bool, string) {\n\tconst prefix = \"Deprecated:\"\n\n\tif mf.Module == nil {\n\t\treturn false, \"\"\n\t}\n\tfor _, comment := range append(mf.Module.Syntax.Before, mf.Module.Syntax.Suffix...) {\n\t\ttext := strings.TrimSpace(strings.TrimPrefix(comment.Token, \"//\"))\n\t\tif strings.HasPrefix(text, prefix) {\n\t\t\treturn true, strings.TrimSpace(text[len(prefix):])\n\t\t}\n\t}\n\treturn false, \"\"\n}", "title": "" }, { "docid": "dd578a4b609387a0adb7c55f4b5275f1", "score": "0.4667578", "text": "func useIssue31891A(c *C.Issue31891A) {}", "title": "" }, { "docid": "45b9136a7add67cbce97b74ca899eb13", "score": "0.46592605", "text": "func Documented() {}", "title": "" }, { "docid": "bca7b2ed93f965bda9df69fbb74afd8f", "score": "0.46354508", "text": "func ImplantEvent() {}", "title": "" }, { "docid": "6e6cd95cb0e4707cd16dda72579efed5", "score": "0.46251208", "text": "func GolintFoo() {}", "title": "" }, { "docid": "bf3707be4b85d7a1dee6bee6a642cfc5", "score": "0.46223402", "text": "func deprecationComment(deprecated bool) string {\n\tif !deprecated {\n\t\treturn \"\"\n\t}\n\treturn \"// Deprecated: Do not use.\"\n}", "title": "" }, { "docid": "9f3d29dca36a4ae869a9c97cbc4fa2a7", "score": "0.4622167", "text": "func (*Deprecation) Descriptor() ([]byte, []int) {\n\treturn file_interservice_cfgmgmt_response_nodes_proto_rawDescGZIP(), []int{8}\n}", "title": "" }, { "docid": "3af0cd19e42b298030798d0c8f66a043", "score": "0.46140528", "text": "func (_TetherToken *TetherTokenCaller) Deprecated(opts *bind.CallOpts) (bool, error) {\n\tvar (\n\t\tret0 = new(bool)\n\t)\n\tout := ret0\n\terr := _TetherToken.contract.Call(opts, out, \"deprecated\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "e78513832302d11810f9866826f30a56", "score": "0.4606752", "text": "func (o *CounterOpts) markDeprecated() {\n\to.deprecateOnce.Do(func() {\n\t\to.Help = fmt.Sprintf(\"(Deprecated since %v) %v\", o.DeprecatedVersion, o.Help)\n\t})\n}", "title": "" }, { "docid": "5c53d635e7fcf447e887316190e229f8", "score": "0.4606723", "text": "func init() {\n\t}", "title": "" }, { "docid": "6db211ef77e288483a86e5c3bbec8876", "score": "0.45930198", "text": "func OBSOLETE(args ...interface{}) {\n\tif !DebugMode() {\n\t\treturn\n\t}\n\tfuncName, calledBy := FuncName(2), FuncName(3)\n\tif strings.HasSuffix(funcName, \"OLD\") &&\n\t\tstrings.HasSuffix(calledBy, \"OLD\") {\n\t\treturn\n\t}\n\tvar ar []interface{}\n\tar = append(ar, \"OBSOLETE\", funcName, \"<\", calledBy)\n\tar = append(ar, 
args...)\n\tfmt.Println(ar...)\n}", "title": "" }, { "docid": "c44f3585c21ff1777d220fb0b551a9be", "score": "0.45878327", "text": "func _ErrorNoOp() {\n\tvar x [1]struct{}\n\t_ = x[ErrUnsupportedMethod-(1)]\n\t_ = x[ErrUnsupportedParameter-(2)]\n\t_ = x[ErrUnexpectedEndOfFile-(3)]\n\t_ = x[ErrExpectedEndOfFile-(4)]\n\t_ = x[ErrCannotParseText-(6)]\n\t_ = x[ErrIncorrectNumberOfColumns-(7)]\n\t_ = x[ErrThereIsNoColumn-(8)]\n\t_ = x[ErrSizesOfColumnsDoesntMatch-(9)]\n\t_ = x[ErrNotFoundColumnInBlock-(10)]\n\t_ = x[ErrPositionOutOfBound-(11)]\n\t_ = x[ErrParameterOutOfBound-(12)]\n\t_ = x[ErrSizesOfColumnsInTupleDoesntMatch-(13)]\n\t_ = x[ErrDuplicateColumn-(15)]\n\t_ = x[ErrNoSuchColumnInTable-(16)]\n\t_ = x[ErrDelimiterInStringLiteralDoesntMatch-(17)]\n\t_ = x[ErrCannotInsertElementIntoConstantColumn-(18)]\n\t_ = x[ErrSizeOfFixedStringDoesntMatch-(19)]\n\t_ = x[ErrNumberOfColumnsDoesntMatch-(20)]\n\t_ = x[ErrCannotReadAllDataFromTabSeparatedInput-(21)]\n\t_ = x[ErrCannotParseAllValueFromTabSeparatedInput-(22)]\n\t_ = x[ErrCannotReadFromIstream-(23)]\n\t_ = x[ErrCannotWriteToOstream-(24)]\n\t_ = x[ErrCannotParseEscapeSequence-(25)]\n\t_ = x[ErrCannotParseQuotedString-(26)]\n\t_ = x[ErrCannotParseInputAssertionFailed-(27)]\n\t_ = x[ErrCannotPrintFloatOrDoubleNumber-(28)]\n\t_ = x[ErrCannotPrintInteger-(29)]\n\t_ = x[ErrCannotReadSizeOfCompressedChunk-(30)]\n\t_ = x[ErrCannotReadCompressedChunk-(31)]\n\t_ = x[ErrAttemptToReadAfterEOF-(32)]\n\t_ = x[ErrCannotReadAllData-(33)]\n\t_ = x[ErrTooManyArgumentsForFunction-(34)]\n\t_ = x[ErrTooLessArgumentsForFunction-(35)]\n\t_ = x[ErrBadArguments-(36)]\n\t_ = x[ErrUnknownElementInAst-(37)]\n\t_ = x[ErrCannotParseDate-(38)]\n\t_ = x[ErrTooLargeSizeCompressed-(39)]\n\t_ = x[ErrChecksumDoesntMatch-(40)]\n\t_ = x[ErrCannotParseDatetime-(41)]\n\t_ = x[ErrNumberOfArgumentsDoesntMatch-(42)]\n\t_ = x[ErrIllegalTypeOfArgument-(43)]\n\t_ = x[ErrIllegalColumn-(44)]\n\t_ = x[ErrIllegalNumberOfResultColumns-(45)]\n\t_ = x[ErrUnknownFunction-(46)]\n\t_ = x[ErrUnknownIdentifier-(47)]\n\t_ = x[ErrNotImplemented-(48)]\n\t_ = x[ErrLogicalError-(49)]\n\t_ = x[ErrUnknownType-(50)]\n\t_ = x[ErrEmptyListOfColumnsQueried-(51)]\n\t_ = x[ErrColumnQueriedMoreThanOnce-(52)]\n\t_ = x[ErrTypeMismatch-(53)]\n\t_ = x[ErrStorageDoesntAllowParameters-(54)]\n\t_ = x[ErrStorageRequiresParameter-(55)]\n\t_ = x[ErrUnknownStorage-(56)]\n\t_ = x[ErrTableAlreadyExists-(57)]\n\t_ = x[ErrTableMetadataAlreadyExists-(58)]\n\t_ = x[ErrIllegalTypeOfColumnForFilter-(59)]\n\t_ = x[ErrUnknownTable-(60)]\n\t_ = x[ErrOnlyFilterColumnInBlock-(61)]\n\t_ = x[ErrSyntaxError-(62)]\n\t_ = x[ErrUnknownAggregateFunction-(63)]\n\t_ = x[ErrCannotReadAggregateFunctionFromText-(64)]\n\t_ = x[ErrCannotWriteAggregateFunctionAsText-(65)]\n\t_ = x[ErrNotAColumn-(66)]\n\t_ = x[ErrIllegalKeyOfAggregation-(67)]\n\t_ = x[ErrCannotGetSizeOfField-(68)]\n\t_ = x[ErrArgumentOutOfBound-(69)]\n\t_ = x[ErrCannotConvertType-(70)]\n\t_ = x[ErrCannotWriteAfterEndOfBuffer-(71)]\n\t_ = x[ErrCannotParseNumber-(72)]\n\t_ = x[ErrUnknownFormat-(73)]\n\t_ = x[ErrCannotReadFromFileDescriptor-(74)]\n\t_ = x[ErrCannotWriteToFileDescriptor-(75)]\n\t_ = x[ErrCannotOpenFile-(76)]\n\t_ = x[ErrCannotCloseFile-(77)]\n\t_ = x[ErrUnknownTypeOfQuery-(78)]\n\t_ = x[ErrIncorrectFileName-(79)]\n\t_ = x[ErrIncorrectQuery-(80)]\n\t_ = x[ErrUnknownDatabase-(81)]\n\t_ = x[ErrDatabaseAlreadyExists-(82)]\n\t_ = x[ErrDirectoryDoesntExist-(83)]\n\t_ = x[ErrDirectoryAlreadyExists-(84)]\n\t_ = x[ErrFormatIsNotSuitableForInput-(85)]\n\t_ = 
x[ErrReceivedErrorFromRemoteIoServer-(86)]\n\t_ = x[ErrCannotSeekThroughFile-(87)]\n\t_ = x[ErrCannotTruncateFile-(88)]\n\t_ = x[ErrUnknownCompressionMethod-(89)]\n\t_ = x[ErrEmptyListOfColumnsPassed-(90)]\n\t_ = x[ErrSizesOfMarksFilesAreInconsistent-(91)]\n\t_ = x[ErrEmptyDataPassed-(92)]\n\t_ = x[ErrUnknownAggregatedDataVariant-(93)]\n\t_ = x[ErrCannotMergeDifferentAggregatedDataVariants-(94)]\n\t_ = x[ErrCannotReadFromSocket-(95)]\n\t_ = x[ErrCannotWriteToSocket-(96)]\n\t_ = x[ErrCannotReadAllDataFromChunkedInput-(97)]\n\t_ = x[ErrCannotWriteToEmptyBlockOutputStream-(98)]\n\t_ = x[ErrUnknownPacketFromClient-(99)]\n\t_ = x[ErrUnknownPacketFromServer-(100)]\n\t_ = x[ErrUnexpectedPacketFromClient-(101)]\n\t_ = x[ErrUnexpectedPacketFromServer-(102)]\n\t_ = x[ErrReceivedDataForWrongQueryID-(103)]\n\t_ = x[ErrTooSmallBufferSize-(104)]\n\t_ = x[ErrCannotReadHistory-(105)]\n\t_ = x[ErrCannotAppendHistory-(106)]\n\t_ = x[ErrFileDoesntExist-(107)]\n\t_ = x[ErrNoDataToInsert-(108)]\n\t_ = x[ErrCannotBlockSignal-(109)]\n\t_ = x[ErrCannotUnblockSignal-(110)]\n\t_ = x[ErrCannotManipulateSigset-(111)]\n\t_ = x[ErrCannotWaitForSignal-(112)]\n\t_ = x[ErrThereIsNoSession-(113)]\n\t_ = x[ErrCannotClockGettime-(114)]\n\t_ = x[ErrUnknownSetting-(115)]\n\t_ = x[ErrThereIsNoDefaultValue-(116)]\n\t_ = x[ErrIncorrectData-(117)]\n\t_ = x[ErrEngineRequired-(119)]\n\t_ = x[ErrCannotInsertValueOfDifferentSizeIntoTuple-(120)]\n\t_ = x[ErrUnknownSetDataVariant-(121)]\n\t_ = x[ErrIncompatibleColumns-(122)]\n\t_ = x[ErrUnknownTypeOfAstNode-(123)]\n\t_ = x[ErrIncorrectElementOfSet-(124)]\n\t_ = x[ErrIncorrectResultOfScalarSubquery-(125)]\n\t_ = x[ErrCannotGetReturnType-(126)]\n\t_ = x[ErrIllegalIndex-(127)]\n\t_ = x[ErrTooLargeArraySize-(128)]\n\t_ = x[ErrFunctionIsSpecial-(129)]\n\t_ = x[ErrCannotReadArrayFromText-(130)]\n\t_ = x[ErrTooLargeStringSize-(131)]\n\t_ = x[ErrCannotCreateTableFromMetadata-(132)]\n\t_ = x[ErrAggregateFunctionDoesntAllowParameters-(133)]\n\t_ = x[ErrParametersToAggregateFunctionsMustBeLiterals-(134)]\n\t_ = x[ErrZeroArrayOrTupleIndex-(135)]\n\t_ = x[ErrUnknownElementInConfig-(137)]\n\t_ = x[ErrExcessiveElementInConfig-(138)]\n\t_ = x[ErrNoElementsInConfig-(139)]\n\t_ = x[ErrAllRequestedColumnsAreMissing-(140)]\n\t_ = x[ErrSamplingNotSupported-(141)]\n\t_ = x[ErrNotFoundNode-(142)]\n\t_ = x[ErrFoundMoreThanOneNode-(143)]\n\t_ = x[ErrFirstDateIsBiggerThanLastDate-(144)]\n\t_ = x[ErrUnknownOverflowMode-(145)]\n\t_ = x[ErrQuerySectionDoesntMakeSense-(146)]\n\t_ = x[ErrNotFoundFunctionElementForAggregate-(147)]\n\t_ = x[ErrNotFoundRelationElementForCondition-(148)]\n\t_ = x[ErrNotFoundRHSElementForCondition-(149)]\n\t_ = x[ErrNoAttributesListed-(150)]\n\t_ = x[ErrIndexOfColumnInSortClauseIsOutOfRange-(151)]\n\t_ = x[ErrUnknownDirectionOfSorting-(152)]\n\t_ = x[ErrIllegalDivision-(153)]\n\t_ = x[ErrAggregateFunctionNotApplicable-(154)]\n\t_ = x[ErrUnknownRelation-(155)]\n\t_ = x[ErrDictionariesWasNotLoaded-(156)]\n\t_ = x[ErrIllegalOverflowMode-(157)]\n\t_ = x[ErrTooManyRows-(158)]\n\t_ = x[ErrTimeoutExceeded-(159)]\n\t_ = x[ErrTooSlow-(160)]\n\t_ = x[ErrTooManyColumns-(161)]\n\t_ = x[ErrTooDeepSubqueries-(162)]\n\t_ = x[ErrTooDeepPipeline-(163)]\n\t_ = x[ErrReadonly-(164)]\n\t_ = x[ErrTooManyTemporaryColumns-(165)]\n\t_ = x[ErrTooManyTemporaryNonConstColumns-(166)]\n\t_ = x[ErrTooDeepAst-(167)]\n\t_ = x[ErrTooBigAst-(168)]\n\t_ = x[ErrBadTypeOfField-(169)]\n\t_ = x[ErrBadGet-(170)]\n\t_ = x[ErrBlocksHaveDifferentStructure-(171)]\n\t_ = x[ErrCannotCreateDirectory-(172)]\n\t_ = 
x[ErrCannotAllocateMemory-(173)]\n\t_ = x[ErrCyclicAliases-(174)]\n\t_ = x[ErrChunkNotFound-(176)]\n\t_ = x[ErrDuplicateChunkName-(177)]\n\t_ = x[ErrMultipleAliasesForExpression-(178)]\n\t_ = x[ErrMultipleExpressionsForAlias-(179)]\n\t_ = x[ErrThereIsNoProfile-(180)]\n\t_ = x[ErrIllegalFinal-(181)]\n\t_ = x[ErrIllegalPrewhere-(182)]\n\t_ = x[ErrUnexpectedExpression-(183)]\n\t_ = x[ErrIllegalAggregation-(184)]\n\t_ = x[ErrUnsupportedMyisamBlockType-(185)]\n\t_ = x[ErrUnsupportedCollationLocale-(186)]\n\t_ = x[ErrCollationComparisonFailed-(187)]\n\t_ = x[ErrUnknownAction-(188)]\n\t_ = x[ErrTableMustNotBeCreatedManually-(189)]\n\t_ = x[ErrSizesOfArraysDoesntMatch-(190)]\n\t_ = x[ErrSetSizeLimitExceeded-(191)]\n\t_ = x[ErrUnknownUser-(192)]\n\t_ = x[ErrWrongPassword-(193)]\n\t_ = x[ErrRequiredPassword-(194)]\n\t_ = x[ErrIPAddressNotAllowed-(195)]\n\t_ = x[ErrUnknownAddressPatternType-(196)]\n\t_ = x[ErrServerRevisionIsTooOld-(197)]\n\t_ = x[ErrDNSError-(198)]\n\t_ = x[ErrUnknownQuota-(199)]\n\t_ = x[ErrQuotaDoesntAllowKeys-(200)]\n\t_ = x[ErrQuotaExpired-(201)]\n\t_ = x[ErrTooManySimultaneousQueries-(202)]\n\t_ = x[ErrNoFreeConnection-(203)]\n\t_ = x[ErrCannotFsync-(204)]\n\t_ = x[ErrNestedTypeTooDeep-(205)]\n\t_ = x[ErrAliasRequired-(206)]\n\t_ = x[ErrAmbiguousIdentifier-(207)]\n\t_ = x[ErrEmptyNestedTable-(208)]\n\t_ = x[ErrSocketTimeout-(209)]\n\t_ = x[ErrNetworkError-(210)]\n\t_ = x[ErrEmptyQuery-(211)]\n\t_ = x[ErrUnknownLoadBalancing-(212)]\n\t_ = x[ErrUnknownTotalsMode-(213)]\n\t_ = x[ErrCannotStatvfs-(214)]\n\t_ = x[ErrNotAnAggregate-(215)]\n\t_ = x[ErrQueryWithSameIDIsAlreadyRunning-(216)]\n\t_ = x[ErrClientHasConnectedToWrongPort-(217)]\n\t_ = x[ErrTableIsDropped-(218)]\n\t_ = x[ErrDatabaseNotEmpty-(219)]\n\t_ = x[ErrDuplicateInterserverIoEndpoint-(220)]\n\t_ = x[ErrNoSuchInterserverIoEndpoint-(221)]\n\t_ = x[ErrAddingReplicaToNonEmptyTable-(222)]\n\t_ = x[ErrUnexpectedAstStructure-(223)]\n\t_ = x[ErrReplicaIsAlreadyActive-(224)]\n\t_ = x[ErrNoZookeeper-(225)]\n\t_ = x[ErrNoFileInDataPart-(226)]\n\t_ = x[ErrUnexpectedFileInDataPart-(227)]\n\t_ = x[ErrBadSizeOfFileInDataPart-(228)]\n\t_ = x[ErrQueryIsTooLarge-(229)]\n\t_ = x[ErrNotFoundExpectedDataPart-(230)]\n\t_ = x[ErrTooManyUnexpectedDataParts-(231)]\n\t_ = x[ErrNoSuchDataPart-(232)]\n\t_ = x[ErrBadDataPartName-(233)]\n\t_ = x[ErrNoReplicaHasPart-(234)]\n\t_ = x[ErrDuplicateDataPart-(235)]\n\t_ = x[ErrAborted-(236)]\n\t_ = x[ErrNoReplicaNameGiven-(237)]\n\t_ = x[ErrFormatVersionTooOld-(238)]\n\t_ = x[ErrCannotMunmap-(239)]\n\t_ = x[ErrCannotMremap-(240)]\n\t_ = x[ErrMemoryLimitExceeded-(241)]\n\t_ = x[ErrTableIsReadOnly-(242)]\n\t_ = x[ErrNotEnoughSpace-(243)]\n\t_ = x[ErrUnexpectedZookeeperError-(244)]\n\t_ = x[ErrCorruptedData-(246)]\n\t_ = x[ErrIncorrectMark-(247)]\n\t_ = x[ErrInvalidPartitionValue-(248)]\n\t_ = x[ErrNotEnoughBlockNumbers-(250)]\n\t_ = x[ErrNoSuchReplica-(251)]\n\t_ = x[ErrTooManyParts-(252)]\n\t_ = x[ErrReplicaIsAlreadyExist-(253)]\n\t_ = x[ErrNoActiveReplicas-(254)]\n\t_ = x[ErrTooManyRetriesToFetchParts-(255)]\n\t_ = x[ErrPartitionAlreadyExists-(256)]\n\t_ = x[ErrPartitionDoesntExist-(257)]\n\t_ = x[ErrUnionAllResultStructuresMismatch-(258)]\n\t_ = x[ErrClientOutputFormatSpecified-(260)]\n\t_ = x[ErrUnknownBlockInfoField-(261)]\n\t_ = x[ErrBadCollation-(262)]\n\t_ = x[ErrCannotCompileCode-(263)]\n\t_ = x[ErrIncompatibleTypeOfJoin-(264)]\n\t_ = x[ErrNoAvailableReplica-(265)]\n\t_ = x[ErrMismatchReplicasDataSources-(266)]\n\t_ = x[ErrStorageDoesntSupportParallelReplicas-(267)]\n\t_ = 
x[ErrCPUIDError-(268)]\n\t_ = x[ErrInfiniteLoop-(269)]\n\t_ = x[ErrCannotCompress-(270)]\n\t_ = x[ErrCannotDecompress-(271)]\n\t_ = x[ErrAioSubmitError-(272)]\n\t_ = x[ErrAioCompletionError-(273)]\n\t_ = x[ErrAioReadError-(274)]\n\t_ = x[ErrAioWriteError-(275)]\n\t_ = x[ErrIndexNotUsed-(277)]\n\t_ = x[ErrLeadershipLost-(278)]\n\t_ = x[ErrAllConnectionTriesFailed-(279)]\n\t_ = x[ErrNoAvailableData-(280)]\n\t_ = x[ErrDictionaryIsEmpty-(281)]\n\t_ = x[ErrIncorrectIndex-(282)]\n\t_ = x[ErrUnknownDistributedProductMode-(283)]\n\t_ = x[ErrUnknownGlobalSubqueriesMethod-(284)]\n\t_ = x[ErrTooLessLiveReplicas-(285)]\n\t_ = x[ErrUnsatisfiedQuorumForPreviousWrite-(286)]\n\t_ = x[ErrUnknownFormatVersion-(287)]\n\t_ = x[ErrDistributedInJoinSubqueryDenied-(288)]\n\t_ = x[ErrReplicaIsNotInQuorum-(289)]\n\t_ = x[ErrLimitExceeded-(290)]\n\t_ = x[ErrDatabaseAccessDenied-(291)]\n\t_ = x[ErrLeadershipChanged-(292)]\n\t_ = x[ErrMongodbCannotAuthenticate-(293)]\n\t_ = x[ErrInvalidBlockExtraInfo-(294)]\n\t_ = x[ErrReceivedEmptyData-(295)]\n\t_ = x[ErrNoRemoteShardFound-(296)]\n\t_ = x[ErrShardHasNoConnections-(297)]\n\t_ = x[ErrCannotPipe-(298)]\n\t_ = x[ErrCannotFork-(299)]\n\t_ = x[ErrCannotDlsym-(300)]\n\t_ = x[ErrCannotCreateChildProcess-(301)]\n\t_ = x[ErrChildWasNotExitedNormally-(302)]\n\t_ = x[ErrCannotSelect-(303)]\n\t_ = x[ErrCannotWaitpid-(304)]\n\t_ = x[ErrTableWasNotDropped-(305)]\n\t_ = x[ErrTooDeepRecursion-(306)]\n\t_ = x[ErrTooManyBytes-(307)]\n\t_ = x[ErrUnexpectedNodeInZookeeper-(308)]\n\t_ = x[ErrFunctionCannotHaveParameters-(309)]\n\t_ = x[ErrInvalidShardWeight-(317)]\n\t_ = x[ErrInvalidConfigParameter-(318)]\n\t_ = x[ErrUnknownStatusOfInsert-(319)]\n\t_ = x[ErrValueIsOutOfRangeOfDataType-(321)]\n\t_ = x[ErrBarrierTimeout-(335)]\n\t_ = x[ErrUnknownDatabaseEngine-(336)]\n\t_ = x[ErrDdlGuardIsActive-(337)]\n\t_ = x[ErrUnfinished-(341)]\n\t_ = x[ErrMetadataMismatch-(342)]\n\t_ = x[ErrSupportIsDisabled-(344)]\n\t_ = x[ErrTableDiffersTooMuch-(345)]\n\t_ = x[ErrCannotConvertCharset-(346)]\n\t_ = x[ErrCannotLoadConfig-(347)]\n\t_ = x[ErrCannotInsertNullInOrdinaryColumn-(349)]\n\t_ = x[ErrIncompatibleSourceTables-(350)]\n\t_ = x[ErrAmbiguousTableName-(351)]\n\t_ = x[ErrAmbiguousColumnName-(352)]\n\t_ = x[ErrIndexOfPositionalArgumentIsOutOfRange-(353)]\n\t_ = x[ErrZlibInflateFailed-(354)]\n\t_ = x[ErrZlibDeflateFailed-(355)]\n\t_ = x[ErrBadLambda-(356)]\n\t_ = x[ErrReservedIdentifierName-(357)]\n\t_ = x[ErrIntoOutfileNotAllowed-(358)]\n\t_ = x[ErrTableSizeExceedsMaxDropSizeLimit-(359)]\n\t_ = x[ErrCannotCreateCharsetConverter-(360)]\n\t_ = x[ErrSeekPositionOutOfBound-(361)]\n\t_ = x[ErrCurrentWriteBufferIsExhausted-(362)]\n\t_ = x[ErrCannotCreateIoBuffer-(363)]\n\t_ = x[ErrReceivedErrorTooManyRequests-(364)]\n\t_ = x[ErrOutputIsNotSorted-(365)]\n\t_ = x[ErrSizesOfNestedColumnsAreInconsistent-(366)]\n\t_ = x[ErrTooManyFetches-(367)]\n\t_ = x[ErrBadCast-(368)]\n\t_ = x[ErrAllReplicasAreStale-(369)]\n\t_ = x[ErrDataTypeCannotBeUsedInTables-(370)]\n\t_ = x[ErrInconsistentClusterDefinition-(371)]\n\t_ = x[ErrSessionNotFound-(372)]\n\t_ = x[ErrSessionIsLocked-(373)]\n\t_ = x[ErrInvalidSessionTimeout-(374)]\n\t_ = x[ErrCannotDlopen-(375)]\n\t_ = x[ErrCannotParseUUID-(376)]\n\t_ = x[ErrIllegalSyntaxForDataType-(377)]\n\t_ = x[ErrDataTypeCannotHaveArguments-(378)]\n\t_ = x[ErrUnknownStatusOfDistributedDdlTask-(379)]\n\t_ = x[ErrCannotKill-(380)]\n\t_ = x[ErrHTTPLengthRequired-(381)]\n\t_ = x[ErrCannotLoadCatboostModel-(382)]\n\t_ = x[ErrCannotApplyCatboostModel-(383)]\n\t_ = 
x[ErrPartIsTemporarilyLocked-(384)]\n\t_ = x[ErrMultipleStreamsRequired-(385)]\n\t_ = x[ErrNoCommonType-(386)]\n\t_ = x[ErrExternalLoadableAlreadyExists-(387)]\n\t_ = x[ErrCannotAssignOptimize-(388)]\n\t_ = x[ErrInsertWasDeduplicated-(389)]\n\t_ = x[ErrCannotGetCreateTableQuery-(390)]\n\t_ = x[ErrExternalLibraryError-(391)]\n\t_ = x[ErrQueryIsProhibited-(392)]\n\t_ = x[ErrThereIsNoQuery-(393)]\n\t_ = x[ErrQueryWasCancelled-(394)]\n\t_ = x[ErrFunctionThrowIfValueIsNonZero-(395)]\n\t_ = x[ErrTooManyRowsOrBytes-(396)]\n\t_ = x[ErrQueryIsNotSupportedInMaterializedView-(397)]\n\t_ = x[ErrCannotParseDomainValueFromString-(441)]\n\t_ = x[ErrAuthenticationFailed-(516)]\n\t_ = x[ErrKeeperException-(999)]\n\t_ = x[ErrPocoException-(1000)]\n\t_ = x[ErrStdException-(1001)]\n\t_ = x[ErrUnknownException-(1002)]\n\t_ = x[ErrConditionalTreeParentNotFound-(2001)]\n\t_ = x[ErrIllegalProjectionManipulator-(2002)]\n}", "title": "" }, { "docid": "17a64a7702f635fe3c178049f74fc927", "score": "0.4568529", "text": "func (d *Deprecated) DeprecatedPtrOp() int {\n\treturn 0\n}", "title": "" }, { "docid": "3d47d17d52671cbe9e1bfe145e4fab08", "score": "0.45434806", "text": "func regression1033() {\n\tfoo(&Bar{})\n}", "title": "" }, { "docid": "1dca339e1b353d8bda352f43ac4813df", "score": "0.45227325", "text": "func init() {}", "title": "" }, { "docid": "384dec8fc1df8aa14e6a4421fa3b3148", "score": "0.4520523", "text": "func (c *CustomConnection) Msgs() {\n\n}", "title": "" }, { "docid": "10c2de064d0750b53d06cdd010773dcb", "score": "0.45136473", "text": "func use(interface{}) {}", "title": "" }, { "docid": "10c2de064d0750b53d06cdd010773dcb", "score": "0.45136473", "text": "func use(interface{}) {}", "title": "" }, { "docid": "c685bb944de7ff6f5f29e7bb54835de1", "score": "0.44962272", "text": "func (_Ubt *UbtCallerSession) Deprecated() (bool, error) {\n\treturn _Ubt.Contract.Deprecated(&_Ubt.CallOpts)\n}", "title": "" }, { "docid": "a8a389c770bb3e65cf6906140313e130", "score": "0.4493401", "text": "func (c *Context) noOp(name string) {\n}", "title": "" }, { "docid": "bc1806c451448275361a69bda841e1b5", "score": "0.4490844", "text": "func (_Ubt *UbtSession) Deprecated() (bool, error) {\n\treturn _Ubt.Contract.Deprecated(&_Ubt.CallOpts)\n}", "title": "" }, { "docid": "f61f726b0e71503ce3f143300614e71f", "score": "0.4490804", "text": "func pointerArg(s *SaiyanBasic) {\n\ts.Name = \"Goku Pointer\"\n\ts.Power += 10000\n}", "title": "" }, { "docid": "4b90b8494b38ca011b94eca5aff29b8a", "score": "0.44670546", "text": "func Legacy() Strategy {\n\treturn xffStrategy{}\n}", "title": "" }, { "docid": "6bb90d5aff173684e401ac413e34867d", "score": "0.44438252", "text": "func PrintDeprecatedFeatureWarning(feature string) {\n\tnewError(\"You are using a deprecated feature: \" + feature + \". 
Please update your config file with latest configuration format, or update your client software.\").WriteToLog()\n}", "title": "" }, { "docid": "c808e28ea2ffcc0739148a780bb50485", "score": "0.4436664", "text": "func Internal(err error) error {\n\treturn Grpc(codes.Internal, err)\n}", "title": "" }, { "docid": "3a514ef1f50018e711484a2170a528aa", "score": "0.44254115", "text": "func XyzFunc() {}", "title": "" }, { "docid": "28efd97b79691006acb24040315f2b43", "score": "0.44220096", "text": "func OldAPI1(x string) {\n\tfmt.Println(x)\n}", "title": "" }, { "docid": "2988b3d13da126532211152a64aad105", "score": "0.4420763", "text": "func DeprecatedFlag(f *flag.FlagSet, name, message string) {\n\tf.Var(deprecatedFlag{name}, name, message)\n}", "title": "" }, { "docid": "37955dea69cf9d27a64d6b4b43e72484", "score": "0.44117364", "text": "func newUUID() uuid.UUID {\n\treturn uuidgen.newUUID()\n}", "title": "" }, { "docid": "54d1c907465d235c938ddaa20141edac", "score": "0.4410532", "text": "func privateFunc() string {\r\n\r\n\t// private access, not available outside lang\r\n\treturn \"I'm private!\"\r\n}", "title": "" }, { "docid": "fd9f1b59f6041c0c6990ab27bb94e30d", "score": "0.4405839", "text": "func (self *Pointer) Position() *Point{\n return &Point{self.Object.Get(\"position\")}\n}", "title": "" }, { "docid": "170ab271ba0ce1ac3651e89488905e6b", "score": "0.44018674", "text": "func newLegacyObjectVerifier(f *sif.FileImage, od sif.Descriptor) *legacyObjectVerifier {\n\treturn &legacyObjectVerifier{f: f, od: od}\n}", "title": "" }, { "docid": "f5d1b5c092efbe12aa4d48a2dbc2184f", "score": "0.44016966", "text": "func (t *T) Buuuuug() { // In square, we misspell this method.\n\treturn\n}", "title": "" }, { "docid": "fd0710777b9446685aefb3dea5bacc01", "score": "0.4400037", "text": "func (*ParameterDescription) Backend() {}", "title": "" }, { "docid": "b3b28f4cdc1d3596005a0d02f736390b", "score": "0.43970066", "text": "func Deprecated() ParamOption {\n\treturn &paramOption{func(p *openAPIParam) {\n\t\tp.Deprecated = true\n\t}}\n}", "title": "" }, { "docid": "37b99b081d771b5186f09be34a5e1421", "score": "0.4389997", "text": "func (self *Pointer) PointerId() int{\n return self.Object.Get(\"pointerId\").Int()\n}", "title": "" }, { "docid": "e5e959831d22546b58e5e9727360b242", "score": "0.4385365", "text": "func main() {\n\nmystructvar := mypackage.Mystruct {\"helloo\" }\n\nfmt.Println(\"value is\",mystructvar.Mymethod())\t// <<<<< rename,12,36,12,36,Renamed,pass\n\n\n}", "title": "" }, { "docid": "b0af0d0cecd861115c3eaf5317188113", "score": "0.43832883", "text": "func (b *SchemaMutator) Deprecated(v bool) *SchemaMutator {\n\tb.proxy.deprecated = v\n\treturn b\n}", "title": "" }, { "docid": "ccc32f1aaab09092be3b52b132215928", "score": "0.43785098", "text": "func Error() string { return \"\" }", "title": "" }, { "docid": "aeab816ab8d3d2b38f971c4ade9bca79", "score": "0.4377403", "text": "func (*FS) Name() string { return \"FS\" }", "title": "" }, { "docid": "940bcdac1055a54bfd9a4b0a3f1a2d43", "score": "0.43733367", "text": "func makeUsage(msg string) {\n\tusage := flag.Usage\n\tflag.Usage = func() {\n\t\tfmt.Fprintln(os.Stderr, msg)\n\t\tusage()\n\t}\n}", "title": "" }, { "docid": "e45138c436960318d34383c05e85d27c", "score": "0.4359294", "text": "func (o ThingTypeOutput) Deprecated() pulumi.BoolPtrOutput {\n\treturn o.ApplyT(func(v *ThingType) pulumi.BoolPtrOutput { return v.Deprecated }).(pulumi.BoolPtrOutput)\n}", "title": "" }, { "docid": "00f0a224a532ffcc6bea56ee3fb79d1c", "score": "0.43503627", "text": "func 
UseInternal2() {\n\tSayHello()\n}", "title": "" }, { "docid": "b9c93d170903206324b445728bc70c50", "score": "0.43458894", "text": "func (ExperimentalObject1) ExpMember1() {\n\n}", "title": "" }, { "docid": "c5e81917d86561a2bb4a28995c7d394a", "score": "0.43420672", "text": "func (self *Loader) _withSyncPointDepth() interface{}{\n return self.Object.Get(\"_withSyncPointDepth\")\n}", "title": "" }, { "docid": "13ecc9407f0f9e4af7a43462a1f636d6", "score": "0.43402225", "text": "func (s AEmpty) GUMIDraw() {\n}", "title": "" }, { "docid": "174109c12b7fb3ed136c0e44c1bbdeae", "score": "0.4333411", "text": "func update () {}", "title": "" }, { "docid": "2129285fd98dfa5863f69406c8b38a5a", "score": "0.43326676", "text": "func usageErr(err error) {\n\tusage()\n\tfmt.Fprintf(os.Stderr, \"Error: %v\\n\\n\", err)\n}", "title": "" }, { "docid": "22e848b8e3d76c6cf16484c304cb02af", "score": "0.43187752", "text": "func appendDeprecationWarning(s *SessionVars, name, replacement string) {\n\ts.StmtCtx.AppendWarning(errWarnDeprecatedSyntax.FastGenByArgs(name, replacement))\n}", "title": "" }, { "docid": "8dacc084b26416b6efd8ed1116c8650c", "score": "0.43096608", "text": "func (test *SelfTest) IncorrectDocs(foo, bar string) {\n}", "title": "" }, { "docid": "9a8e2a9270917a23ee1a83b2f9002e2c", "score": "0.43087587", "text": "func (*Struct) MetadataNode() {}", "title": "" }, { "docid": "91772961793f6489e86d8ff8ff217bd1", "score": "0.43087444", "text": "func newSpecializedMDNode(old ast.SpecializedMDNode) metadata.SpecializedNode {\n\tswitch old := old.(type) {\n\tcase *ast.DIBasicType:\n\t\treturn &metadata.DIBasicType{}\n\tcase *ast.DICommonBlock:\n\t\treturn &metadata.DICommonBlock{}\n\tcase *ast.DICompileUnit:\n\t\treturn &metadata.DICompileUnit{}\n\tcase *ast.DICompositeType:\n\t\treturn &metadata.DICompositeType{}\n\tcase *ast.DIDerivedType:\n\t\treturn &metadata.DIDerivedType{}\n\tcase *ast.DIEnumerator:\n\t\treturn &metadata.DIEnumerator{}\n\tcase *ast.DIExpression:\n\t\treturn &metadata.DIExpression{}\n\tcase *ast.DIFile:\n\t\treturn &metadata.DIFile{}\n\tcase *ast.DIGlobalVariable:\n\t\treturn &metadata.DIGlobalVariable{}\n\tcase *ast.DIGlobalVariableExpression:\n\t\treturn &metadata.DIGlobalVariableExpression{}\n\tcase *ast.DIImportedEntity:\n\t\treturn &metadata.DIImportedEntity{}\n\tcase *ast.DILabel:\n\t\treturn &metadata.DILabel{}\n\tcase *ast.DILexicalBlock:\n\t\treturn &metadata.DILexicalBlock{}\n\tcase *ast.DILexicalBlockFile:\n\t\treturn &metadata.DILexicalBlockFile{}\n\tcase *ast.DILocalVariable:\n\t\treturn &metadata.DILocalVariable{}\n\tcase *ast.DILocation:\n\t\treturn &metadata.DILocation{}\n\tcase *ast.DIMacro:\n\t\treturn &metadata.DIMacro{}\n\tcase *ast.DIMacroFile:\n\t\treturn &metadata.DIMacroFile{}\n\tcase *ast.DIModule:\n\t\treturn &metadata.DIModule{}\n\tcase *ast.DINamespace:\n\t\treturn &metadata.DINamespace{}\n\tcase *ast.DIObjCProperty:\n\t\treturn &metadata.DIObjCProperty{}\n\tcase *ast.DIStringType:\n\t\treturn &metadata.DIStringType{}\n\tcase *ast.DISubprogram:\n\t\treturn &metadata.DISubprogram{}\n\tcase *ast.DISubrange:\n\t\treturn &metadata.DISubrange{}\n\tcase *ast.DISubroutineType:\n\t\treturn &metadata.DISubroutineType{}\n\tcase *ast.DITemplateTypeParameter:\n\t\treturn &metadata.DITemplateTypeParameter{}\n\tcase *ast.DITemplateValueParameter:\n\t\treturn &metadata.DITemplateValueParameter{}\n\tcase *ast.GenericDINode:\n\t\treturn &metadata.GenericDINode{}\n\tdefault:\n\t\tpanic(fmt.Errorf(\"support for %T not yet implemented\", old))\n\t}\n}", "title": "" }, { "docid": 
"a5f388073ad712f563d18296adf0d7f4", "score": "0.43083623", "text": "func (b *ParameterBuilder) Deprecated(v bool) *ParameterBuilder {\n\tb.target.deprecated = v\n\treturn b\n}", "title": "" }, { "docid": "0a769658118bfbe5931e6bc6c18c2279", "score": "0.43051383", "text": "func ItWorks() {}", "title": "" }, { "docid": "16f8f66a06f1001f375b9ff6fed45265", "score": "0.43039826", "text": "func (*Vector) MetadataNode() {}", "title": "" }, { "docid": "c537e84cd941288470242d6b488c6cbd", "score": "0.43016362", "text": "func current() string {\n\treturn \"CURRENT\"\n}", "title": "" }, { "docid": "15aa47b07c608052c73be9968457e66b", "score": "0.42998418", "text": "func Example() {\n\n}", "title": "" }, { "docid": "b7559f7f0b396d82e0d017a68eb26a93", "score": "0.42974773", "text": "func PublicFoo() string {\r\n return \"public function\"\r\n}", "title": "" }, { "docid": "763cce75aa812c47668db65ffe95a54a", "score": "0.4294456", "text": "func NotFound() {\n\n}", "title": "" }, { "docid": "470715bd7d170c07c36e7dc8fe195a87", "score": "0.4293679", "text": "func (self *Loader) GetVideoURLI(args ...interface{}) string{\n return self.Object.Call(\"getVideoURL\", args).String()\n}", "title": "" }, { "docid": "e27882aef4f9a5cff2aab768f05349cc", "score": "0.4284214", "text": "func newError(msg string) error {\n\t_, file, line, _ := runtime.Caller(1)\n\treturn &errorWrapper{errors.New(msg), file, line}\n}", "title": "" }, { "docid": "78b616bb9bbba9fa737bea548ad1cf08", "score": "0.42792416", "text": "func newErrInitBackend(provider string) error { return &cmn.ErrInitBackend{Provider: provider} }", "title": "" }, { "docid": "30ce9f6247efc5f427692cc7c31f4cb7", "score": "0.42748633", "text": "func makeUuid() string {\r\n return uuid.New().String()\r\n}", "title": "" }, { "docid": "6f9b5127f34fa868bc071086c35edc0e", "score": "0.42738757", "text": "func (*A_VT) SwissArmyKnife() {}", "title": "" }, { "docid": "e9f673be53d5447fb17b2de852dfe63c", "score": "0.4272593", "text": "func (_ WordLength) redHerring() {\n}", "title": "" }, { "docid": "953ae421f7a538c769e93b5bffac216b", "score": "0.42723173", "text": "func (*context) String() string { return \"Blase\" }", "title": "" }, { "docid": "f248797f723559b97d8f3af10ed65c39", "score": "0.42703998", "text": "func NopLogger(_ string) {}", "title": "" }, { "docid": "95f45f23167310b4bbc64a3c6d78bfb4", "score": "0.42695245", "text": "func setError() {\n\n}", "title": "" }, { "docid": "edbc8866c9c96bce25d31bdb58ebdd6e", "score": "0.42672762", "text": "func newParseError(filename string, line int) error {\n\treturn &ParseError{filename, line}\n}", "title": "" }, { "docid": "351f2f812d8f6ce183ed9ac1871c0dc1", "score": "0.42670035", "text": "func (c *API) Keyless() {\n}", "title": "" }, { "docid": "5c2e05d77ea322a625864dd5374a33aa", "score": "0.4266673", "text": "func main() {\n\n\n}", "title": "" }, { "docid": "5c2e05d77ea322a625864dd5374a33aa", "score": "0.4266673", "text": "func main() {\n\n\n}", "title": "" }, { "docid": "4990a4ccea426ab21bdc3fcc02a79bed", "score": "0.42646608", "text": "func (lv *LeafveinServer) free() {\n\t//\tTODO Temporarily invalid, keep extension\n}", "title": "" }, { "docid": "e1fb3cbf92dc94049fe5d63e7f381b13", "score": "0.42628884", "text": "func (this *RangerPHPClassWriter) Get_thisName() string {\n return this.thisName\n}", "title": "" } ]
12779a71ba055d03d9b1d82327db721c
Handle User Token Creation, requires a username. Please use adminrouter to handle this function
[ { "docid": "c5b97967ac40e658ec67fa44c0ee7ca0", "score": "0.7519475", "text": "func (a *AutoLoginHandler)HandleUserTokenCreation(w http.ResponseWriter, r *http.Request){\n\tusername, err := mv(r, \"username\", false)\n\tif err != nil{\n\t\tsendErrorResponse(w, \"Invalid username\");\n\t\treturn\n\t}\n\n\t//Check if user exists\n\tauthAgent := a.userHandler.GetAuthAgent();\n\tif !authAgent.UserExists(username){\n\t\tsendErrorResponse(w, \"User not exists!\")\n\t\treturn\n\t}\n\n\t//Generate and send the token to client\n\ttoken:= authAgent.NewAutologinToken(username)\n\tjsonString, _ := json.Marshal(token)\n\tsendJSONResponse(w, string(jsonString))\n}", "title": "" } ]
[ { "docid": "3d8643edad118d7178fde84bb8729f0e", "score": "0.68570215", "text": "func createToken(w http.ResponseWriter, r *http.Request) {\n creds := godotenv.Load(\".env\")\n\tif creds != nil {\n\t\tlog.Fatalf(\"Error loading .env file\")\n\t}\n\tuname := os.Getenv(\"AUTH_USER\")\n\ttoken := uuid.New().String()\n\tuser := auth.NewDefaultUser(uname, \"1\", nil, nil)\n\ttokenStrategy := authenticator.Strategy(bearer.CachedStrategyKey)\n\tauth.Append(tokenStrategy, token, user, r)\n\tbody := fmt.Sprintf(\"token: %s \\n\", token)\n\tw.Write([]byte(body))\n}", "title": "" }, { "docid": "08453345118eff39d935b118401b6ea3", "score": "0.68063253", "text": "func handleToken(w http.ResponseWriter, r *http.Request) (*User, error) {\n\tuser := new(User)\n\tprovidedTokens := strings.Split(r.Header.Get(\"Authorization\"), \" \")\n\tif len(providedTokens) != 2 {\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\tw.Write([]byte(\"Invlaid number of tokens provided\"))\n\t\treturn nil, errors.New(\"Invlaid number of tokens provided\")\n\t}\n\tuser.Token = providedTokens[1]\n\n\tuser, err := db.GetUserSession(user)\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\tw.Write([]byte(err.Error()))\n\t\treturn nil, err\n\t}\n\n\tvalid, _ := validateToken(user)\n\tif !valid {\n\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\tw.Write([]byte(\"Invalid web token\"))\n\t\treturn nil, errors.New(\"Invalid web token\")\n\t}\n\treturn user, nil\n}", "title": "" }, { "docid": "0eb1e237f8db827797c753811c0d2e9e", "score": "0.67225", "text": "func (db *Env) generateToken(w http.ResponseWriter, req *http.Request) {\n\n\tlog.Println(\" ### Generate Token ### \")\n\tw.Header().Add(\"Content-Type\", \"application/json\")\n\n\tusername := req.FormValue(\"email\")\n\tpasswd := req.FormValue(\"password\")\n\n\t// check if username exists\n\tu, err := db.checkCredentials(username, passwd)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusForbidden)\n\t\treturn\n\t}\n\tlog.Printf(\" Generating token for user: %s \", username)\n\n\ttkn, err := createToken(username)\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t_, err := io.WriteString(w, `{\"error\":\"token_generation_failed\"}`)\n\t\tcheck(err)\n\t\treturn\n\t}\n\n\tu.Token = tkn\n\t_, err = io.WriteString(w, `{\"token\":\"`+tkn+`\"}`)\n\tcheck(err)\n\n}", "title": "" }, { "docid": "74721b3c675f7e5d8e9571f9ccb831b8", "score": "0.66115725", "text": "func postNewUser(w http.ResponseWriter, req *http.Request) (int, error) {\n\tut := backend.MakeUserTable(false)\n\t// load request json data into a Dog struct and throw an err to client\n\t// if they send us fields not declared in Dog\n\tdecoder := json.NewDecoder(req.Body)\n\tdecoder.DisallowUnknownFields()\n\tu := BlankUser()\n\terr := decoder.Decode(&u)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tlookup := ReverseLookupItem(u.Username)\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tif lookup.Exists {\n\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\tWriteStruct(w, &AuthorizeTokenAttemptResponse{\"\", \"\", false})\n\t} else {\n\t\t// give user IDs and add them to table\n\t\tu.Setup()\n\t\tut.AddItem(u)\n\t\t// add mapping of username --> ID for login\n\t\trlt := backend.MakeReverseLookupTable(false)\n\t\trte := &ReverseLookup{u.Username, u.ID}\n\t\trlt.AddItem(rte)\n\t\t// add mapping of token --> ID for auth\n\t\ttoken := newToken()\n\t\trte = &ReverseLookup{token, u.ID}\n\t\trlt.AddItem(rte)\n\t\tw.WriteHeader(http.StatusCreated)\n\t\tWriteStruct(w, 
&AuthorizeTokenAttemptResponse{token, u.Username, true})\n\t}\n\treturn 1, nil\n}", "title": "" }, { "docid": "a89627d6637b54c3046153dc2b647b73", "score": "0.65597266", "text": "func (t *TokenServer) HandleTokenCreate() http.HandlerFunc {\n\ttype GetTokenRequest struct {\n\t\tUserName string `json:\"username\"`\n\t\tPassword string `json:\"password\"`\n\t}\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tctx := r.Context()\n\t\tlogging.Set(ctx, logging.KeyContext, \"create-session\")\n\n\t\tvar getTokenRequest GetTokenRequest\n\t\tif err := readBody(r, &getTokenRequest); err != nil {\n\t\t\tHandleError(ctx, w, apperror.NewError(apperror.CodeInvalidRequest, \"invalid json\"))\n\t\t\treturn\n\t\t}\n\n\t\tlogging.Set(ctx, \"user_name\", getTokenRequest.UserName)\n\n\t\tresponse, err := t.tokenService.CreateToken(getTokenRequest.UserName, getTokenRequest.Password)\n\t\tif err != nil {\n\t\t\tHandleError(ctx, w, err)\n\t\t\treturn\n\t\t}\n\n\t\tweb.HandleSuccess(ctx, w, response)\n\t}\n}", "title": "" }, { "docid": "e100b81142249293bfc4f6ae3cead7e8", "score": "0.654007", "text": "func (cui *createUserImpl) Handle(params user.CreateUserParams) middleware.Responder {\n\tlog := logging.FromContext(params.HTTPRequest.Context())\n\t// check cookie TODO: replace to middleware!!!\n\tcookie, err := params.HTTPRequest.Cookie(apimiddleware.JWTCookieName)\n\tif err != nil {\n\t\tlog.Errorf(\"get cookie %s error, %s\", apimiddleware.JWTCookieName, err)\n\t\treturn user.NewCreateUserDefault(http.StatusUnauthorized).\n\t\t\tWithPayload(&models.ErrorResponse{Message: \"not token cookie\"}).\n\t\t\tWithXRequestID(apimiddleware.GetRequestID(params.HTTPRequest))\n\t}\n\tif cookie == nil {\n\t\treturn user.NewCreateUserDefault(http.StatusUnauthorized).\n\t\t\tWithPayload(&models.ErrorResponse{Message: \"empty token cookie\"}).\n\t\t\tWithXRequestID(apimiddleware.GetRequestID(params.HTTPRequest))\n\t}\n\tok, err := cui.tm.Check(params.HTTPRequest.Context(), cookie.Value)\n\tif err != nil {\n\t\tlog.Errorf(\"check token %s error, %s\", cookie.Value, err)\n\t\treturn user.NewCreateUserDefault(http.StatusUnauthorized).\n\t\t\tWithPayload(&models.ErrorResponse{Message: \"check token error\"}).\n\t\t\tWithXRequestID(apimiddleware.GetRequestID(params.HTTPRequest))\n\t}\n\tif !ok {\n\t\treturn user.NewCreateUserDefault(http.StatusUnauthorized).\n\t\t\tWithPayload(&models.ErrorResponse{Message: \"token expired\"}).\n\t\t\tWithXRequestID(apimiddleware.GetRequestID(params.HTTPRequest))\n\t}\n\t// check admin role\n\tuid, uname, roleID, err := cui.tm.FindUserData(cookie.Value)\n\tif err != nil {\n\t\tlog.Errorf(\"get user attributes from token %s error, %s\", cookie.Value, err)\n\t\treturn user.NewCreateUserDefault(http.StatusForbidden).\n\t\t\tWithPayload(&models.ErrorResponse{Message: \"check permission error\"}).\n\t\t\tWithXRequestID(apimiddleware.GetRequestID(params.HTTPRequest))\n\t}\n\tif roleID != handlers.UserRoleAdmin {\n\t\tlog.With(zap.Int64(\"user_id\", uid), zap.String(\"user_name\", uname)).\n\t\t\tWarnf(\"create user forbidden for user.role_id=%d\", roleID)\n\t\treturn user.NewCreateUserDefault(http.StatusForbidden).\n\t\t\tWithPayload(&models.ErrorResponse{Message: \"operation not permitted\"}).\n\t\t\tWithXRequestID(apimiddleware.GetRequestID(params.HTTPRequest))\n\t}\n\t// end check cookie and access rights\n\n\t// make password hash\n\tsalt := MakeSalt(SaltLen)\n\tpasswordHash := HashPass(salt, params.Body.Password)\n\tlog.Debugf(\"found user: %+v\", params.Body)\n\t// insert user to 
repo\n\t_user, err := cui.storage.StoreUser(params.HTTPRequest.Context(), params.Body, passwordHash)\n\tif err != nil {\n\t\tlog.Errorf(\"repo.StoreUser error, %s\", err)\n\t\treturn user.NewCreateUserDefault(http.StatusInternalServerError).\n\t\t\tWithPayload(&models.ErrorResponse{Message: \"db error happen\"}).\n\t\t\tWithXRequestID(apimiddleware.GetRequestID(params.HTTPRequest))\n\t}\n\t// all ok return new user\n\treturn user.NewCreateUserCreated().WithPayload(_user).\n\t\tWithXRequestID(apimiddleware.GetRequestID(params.HTTPRequest))\n}", "title": "" }, { "docid": "e48d217aa4e96a70ce53e805f44b13f5", "score": "0.6533881", "text": "func (a *JwtAuthenticator) CreateToken(username string) (string, error) {\n\tclaims := &UserClaims{\n\t\tStandardClaims: jwt.StandardClaims{\n\t\t\tExpiresAt: time.Now().Add(a.ttl).Unix(),\n\t\t},\n\t\tUsername: username,\n\t}\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS512, claims)\n\treturn token.SignedString([]byte(a.secret))\n}", "title": "" }, { "docid": "25c1ed95a3984b3e7fcb6c0c326b1b91", "score": "0.651193", "text": "func (a *Handler) CreateToken(w http.ResponseWriter, r *http.Request) {\n\ta.muxToken.CreateToken(w, r)\n}", "title": "" }, { "docid": "062868b568e31d5d5b52a42fb56bc59d", "score": "0.6481598", "text": "func GenToken(username string) string {\n\ttimestamp := fmt.Sprintf(\"%x\", time.Now().Unix())\n\ttokenPrefix := util.MD5([]byte(username + timestamp + tokenSalt))\n\ttoken := tokenPrefix + timestamp[:8]\n\tfmt.Printf(\"username: %s Token: %s\\n\", username, token)\n\treturn token\n}", "title": "" }, { "docid": "04721679ba6a535ae5a28d83b288cd6b", "score": "0.63497716", "text": "func (q *UserClient) CreateToken(targetUser, newTokenName string) (status *Token, err error) {\n\tif newTokenName == \"\" {\n\t\tnewTokenName = fmt.Sprintf(\"jcli-%s\", randomdata.SillyName())\n\t}\n\n\tif targetUser == \"\" {\n\t\ttargetUser = q.UserName\n\t}\n\n\tapi := fmt.Sprintf(\"/user/%s/descriptorByName/jenkins.security.ApiTokenProperty/generateNewToken\", targetUser)\n\n\tformData := url.Values{}\n\tformData.Add(\"newTokenName\", newTokenName)\n\tpayload := strings.NewReader(formData.Encode())\n\n\terr = q.RequestWithData(http.MethodPost, api,\n\t\tmap[string]string{httpdownloader.ContentType: httpdownloader.ApplicationForm}, payload, 200, &status)\n\treturn\n}", "title": "" }, { "docid": "cae05b6d2810d8ae1cfbf7af471c016f", "score": "0.63166064", "text": "func (h *Handler) createPrivilegeTokenHandle(w http.ResponseWriter, r *http.Request, params httprouter.Params, ctx *SessionContext) (interface{}, error) {\n\tvar req privilegeTokenRequest\n\tif err := httplib.ReadJSON(r, &req); err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\n\tprotoReq := &proto.CreatePrivilegeTokenRequest{}\n\n\tswitch {\n\tcase req.SecondFactorToken != \"\":\n\t\tprotoReq.ExistingMFAResponse = &proto.MFAAuthenticateResponse{Response: &proto.MFAAuthenticateResponse_TOTP{\n\t\t\tTOTP: &proto.TOTPResponse{Code: req.SecondFactorToken},\n\t\t}}\n\tcase req.WebauthnResponse != nil:\n\t\tprotoReq.ExistingMFAResponse = &proto.MFAAuthenticateResponse{Response: &proto.MFAAuthenticateResponse_Webauthn{\n\t\t\tWebauthn: wantypes.CredentialAssertionResponseToProto(req.WebauthnResponse),\n\t\t}}\n\tdefault:\n\t\t// Can be empty, which means user did not have a second factor registered.\n\t}\n\n\tclt, err := ctx.GetClient()\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\n\ttoken, err := clt.CreatePrivilegeToken(r.Context(), protoReq)\n\tif err != nil {\n\t\treturn nil, 
trace.Wrap(err)\n\t}\n\n\treturn token.GetName(), nil\n}", "title": "" }, { "docid": "28516e92a17016cc7ca72b071dc623dd", "score": "0.6315174", "text": "func Register(ctx *fasthttp.RequestCtx) {\n\t// If there is unique username, create the new user and return the login token\n\t// Else return 409 because account with username already exists\n\tif userData, exists := userExists(ctx.PostBody()); !exists {\n\t\tnewUser := createUser(userData)\n\t\tutil.SetResponse(ctx)\n\t\tif err := json.NewEncoder(ctx).Encode(newUser); err != nil {\n\t\t\tctx.Error(err.Error(), fasthttp.StatusInternalServerError)\n\t\t}\n\t} else {\n\t\tctx.Response.SetStatusCode(409)\n\t}\n}", "title": "" }, { "docid": "48340757447e959561c36f4b815937b1", "score": "0.6299531", "text": "func CreateToken(userID uint32, userMod []uint32) (string, error) {\n\n\t//\tCria um mapa de informações sobre o user\n\tclaims := jwt.MapClaims{}\n\n\t//\tAutoriza o usuario\n\tclaims[\"authorized\"] = true\n\n\t//\tid do usuario\n\tclaims[\"userID\"] = userID\n\n\t//\tModulos do usuario\n\tclaims[\"userMod\"] = userMod\n\n\t//\tToken expira depois de 8 hrs\n\tclaims[\"exp\"] = time.Now().Add(time.Hour * 8).Unix()\n\n\t//\tCriptografa o token no metodo HS256\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)\n\n\t//\tAPI_SECRET esta nas variaveis de ambiente\n\treturn token.SignedString([]byte(os.Getenv(\"API_SECRET\")))\n}", "title": "" }, { "docid": "0a35d6ebc496d96e589bc4ab91009866", "score": "0.62664", "text": "func token() httprouter.Handle {\n\treturn func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\t\t// Pull user data out of request...\n\t\tgrantType := r.PostFormValue(\"grant_type\")\n\t\tusername := r.PostFormValue(\"username\")\n\t\tpassword := r.PostFormValue(\"password\")\n\t\tif grantType == \"\" || username == \"\" || password == \"\" {\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t\tlog.Printf(\"User %s has requested a token. 
\\n\", username)\n\t\t// Create the token\n\t\t// TODO - actually validate the user and assign roles\n\t\troles := []string{\"ROLE_USER\", \"ROLE_ADMIN\"}\n\t\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{\n\t\t\t\"roles\": roles,\n\t\t\t\"admin\": true,\n\t\t\t\"name\": username,\n\t\t\t\"exp\": time.Now().Add(time.Hour * 1).Unix(),\n\t\t})\n\t\t// Sign the token with our secret\n\t\ttokenString, _ := token.SignedString(mySigningKey)\n\t\ttk := TokenStruct{\n\t\t\tAccessToken: tokenString,\n\t\t\tExpiresIn: 3600,\n\t\t\tTokenType: \"bearer\",\n\t\t\tRefreshToken: \"ToBeImplemented\",\n\t\t}\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tjson.NewEncoder(w).Encode(&tk)\n\t}\n}", "title": "" }, { "docid": "91ed51e04254dbfd6e81b3b35d87b169", "score": "0.6231001", "text": "func Create(t *types.Token, c chan<- *types.TokenOutput) {\n\tcreated, err := client.UserTokens.CreateOne(\n\t\tdb.UserTokens.Lifetime.Set(time.Unix(t.Expiration, 0)),\n\t\tdb.UserTokens.Token.Set(t.Token),\n\t\tdb.UserTokens.User.Link(db.User.ID.Equals(t.UserId)),\n\t).Exec(database.Context)\n\n\tif err != nil {\n\t\tr := &types.TokenOutput{\n\t\t\tErr: errors.New(\"token not created\"),\n\t\t\tErrStatus: 500,\n\t\t\tToken: nil,\n\t\t}\n\n\t\tc <- r\n\t\treturn\n\t}\n\n\tc <- &types.TokenOutput{Err: nil, ErrStatus: 0, Token: created}\n}", "title": "" }, { "docid": "309c8a41e842c038bcbb167bf287bb81", "score": "0.62244004", "text": "func (c *Accounts) Token(w http.ResponseWriter, r *http.Request) {\n params := model.Account{}\n c.App.Parse(r, &params)\n\n account := model.Account{}\n row := c.App.DB.QueryRow(\"SELECT a.username, a.password FROM `accounts` a WHERE a.username = ?\", params.Username)\n err := row.Scan(&account.Username, &account.Password)\n\n if err != nil {\n c.App.NotFound(w)\n return\n }\n\n err = bcrypt.CompareHashAndPassword([]byte(account.Password), []byte(params.Password))\n\n if err != nil {\n c.App.Error(w, \"Password incorrect\", http.StatusBadRequest)\n return\n }\n\n // remove current token (if exists)\n c.App.DB.Exec(\"DELETE FROM `tokens` WHERE `username` = ?\", account.Username)\n\n // generate token hash\n data := make([]byte, 10)\n rand.Read(data)\n hash := fmt.Sprintf(\"%x\", sha256.Sum256(data))\n\n // store token on database\n token := model.Token{account.Username, hash}\n stmt, err := c.App.DB.Prepare(\"INSERT INTO `tokens` (`username`, `token`) VALUES (?, ?)\")\n defer stmt.Close()\n\n if err != nil {\n c.App.Error(w, err.Error(), http.StatusBadRequest)\n return\n }\n\n _, err = stmt.Exec(token.Username, token.Token)\n\n if err != nil {\n c.App.Error(w, err.Error(), http.StatusBadRequest)\n return\n }\n\n // success\n c.App.Json(w, token, http.StatusOK)\n}", "title": "" }, { "docid": "73d2e1bd1cbf1458fbcfdd99fa35a8c0", "score": "0.62242234", "text": "func CreateToken(username, password string) {\n\thi.token = base64.StdEncoding.EncodeToString([]byte(username + \":\" + password))\n}", "title": "" }, { "docid": "1da14c5dc74d2a2c3b3f432be1a1a0a2", "score": "0.62061787", "text": "func CreateUserHandler(c *gin.Context) {\n\tvar user User\n\terr := c.BindJSON(&user)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(400, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\tquery := fmt.Sprintf(GetUserByEmail, user.Email)\n\tvar results []User\n\terr = GetDb().Select(&results, query)\n\n\tif len(results) > 0 {\n\t\tc.AbortWithStatusJSON(400, gin.H{\"error\": \"User already exists\"})\n\t\treturn\n\t}\n\tsalt := MakeSalt(24)\n\tuser.Salt = salt\n\n\tsecret := 
os.Getenv(\"SECRET_KEY\")\n\tpass := Encrypt(user.Password+salt, secret)\n\tuser.Password = pass\n\n\tres, err := GetDb().NamedExec(CreateUser, user)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(400, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\tid, _ := res.LastInsertId()\n\tuser.ID = int32(id)\n\tuser.BoardOrder = \"[]\"\n\ttoken, err := CreateJwt(user)\n\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(400, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tc.JSON(200, gin.H{\"jwt\": token, \"ID\": id})\n}", "title": "" }, { "docid": "47bc84617f4f37691ccd8c747f110c62", "score": "0.6204995", "text": "func createToken(user models.User) (string, string) {\n\tvar header string = b64Encode(`{\"alg\":\"HS256\",\"typ\":\"JWT\"}`)\n\tvar payload string = b64Encode(`{\"email\":` + user.Email + `}`)\n\tvar signature string = header + \".\" + payload\n\t// Assign secret key\n\tsecretHash := sha256.New()\n\tsecretHash.Write([]byte(fmt.Sprintf(\"%v\", time.Now())))\n\tsecretKey := hex.EncodeToString(secretHash.Sum(nil))\n\t// Assign signature hash string\n\tjwt := signature + \".\" + hex.EncodeToString(secretHash.Sum([]byte(signature)))\n\treturn jwt, string(secretKey)\n}", "title": "" }, { "docid": "341efb8503cd0d9492a7bcafb2411dcc", "score": "0.62033457", "text": "func CreatUserHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\terr := r.ParseForm()\n\tusername := r.Form.Get(\"username\")\n\tauthentication := r.Form.Get(\"authentication\")\n\tuserType, _ := strconv.ParseUint(r.Form.Get(\"type\"), 10, 8)\n\n\tuser := new(models.User)\n\tif username == \"\" || authentication == \"\" || userType == 0 {\n\t\terr = errors.New(\"Empty username or authentication\")\n\t} else if len(username) <= 20 && len(authentication) == 32 {\n\t\tuser := models.User{Username: username, Authentication: authentication, Type: uint8(userType)}\n\t\tlog.Printf(\"[GetUserHandler] Creating username=%s, type=%d\\n\", username, userType)\n\t\terr = user.CreateUser()\n\t} else {\n\t\terr = errors.New(\"Illegal username or authentication\")\n\t}\n\n\tif err != nil {\n\t\tlog.Printf(\"[GetUserHandler] Error: %s\\n\", err.Error())\n\t}\n\tuser.GetUserByUsername(username)\n\n\tSendResponse(w, user, err)\n}", "title": "" }, { "docid": "731b0687a1cadcb4d495426758ab40cd", "score": "0.6201452", "text": "func (s JWTAuthService) CreateToken(user models.User) string {\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{\n\t\t\"id\": user.ID,\n\t\t\"name\": user.Name,\n\t\t\"email\": *user.Email,\n\t})\n\n\ttokenString, err := token.SignedString([]byte(s.env.JWTSecret))\n\n\tif err != nil {\n\t\ts.logger.Error(\"JWT validation failed: \", err)\n\t}\n\n\treturn tokenString\n}", "title": "" }, { "docid": "3a992b455fed4b595dcf62492f8fb95b", "score": "0.62009263", "text": "func CreateToken(username, role string) (string, error) {\n\tlog.Println(\"util--CreateToken start.\")\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{\n\t\t\"username\": username,\n\t\t\"role\": role,\n\t\t\"expiryTime\": time.Now().Add(time.Minute * 5).Format(time.RFC822),\n\t})\n\ttokenString, err := token.SignedString([]byte(\"secretKey\"))\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tlog.Println(\"util--CreateToken end.\")\n\treturn tokenString, nil\n}", "title": "" }, { "docid": "a42c0b072665017b17eaa7ee81a9365a", "score": "0.61589235", "text": "func GenerateToken(username string) (token *schema.Token, err error) {\n\tjwtToken, expiresAt, err := GenerateJWT(username)\n\tif err != nil 
{\n\t\treturn\n\t}\n\n\ttoken = &schema.Token{\n\t\tToken: jwtToken,\n\t\tExpiresAt: expiresAt,\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "78ad4e9ba57320f288b8b3cc4c7d4664", "score": "0.6154752", "text": "func (r *AdminService) GenToken(userName string, adminuserreq *AdminUserReq) *AdminGenTokenCall {\n\tc := &AdminGenTokenCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userName = userName\n\tc.adminuserreq = adminuserreq\n\treturn c\n}", "title": "" }, { "docid": "2c481770d4fe282210aa9a8af28d6d4b", "score": "0.61525303", "text": "func postLoginHandler(formatter *render.Render, repo repository) http.HandlerFunc {\n return func(w http.ResponseWriter, req *http.Request) {\n var user User\n var token Token\n payload, _ := ioutil.ReadAll(req.Body)\n err := json.Unmarshal(payload, &user)\n if err != nil {\n formatter.JSON(w, http.StatusBadRequest, \"Failed to parse user.\")\n return\n }\n oldPassword := user.Password\n user, err = repo.getUserByUsername(user.Username)\n if err != nil {\n formatter.JSON(w, http.StatusBadRequest, \"Username/password not valid\")\n return\n }\n compare := user.comparePassword(oldPassword)\n if compare == false {\n formatter.JSON(w, http.StatusBadRequest, \"Username/password not valid\")\n return\n }\n token.UserID = user.ID\n err = repo.addToken(token)\n if err != nil {\n formatter.JSON(w, http.StatusInternalServerError, \"Failed to create token.\")\n return\n }\n formatter.JSON(w, http.StatusOK, token)\n }\n}", "title": "" }, { "docid": "f6a313a2b88c332535ca5f6d3d24cba7", "score": "0.6134784", "text": "func (r *AdminService) GenToken(userName string, adminuserreq *AdminUserReq) *AdminGenTokenCall {\n\tc := &AdminGenTokenCall{s: r.s, urlParams_: make(gensupport.URLParams)}\n\tc.userName = userName\n\tc.adminuserreq = adminuserreq\n\treturn c\n}", "title": "" }, { "docid": "63e806d3a1234dc464718cc2a113f4b0", "score": "0.6132776", "text": "func generateToken(u model.User) string {\n\tnow := time.Now().Unix()\n\treturn fmt.Sprintf(\"%s%x\", u.Id, now)\n}", "title": "" }, { "docid": "145bd623ff294cfe78d81a381443eb5a", "score": "0.6132727", "text": "func (u *Users) Token(ctx context.Context, w http.ResponseWriter, r *http.Request) error {\n\n\tctx, span := trace.StartSpan(ctx, \"Handlers.Users.Token\")\n\tdefer span.End()\n\n\tv, ok := ctx.Value(web.KeyValues).(*web.Values)\n\tif !ok {\n\t\treturn errors.New(\"web value missing from context\")\n\t}\n\n\temail, pass, ok := r.BasicAuth()\n\tif !ok {\n\t\terr := errors.New(\"must provide email and password in BasicAuth\")\n\t\treturn web.NewRequestError(err, http.StatusUnauthorized)\n\t}\n\n\tclaims, err := user.Authenticate(ctx, u.DB, v.Start, email, pass)\n\tif err != nil {\n\t\tswitch err {\n\t\tcase user.ErrAuthenticationFailure:\n\t\t\treturn web.NewRequestError(err, http.StatusUnauthorized)\n\t\tdefault:\n\t\t\t// return errors.Wrap(err, \"authenticating\")\n\t\t\treturn err\n\t\t}\n\t}\n\n\tvar tkn struct {\n\t\tToken string `json:\"token\"`\n\t}\n\ttkn.Token, err = u.authenticator.GenerateToken(claims)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"generating token\")\n\t}\n\n\treturn web.Respond(ctx, w, tkn, http.StatusOK)\n}", "title": "" }, { "docid": "42d9f6b464edb74216669d5512449af5", "score": "0.60985", "text": "func CreateTokenEndpoint(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"content-type\", \"application/json\")\n\temail := r.FormValue(\"email\")\n\tpass := r.FormValue(\"password\")\n\tres, e := db.GetOneByFilter(db.GetUsersColl(), bson.M{\"email\": email})\n\tif e != 
nil {\n\t\tapp.WriteLog(e.Error())\n\t\tw.Write([]byte(`{ \"error\": \"` + e.Error() + `\" }`))\n\t\treturn\n\t}\n\te = bcrypt.CompareHashAndPassword([]byte(res[\"password\"].(string)), []byte(pass))\n\tif e != nil {\n\t\tapp.WriteLog(e.Error())\n\t\tw.Write([]byte(`{ \"error\": \"` + e.Error() + `\" }`))\n\t\treturn\n\t}\n\ttimeEx := app.TimeExpire(1 * time.Hour)\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{\n\t\t\"email\": email,\n\t\t\"expire\": timeEx,\n\t})\n\ttokenString, e := token.SignedString(jwtSecret)\n\tif e != nil {\n\t\tapp.WriteLog(e.Error())\n\t\tw.Write([]byte(`{ \"error\": \"` + e.Error() + `\" }`))\n\t\treturn\n\t}\n\t_, e = db.Create(db.GetTokenColl(), bson.M{\"token\": tokenString, \"expire\": timeEx})\n\tif e != nil {\n\t\tw.Write([]byte(`{ \"error\": \"` + e.Error() + `\" }`))\n\t\treturn\n\t}\n\tw.Write([]byte(`{ \"token\": \"` + tokenString + `\", \"expiration time\": \"` + timeEx + `\" }`))\n}", "title": "" }, { "docid": "17ae7ec0234a2e1dc9a5c9889497a627", "score": "0.6079119", "text": "func HandleCreate(context router.Context) error {\n\n\t// Check csrf token\n\terr := authorise.AuthenticityToken(context)\n\tif err != nil {\n\t\treturn router.NotAuthorizedError(err)\n\t}\n\n\t// Setup context\n\tparams, err := context.Params()\n\tif err != nil {\n\t\treturn router.InternalError(err)\n\t}\n\n\t// Check for email duplicates\n\temail := params.Get(\"email\")\n\tif len(email) > 0 {\n\n\t\tif len(email) < 3 || !strings.Contains(email, \"@\") {\n\t\t\treturn router.InternalError(err, \"Invalid email\", \"Please just miss out the email field, or use a valid email.\")\n\t\t}\n\n\t\tcount, err := users.Query().Where(\"email=?\", email).Count()\n\t\tif err != nil {\n\t\t\treturn router.InternalError(err)\n\t\t}\n\t\tif count > 0 {\n\t\t\treturn router.NotAuthorizedError(err, \"User already exists\", \"Sorry, a user already exists with that email.\")\n\t\t}\n\t}\n\n\t// Check for invalid or duplicate names\n\tname := params.Get(\"name\")\n\tif len(name) < 2 {\n\t\treturn router.InternalError(err, \"Name too short\", \"Please choose a username longer than 2 characters\")\n\t}\n\n\tcount, err := users.Query().Where(\"name=?\", name).Count()\n\tif err != nil {\n\t\treturn router.InternalError(err)\n\t}\n\tif count > 0 {\n\t\treturn router.NotAuthorizedError(err, \"User already exists\", \"Sorry, a user already exists with that name, please choose another.\")\n\t}\n\n\t// Set some defaults for the new user\n\tparams.SetInt(\"status\", status.Published)\n\tparams.SetInt(\"role\", users.RoleReader)\n\tparams.SetInt(\"points\", 1)\n\n\t// Now try to create the user - NB AllowedParamsAdmin, we allow points etc on create as we explicitly set them\n\tid, err := users.Create(params.Clean(users.AllowedParamsAdmin()))\n\tif err != nil {\n\t\treturn router.InternalError(err, \"Error\", \"Sorry, an error occurred creating the user record.\")\n\t}\n\n\tcontext.Logf(\"#info Created user id,%d\", id)\n\n\t// Find the user again so we can save login\n\tuser, err := users.Find(id)\n\tif err != nil {\n\t\tcontext.Logf(\"#error parsing user id: %s\", err)\n\t\treturn router.NotFoundError(err)\n\t}\n\n\t// Save the fact user is logged in to session cookie\n\terr = loginUser(context, user)\n\tif err != nil {\n\t\treturn router.InternalError(err)\n\t}\n\n\t// Redirect to root\n\treturn router.Redirect(context, \"/?message=welcome\")\n}", "title": "" }, { "docid": "3581ad46db6b4886e25afad029a05215", "score": "0.605268", "text": "func createUser(res http.ResponseWriter, req 
*http.Request, _ httprouter.Params) {\n\tvar body request\n\trawData, err := getBody(req)\n\tif err != nil {\n\t\thandleError(res, \"Internal error\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif auth := isAuthorized(req, rawData); !auth {\n\t\thandleError(res, \"Unauthorized\", http.StatusUnauthorized)\n\t\treturn\n\t}\n\n\tif err = json.Unmarshal(rawData, &body); err != nil {\n\t\thandleError(res, \"Error processing request\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tuname, ok := body[\"username\"]\n\tif !ok {\n\t\thandleError(res, \"Error processing request: Missing Username\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tpwd, ok := body[\"password\"]\n\tif !ok {\n\t\thandleError(res, \"Error processing request: Missing Password\", http.StatusBadRequest)\n\t\treturn\n\t}\n\temail, ok := body[\"email\"]\n\tif !ok {\n\t\thandleError(res, \"Error processing request: Missing Email\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif isRegistered(uname) {\n\t\thandleError(res, \"Error: Username Taken\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tusr, err := user.NewUser(uname, pwd, email)\n\tif err != nil {\n\t\thandleError(res, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tif err := insertUser(usr); err != nil {\n\t\thandleError(res, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tres.WriteHeader(http.StatusCreated)\n\tjson.NewEncoder(res).Encode(response{\n\t\tStatus: \"ok\",\n\t\tResult: map[string]string{\n\t\t\t\"username\": usr.Username,\n\t\t\t\"verifier\": usr.Verifier,\n\t\t\t\"email\": usr.Email,\n\t\t\t\"identity\": usr.IH,\n\t\t\t\"privateKey\": hex.EncodeToString(usr.PrivateKey),\n\t\t},\n\t})\n}", "title": "" }, { "docid": "42f6b68a78321c10e1f2b00395fd7ba7", "score": "0.60374534", "text": "func (c *Client) createUserHandler(server string, username string) {\n\tif !c.Connect(server) {\n\t\treturn\n\t}\n\n\t// Generate new key pair\n\tprivKey, pubKey := util.GenKeyPair()\n\n\t// Send a request to register the user\n\tregUserMsg := &websock.RegisterUserMessage{\n\t\tUsername: username,\n\t\tPublicKey: util.MarshalPublic(pubKey)}\n\n\twebsock.Send(c.ws, &websock.Message{Type: websock.RegisterUser, Message: regUserMsg})\n\n\t_, err := c.wsReader.GetNext()\n\tif err != nil {\n\t\tc.gui.ShowDialog(\"Did not get a response from the server\", nil)\n\t\treturn\n\t}\n\n\t// Save private key to file\n\tsavePrivKey(username, privKey)\n\n\tc.gui.ShowDialog(\"User created. 
You can now log in.\", nil)\n}", "title": "" }, { "docid": "62c74b60f3815487fc9d73d9febafc2f", "score": "0.60369277", "text": "func MiddlewareCreateUser(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {\n\n\t\t//for now everyone can create a user for testing purposes, otherwise only a logged in user can create another user\n\t\t/*//validate the token\n\t\t_, err := security.ValidateToken(w, r)\n\t\tif err != nil {\n\t\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\t\tw.Write([]byte(err.Error()))\n\t\t\treturn\n\t\t}*/\n\n\t\t//deserializes the user from the request\n\t\tvar user data.Person\n\t\terr := json.NewDecoder(r.Body).Decode(&user)\n\t\tif err != nil {\n\t\t\trw.WriteHeader(http.StatusBadRequest)\n\t\t\trw.Write([]byte(err.Error()))\n\t\t\treturn\n\t\t}\n\n\t\t//validates the user\n\t\terr = user.ValidateUser()\n\t\tif err != nil {\n\t\t\trw.WriteHeader(http.StatusBadRequest)\n\t\t\trw.Write([]byte(err.Error()))\n\t\t\treturn\n\t\t}\n\n\t\t//hash the password\n\t\thashedP, err := HashPassword(user.Password)\n\t\tif err != nil {\n\t\t\trw.WriteHeader(http.StatusInternalServerError)\n\t\t\trw.Write([]byte(err.Error()))\n\t\t\treturn\n\t\t}\n\n\t\tuser.Password = string(hashedP)\n\n\t\tvar group data.Group\n\t\tperson := r.Context().Value(KeyUser{}).(data.Person)\n\n\t\t//check if the users group exists\n\t\tif person.GroupName != \"\" {\n\t\t\tfoundGroup := data.DB.Where(\"name = ?\", person.GroupName).First(&group)\n\t\t\tif foundGroup.Error != nil {\n\t\t\t\trw.WriteHeader(http.StatusBadRequest)\n\t\t\t\trw.Write([]byte(foundGroup.Error.Error()))\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\t//adds the user to the context and calls the next handler if there are no errors\n\t\tctx := context.WithValue(r.Context(), KeyUser{}, user)\n\t\tr = r.WithContext(ctx)\n\t\tnext.ServeHTTP(rw, r)\n\t})\n}", "title": "" }, { "docid": "08e3347945cba15c4c9688c9b6e57eaa", "score": "0.6031465", "text": "func UserCreate(w http.ResponseWriter, r *http.Request) {\n\n}", "title": "" }, { "docid": "a1d34d0cc49185674114d46abda3d714", "score": "0.60255396", "text": "func createUser(rw http.ResponseWriter, request *http.Request) {\n\tdecoder := json.NewDecoder(request.Body)\n\n\tvar t User\n\terr := decoder.Decode(&t)\n\n\tif err != nil {\n\t\tfmt.Fprintf(rw, \"Unable to create User - reason: %s\\n\", err)\n\t\treturn\n\t}\n\texistingUser, err := findUsersByUserID(t.Id)\n\tif err != nil {\n\t\tfmt.Fprintf(rw, \"Unable to create User - reason: %s\\n\", err)\n\t\treturn\n\t}\n\tif len(existingUser) > 0 {\n\t\tfmt.Fprintf(rw, \"User \\\"%s\\\" already exists\\n\", t.Id)\n\t\treturn\n\t}\n\tSha2512hasher := sha512.New()\n\tSha2512hasher.Write([]byte(t.Password))\n\tpassHash := hex.EncodeToString(Sha2512hasher.Sum(nil))\n\tt.Password = passHash\n\tinsertedUser, err := InsertUser(t)\n\tif err != nil {\n\t\tfmt.Fprintf(rw, \"Unable to create User - reason: %s\\n\", err)\n\t\treturn\n\t}\n\tfmt.Fprintf(rw, \"UserId %s created successfully for %s \\n\", t.Id, t.Name)\n\tvar _ = insertedUser\n}", "title": "" }, { "docid": "898fb643917df26b51131ce75f2681b6", "score": "0.60217166", "text": "func CreateToken(user core.User, exp int64) string {\n\tvar MySigningKey = []byte(os.Getenv(\"SECRET\"))\n\ttoken := jwt.New(jwt.SigningMethodHS256)\n\tclaims := token.Claims.(jwt.MapClaims)\n\tclaims[\"SystemRole\"] = user.SystemRole\n\tclaims[\"Username\"] = user.Username\n\tclaims[\"Id\"] = user.Id\n\tclaims[\"GroupId\"] = user.GroupIds[0]\n\tclaims[\"exp\"] = exp\n\ttokenString, 
_ := token.SignedString(MySigningKey)\n\treturn tokenString\n}", "title": "" }, { "docid": "0d4dd3abdf3271470789ea0b137517d3", "score": "0.6016233", "text": "func CreateToken(userID string) string {\n\trand.Seed(time.Now().UnixNano())\n\tvar letters = []rune(\"ab0cdefghi9i8jklmn123opqrst547uv9wxyz1234ABCDEFGHIJKLMNOfg5386PQRSTUVWXYZ56789\")\n\tb := make([]rune, 4)\n\tfor i := range b {\n\t\tb[i] = letters[rand.Intn(len(letters)-1)]\n\t}\n\n\trandByte4 := string(b[:4])\n\n\treturn fmt.Sprintf(\"%s%s\", randByte4, userID)\n}", "title": "" }, { "docid": "31b6b81b43c101eb05222a37d8154123", "score": "0.60094315", "text": "func registerHandler(w http.ResponseWriter, req *http.Request) {\n\tvar user User\n\terr := req.ParseForm()\n\n\tif err != nil && req.PostForm[\"username\"] != nil && req.PostForm[\"password\"] != nil {\n\t\tw.WriteHeader(http.StatusForbidden)\n\t\tfmt.Println(\"Error in register request format\")\n\t\tw.Write([]byte(\"register request format is wrong.\\n\"))\n\t\treturn\n\t}\n\n\tuser.Username = req.PostForm[\"username\"][0]\n\tuser.Password = req.PostForm[\"password\"][0]\n\n\tif database.CheckKeyExist([]byte(\"users\"), []byte(user.Username)) {\n\t\tw.WriteHeader(http.StatusForbidden)\n\t\tfmt.Println(\"Error in register info\")\n\t\tw.Write([]byte(\"Existent username.\\n\"))\n\t\treturn\n\t}\n\n\tdatabase.Update([]byte(\"users\"), []byte(user.Username), []byte(user.Password))\n\n\tw.WriteHeader(http.StatusOK)\n\tw.Write([]byte(\"Create a account\\n\"))\n}", "title": "" }, { "docid": "97479fd916b92a0beacd2095d4c1dad8", "score": "0.5997164", "text": "func (ct *CreateToken) Execute(user entities.User) (token string, err error) {\n\ttoken, err = ct.userProxy.CreateToken(user)\n\tif err != nil {\n\t\tct.logger.Errorf(\"failed to create an user's token: %s\", err.Error())\n\t\treturn \"\", err\n\t}\n\n\treturn token, err\n}", "title": "" }, { "docid": "99e04e867e0a2ff190cedd764c2d04d0", "score": "0.59732074", "text": "func handler(user request) (response, error) {\n\n\tUserID := CreateUserInDB(user)\n\n\t// If CreateUserInDB returns without error, return the new UserID\n\tif UserID != \"Error in Adding Users\" {\n\t\treturn response{\n\t\t\tUser: fmt.Sprintf(UserID),\n\t\t}, nil\n\t} else {\n\t\treturn response{\n\t\t\tUser: fmt.Sprintf(\"Creation of User %s failed\", user.Username),\n\t\t}, nil\n\t}\n}", "title": "" }, { "docid": "4057d2082e2068f35204f0c9061f002a", "score": "0.5961758", "text": "func NewToken(user *session.User) (string, error) {\n\treturn generator.CreateToken(fireauth.Data{\n\t\t\"uid\": user.Email,\n\t\t\"username\": user.Username,\n\t\t\"role\": map[string]bool{\n\t\t\t\"dev\": true, // TODO get these from Okta\n\t\t\t\"qa\": true,\n\t\t},\n\t}, authOptions)\n}", "title": "" }, { "docid": "9444dd5798869113d3ba7942a4165e90", "score": "0.59382933", "text": "func handleMsgCreateUser(ctx sdk.Context, keeper Keeper, msg MsgCreateUser) sdk.Result {\n\n\t// check if Creator is valid\n\tif true {\n\n\t}\n\n\t// check if user already exists\n\tif keeper.GetUser(ctx, msg.NewUser).Alias == \"\" {\n\t\tkeeper.InitUser(ctx, msg.NewUser, msg.Alias)\n\t} else {\n\t\tkeeper.SetUserName(ctx, msg.NewUser, msg.Alias)\n\t}\n\n\t// this has been moved to keeper.InitUser, but maybe will be back here some day?\n\t// give starting credits\n\t//if(!keeper.GetPublicPoolCredits(ctx).IsZero()) {\n\t//keeper.SubtractPublicPoolCredits(ctx, sdk.NewInt64Coin(\"credits\", 1))\n\t//keeper.coinKeeper.AddCoins(ctx, msg.NewUser, sdk.Coins{sdk.NewInt64Coin(\"credits\", 1)})\n\t//}\n\n\treturn 
sdk.Result{}\n}", "title": "" }, { "docid": "f22336d37057f553fcecdafaf394660b", "score": "0.59313875", "text": "func (this *authSvc) CreateToken(user *cmdb.User) (*Token, error) {\n\n\tclaims := &Claims {\n\n\t\tStandardClaims: jwt.StandardClaims {\n\t\t\tIssuedAt: time.Now().Unix(),\n\t\t\tExpiresAt: time.Now().Add(this.AuthMaxAge).Unix(),\n\t\t},\n\n\t\tAuthClaims: AuthClaims {\n\t\t\tUsername: user.Username,\n\t\t\tLocked: user.Locked,\n\t\t\tRole: user.Role,\n\t\t},\n\t}\n\n\treturn &Token{jwt.NewWithClaims(jwt.GetSigningMethod(`RS256`), claims)}, nil\n}", "title": "" }, { "docid": "0b8692f8c649a2474676f7df7650bfc4", "score": "0.59302354", "text": "func (s *APIserver) CreateUser() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\t// VALIDATE ON FRONTEND\n\n\t\ttype checkUser struct {\n\t\t\tTelegramID string `json:\"telegram_id\"`\n\t\t}\n\n\t\tvar u checkUser\n\t\tif err := c.BindJSON(&u); err != nil {\n\t\t\ts.logger.Error(err)\n\t\t\tc.JSON(http.StatusConflict, store.User{})\n\t\t\treturn\n\t\t}\n\n\t\ttgid := u.TelegramID\n\t\tif tgid == \"\" {\n\t\t\ts.logger.Error(\"telegram-id is empty\")\n\t\t\tc.JSON(http.StatusConflict, store.User{})\n\t\t\treturn\n\t\t}\n\t\t// Generate \"new\" account\n\t\taccount, _ := nft.GenerateWallet()\n\t\t// Create new user\n\t\tnewUser := store.User{\n\t\t\tTelegramID: tgid,\n\t\t\t// TODO REPLACE TO AUTOGENERATION\n\t\t\tSeedPhrase: \"tag volcano eight thank tide danger coast health above argue embrace heavy\",\n\t\t\tAddrWallet: account.Address.Hex(),\n\t\t}\n\n\t\tif err := s.store.Database.Create(&newUser).Error; err != nil {\n\t\t\tc.JSON(http.StatusConflict, store.User{})\n\t\t\ts.logger.Error(\"%v\", err)\n\t\t\treturn\n\t\t}\n\t\t// DEV TEST\n\t\t// data, err := os.ReadFile(\"testdata.txt\")\n\t\t// if err != nil {\n\t\t// \ts.logger.Error(err)\n\t\t// }\n\t\t// s.logger.Info(data)\n\t\t// sign, err := wallet.SignDataWithPassphrase(account, \"tag volcano eight thank tide danger coast health above argue embrace heavy\", \"text/plain\", data)\n\t\t// fmt.Println(sign, err)\n\t\t// addrm := account.Address\n\t\t// fmt.Println(addrm, err)\n\t\t// nft.SignTransaction(*wallet, account, data)\n\n\t\tc.JSON(http.StatusCreated, newUser)\n\t}\n}", "title": "" }, { "docid": "c79500c4f1e562043678fe9e985724ac", "score": "0.59266025", "text": "func (h *handler) createUser(w http.ResponseWriter, r *http.Request) {\n\tbody, _ := ioutil.ReadAll(r.Body)\n\tif !fastjson.Exists(body, \"username\") {\n\t\thttp.Error(w, \"Missing Field \\\"username\\\"\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tusername := fastjson.GetString(body, \"username\")\n\tif len(username) == 0 {\n\t\thttp.Error(w, \"Field \\\"username\\\" must be a string and have non-zero length\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tid, err := h.store.CreateUser(r.Context(), username)\n\tif err != nil {\n\t\tif errors.Is(err, storage.ErrUserExists) {\n\t\t\thttp.Error(w, \"User already exists\", http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t\th.logger.Error(err)\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tpayload := []byte(`{\"id\":` + strconv.FormatInt(id, 10) + `}`)\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(http.StatusCreated)\n\t_, err = w.Write(payload)\n\tif err != nil {\n\t\th.logger.Errorf(\"writing marshaled data to ResponseWriter: %v\", err)\n\t\thttp.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)\n\t}\n}", "title": "" 
}, { "docid": "9ca609c09da4a282288996f493c54221", "score": "0.5913297", "text": "func GenerateToken(username string) (string, error) {\n\ttoken := jwt.New(jwt.SigningMethodHS256)\n\t/* Create a map to store our claims */\n\tclaims := token.Claims.(jwt.MapClaims)\n\t/* Set token claims */\n\tclaims[\"username\"] = username\n\tclaims[\"exp\"] = time.Now().Add(time.Hour * 24).Unix()\n\ttokenString, err := token.SignedString(SecretKey)\n\tif err != nil {\n\t\tlog.Fatal(\"Error in Generating key\")\n\t\treturn \"\", err\n\t}\n\treturn tokenString, nil\n}", "title": "" }, { "docid": "0a7ae8220c9fd1b44912b0d4ad307eab", "score": "0.59059155", "text": "func HandleGenerateAdminToken(adminMan *admin.Manager) http.HandlerFunc {\n\ttype Request struct {\n\t\tClaims map[string]interface{} `json:\"claims\"`\n\t}\n\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\t// Get the JWT token from header\n\t\ttoken := utils.GetTokenFromHeader(r)\n\n\t\t// Load the request from the body\n\t\treq := new(Request)\n\t\t_ = json.NewDecoder(r.Body).Decode(req)\n\t\tdefer utils.CloseTheCloser(r.Body)\n\n\t\tnewToken, err := adminMan.GenerateToken(r.Context(), token, req.Claims)\n\t\tif err != nil {\n\t\t\t_ = helpers.Response.SendErrorResponse(r.Context(), w, http.StatusForbidden, err)\n\t\t\treturn\n\t\t}\n\n\t\t_ = helpers.Response.SendResponse(r.Context(), w, http.StatusOK, model.Response{Result: map[string]string{\"token\": newToken}})\n\t}\n}", "title": "" }, { "docid": "2c9497bfb3dfc148bb2b11b19cce6a5f", "score": "0.59056807", "text": "func CreateToken(userID, roleID int64, username string) string {\n\tclaims := MyCustomClaims{\n\t\tuserID,\n\t\tusername,\n\t\troleID,\n\t\tjwt.StandardClaims{\n\t\t\tExpiresAt: time.Now().Add(time.Hour * 24).Unix(),\n\t\t},\n\t}\n\n\tgodotenv.Load()\n\n\tkey := os.Getenv(\"SECRET\")\n\tmySigningKey := []byte(key)\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)\n\ttokenString, err := token.SignedString(mySigningKey)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\treturn tokenString\n}", "title": "" }, { "docid": "7e833a8c9a72ccfc2b578b45e613263b", "score": "0.5903131", "text": "func (ts *TokenStore) handleCreate(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {\n\treturn ts.handleCreateCommon(ctx, req, d, false, nil)\n}", "title": "" }, { "docid": "aece481e53873e68c125909a2785da3e", "score": "0.58948225", "text": "func (s *Handler) PostUserHandler(w http.ResponseWriter, r *http.Request) {\n\tvar p = struct {\n\t\tUsername string `json:\"username\"`\n\t}{}\n\terr := json.NewDecoder(r.Body).Decode(&p)\n\tif err != nil {\n\t\tfmt.Printf(\"error %v\", err)\n\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tfmt.Printf(\"PostUserHandler: %s \\n\", p.Username)\n\n\terr = s.service.ReserveUsername(p.Username)\n\tif err != nil {\n\t\tfmt.Printf(\"error %v\", err)\n\t\thttp.Error(w, \"user exists\", http.StatusBadRequest)\n\t\t// TODO(dankins): return proper status code if already exists\n\t\treturn\n\t}\n\n\t_, _ = fmt.Fprintf(w, \"ok\")\n\n}", "title": "" }, { "docid": "f6b9c916b6a529bad7dc0d06656ae4d0", "score": "0.5886747", "text": "func (c TokenController) Create(ctx *fasthttp.RequestCtx) {\n\ttokenRequest := new(model.TokenRequest)\n\n\tc.JSONBody(ctx, &tokenRequest)\n\tif errs, err := database.ValidateStruct(tokenRequest); err != nil {\n\t\tc.JSONResponse(ctx, model.ResponseError{\n\t\t\tErrors: errs,\n\t\t\tDetail: fasthttp.StatusMessage(fasthttp.StatusUnprocessableEntity),\n\t\t}, 
fasthttp.StatusUnprocessableEntity)\n\t\treturn\n\t}\n\n\tpassphrase := new(model2.UserPassphrase)\n\tresult := c.GetDB().QueryRowWithModel(passphrase.PassphraseQuery(c.GetDB()),\n\t\tpassphrase,\n\t\ttokenRequest.Passphrase)\n\tif result.Error != nil {\n\t\tc.JSONResponse(ctx, model.ResponseError{\n\t\t\tDetail: fasthttp.StatusMessage(fasthttp.StatusNotFound),\n\t\t}, fasthttp.StatusNotFound)\n\t\treturn\n\t}\n\n\tuser := new(model2.User)\n\tresult = c.GetDB().QueryRowWithModel(\"SELECT u.* FROM \"+user.TableName()+\" AS u \"+\n\t\t\"WHERE u.id = $1 AND u.is_active = true\",\n\t\tuser,\n\t\tpassphrase.UserID)\n\tif result.Error != nil {\n\t\tc.JSONResponse(ctx, model.ResponseError{\n\t\t\tDetail: fasthttp.StatusMessage(fasthttp.StatusNotFound),\n\t\t}, fasthttp.StatusNotFound)\n\t\treturn\n\t}\n\n\trole := new(model2.Role)\n\troleAssignment := new(model2.UserRoleAssignment)\n\terr := c.GetDB().DB.QueryRowx(\n\t\tfmt.Sprintf(\"SELECT r.code, ra.role_id FROM %s AS ra \"+\n\t\t\t\"LEFT OUTER JOIN %s AS ra2 ON ra.user_id = ra2.user_id and ra.id < ra2.id \"+\n\t\t\t\"INNER JOIN %s AS r ON ra.role_id = r.id \"+\n\t\t\t\"WHERE ra2.id IS NULL AND ra.user_id = $1\",\n\t\t\troleAssignment.TableName(),\n\t\t\troleAssignment.TableName(),\n\t\t\trole.TableName()),\n\t\tuser.ID,\n\t).Scan(&role.Code, &roleAssignment.RoleID)\n\tif err != nil {\n\t\tc.JSONResponse(ctx, model.ResponseError{\n\t\t\tDetail: fasthttp.StatusMessage(fasthttp.StatusNotFound),\n\t\t}, fasthttp.StatusNotFound)\n\t\treturn\n\t}\n\n\tjwt, _ := c.API.JWTAuth.Generate(user.ID, roleAssignment.RoleID, role.Code)\n\n\tc.JSONResponse(ctx, model.ResponseSuccessOne{\n\t\tData: model.ResponseToken{\n\t\t\tJWT: jwt,\n\t\t\tUserID: user.ID,\n\t\t\tRole: role.Code,\n\t\t},\n\t}, fasthttp.StatusCreated)\n}", "title": "" }, { "docid": "9116f192531ba211233e4ddc5421ed3a", "score": "0.58740664", "text": "func (ts *AdminTestSuite) TestAdminUserCreateWithManagementToken() {\n\tvar buffer bytes.Buffer\n\trequire.NoError(ts.T(), json.NewEncoder(&buffer).Encode(map[string]interface{}{\n\t\t\"email\": \"test2@example.com\",\n\t\t\"password\": \"test2\",\n\t}))\n\n\t// Setup request\n\tw := httptest.NewRecorder()\n\treq := httptest.NewRequest(http.MethodPost, \"/admin/users\", &buffer)\n\n\treq.Header.Set(\"Authorization\", \"Bearer foobar\")\n\treq.Header.Set(\"X-JWT-AUD\", \"op-test-aud\")\n\n\tts.API.handler.ServeHTTP(w, req)\n\trequire.Equal(ts.T(), http.StatusOK, w.Code)\n\n\tdata := models.User{}\n\trequire.NoError(ts.T(), json.NewDecoder(w.Body).Decode(&data))\n\n\tassert.NotNil(ts.T(), data.ID)\n\tassert.Equal(ts.T(), \"test2@example.com\", data.Email)\n}", "title": "" }, { "docid": "968ac4edc98a0fa50514e9be901a9b58", "score": "0.586775", "text": "func CreateToken(userID string) (string, error) {\n\tclaims := jwt.MapClaims{}\n\tclaims[\"authorized\"] = true\n\tclaims[\"userId\"] = userID\n\tclaims[\"exp\"] = time.Now().Add(time.Minute * 15).Unix()\n\tat := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)\n\ttoken, err := at.SignedString([]byte(\"SECRET_KEY\"))\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn token, nil\n}", "title": "" }, { "docid": "7222947a3bad24f5d681a0f1acf2db05", "score": "0.58673453", "text": "func (maker *PasetoMaker) CreateToken(username string, duration time.Duration) (string, error) {\n\tpayload, err := NewPayload(username, duration)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn maker.paseto.Encrypt(maker.symmetricKey, payload, nil)\n}", "title": "" }, { "docid": "feef3f12be5e581bd1c65c885e41a817", 
"score": "0.58635694", "text": "func (c *XChatHTTPClient) newToken() (string, error) {\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{\n\t\t\"ns\": \"\",\n\t\t\"is_admin\": true,\n\t\t\"exp\": time.Now().Add(30 * 24 * time.Hour).Unix(),\n\t})\n\treturn token.SignedString(c.userKey)\n}", "title": "" }, { "docid": "e8c8cce56de14a06a3d3d08a393bef5e", "score": "0.586153", "text": "func UserCreateHandler(ac *settings.AppContext, w http.ResponseWriter, r *http.Request) (int, error) {\n\tvar err error\n\tvar user models.ApiUserCreate\n\n\tbody, err := ioutil.ReadAll(io.LimitReader(r.Body, 1048576))\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tac.HandlerResp = http.StatusInternalServerError\n\t\treturn http.StatusInternalServerError, err\n\t}\n\n\terr = json.Unmarshal(body, &user)\n\n\tif err != nil {\n\t\tac.HandlerResp = 422\n\t\treturn ac.HandlerResp, err\n\t}\n\n\terr = models.CreateUser(ac.Db, user)\n\tif err, ok := err.(*pq.Error); ok {\n\t\tif err.Code.Name() == \"unique_violation\" {\n\t\t\tac.HandlerResp = http.StatusConflict\n\t\t\treturn ac.HandlerResp, err\n\t\t}\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\tac.HandlerResp = http.StatusCreated\n\tw.WriteHeader(ac.HandlerResp)\n\terr = json.NewEncoder(w).Encode(settings.JsonResp{ac.HandlerResp, \"New user created.\"})\n\tif err != nil {\n\t\tac.HandlerResp = http.StatusInternalServerError\n\t\treturn ac.HandlerResp, err\n\t}\n\treturn ac.HandlerResp, nil\n}", "title": "" }, { "docid": "5d77e812536ffeb6f4f8523b8594ad49", "score": "0.5861105", "text": "func writeNewToken(w http.ResponseWriter, r *http.Request, user *User) {\n\ttoken, err := generateToken(user.UserID)\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tw.Write([]byte(\"Unable to provide web token\"))\n\t\treturn\n\t}\n\tuser.Token = token\n\n\tJSON, err := json.Marshal(Response{token})\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\tw.WriteHeader(http.StatusAccepted)\n\tw.Write(JSON)\n}", "title": "" }, { "docid": "48ee621723740e2565bb0581e985e44e", "score": "0.58536565", "text": "func CreateUser(w http.ResponseWriter, r *http.Request) {\n\n\tuser, err := GetUser(r)\n\tif err != nil {\n\t\tlog.Println(\"route CreateUser, user context:\", err)\n\t\trender.R.JSON(w, 500, map[string]interface{}{\"error\": \"Internal server error\"})\n\t\treturn\n\t}\n\n\tif Settings.AllowRegistrations == false {\n\t\tlog.Println(\"Denied a new registration.\")\n\t\tswitch Root(r) {\n\t\tcase \"api\":\n\t\t\trender.R.JSON(w, 403, map[string]interface{}{\"error\": \"New registrations are not allowed at this time.\"})\n\t\t\treturn\n\t\tcase \"user\":\n\t\t\trender.R.HTML(w, 403, \"user/login\", \"New registrations are not allowed at this time.\")\n\t\t\treturn\n\t\t}\n\t}\n\tuser, err = user.Insert()\n\tif err != nil {\n\t\tlog.Println(\"route CreateUser, user.Insert:\", err)\n\t\tif err.Error() == \"user email exists\" {\n\t\t\trender.R.JSON(w, 422, map[string]interface{}{\"error\": \"Email already in use\"})\n\t\t\treturn\n\t\t}\n\t\tif err.Error() == \"user location invalid\" {\n\t\t\trender.R.JSON(w, 422, map[string]interface{}{\"error\": \"Location invalid. 
Please use IANA timezone database compatible locations.\"})\n\t\t\treturn\n\t\t}\n\t\trender.R.JSON(w, 500, map[string]interface{}{\"error\": \"Internal server error\"})\n\t\treturn\n\t}\n\tuser, err = user.Login()\n\tif err != nil {\n\t\tlog.Println(\"route CreateUser, user.Login:\", err)\n\t\trender.R.JSON(w, 500, map[string]interface{}{\"error\": \"Internal server error\"})\n\t\treturn\n\t}\n\n\tSessionSetValue(w, r, \"id\", user.ID)\n\n\tswitch Root(r) {\n\tcase \"api\":\n\t\trender.R.JSON(w, 200, user)\n\tcase \"user\":\n\t\thttp.Redirect(w, r, \"/user\", 302)\n\t}\n}", "title": "" }, { "docid": "033c143367d7b36622a3bf80097efd98", "score": "0.5852622", "text": "func CreateToken(user Login) string {\n\t//Retrieves phonenr and converts it to string\n\tphonenr := GetPhoneNumberForToken(user.Card)\n\tphonestring := strconv.Itoa(phonenr)\n\t//Create the Claims for the JWT\n\tclaims := jws.Claims{}\n\tclaims.SetExpiration(time.Now().AddDate(1, 0, 0))\n\tclaims.SetIssuer(\"Sjukvårdsgruppen\")\n\tclaims.SetSubject(phonestring)\n\tclaims.SetAudience(\"mobile\")\n\tclaims.SetIssuedAt(time.Now())\n\n\t//Sign it with the privatekey and return it\n\tbytes, _ := ioutil.ReadFile(\"key.unencrypted.pem\")\n\trsaPrivate, _ := crypto.ParseRSAPrivateKeyFromPEM(bytes)\n\tjwt := jws.NewJWT(claims, crypto.SigningMethodRS256)\n\tb, _ := jwt.Serialize(rsaPrivate)\n\treturn string(b)\n\n}", "title": "" }, { "docid": "af1705a62e9827cbee22c987a55b9bf6", "score": "0.58515054", "text": "func (c *Controller) CreateTestToken(username string) (string, error) {\n\tep, _ := url.Parse(\"api/V2/testmodeonly/token\")\n\taurl := c.testURL().ResolveReference(ep)\n\tbody := map[string]interface{}{\"user\": username, \"type\": \"Login\"}\n\tretbody, err := sendJSON(http.MethodPost, aurl, body)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tvar bd map[string]interface{}\n\terr = json.Unmarshal(*retbody, &bd)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn bd[\"token\"].(string), nil\n}", "title": "" }, { "docid": "e877301d0331249a2335d1da5631980c", "score": "0.58439875", "text": "func registerHandler(w http.ResponseWriter, r *http.Request) {\n\tvar user User\n\terr := json.NewDecoder(r.Body).Decode(&user)\n\tif err != nil {\n\t\tlog.Println(\"Error decoding user: \" + err.Error())\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\treturn\n\t}\n\n\ttokenString, err := createJwtClaim(user.Username, user.Password)\n\n\tif err != nil {\n\t\tlog.Println(\"Error creating Jwt claim: \" + err.Error())\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\treturn\n\t}\n\n\terr = redisClient.Set(credkey(user.Username), user.Password, 0).Err()\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tw.WriteHeader(http.StatusOK)\n\tjson.NewEncoder(w).Encode(JwtToken{Token: tokenString})\n}", "title": "" }, { "docid": "741441bfdac918223c06975c0ac71077", "score": "0.5830781", "text": "func createToken(id string, passwd string) (*string, error) {\n\torigPasswdHash, ok := configs.UIConfig.Users[id]\n\tif !ok {\n\t\treturn nil, errors.New(\"Not authenticated\")\n\t}\n\n\t// test the password to make sure it's valid\n\thash := sha512.New()\n\t_, err := hash.Write([]byte(passwd))\n\tif err != nil {\n\t\treturn nil, errors.New(\"Error authenticating\")\n\t}\n\tif origPasswdHash != hex.EncodeToString(hash.Sum(nil)) {\n\t\treturn nil, errors.New(\"Not authenticated\")\n\t}\n\n\t// set some claims\n\tclaims := make(jwt.MapClaims)\n\tclaims[username] = id\n\tclaims[password] = passwd\n\tclaims[expiration] = 
time.Now().Add(time.Hour * 1).Unix()\n\n\t// create the token\n\tjwtClaim := jwt.New(jwt.SigningMethodHS256)\n\tjwtClaim.Claims = claims\n\n\t// Sign and get the complete encoded token as string\n\ttoken, err := jwtClaim.SignedString(jwtKey)\n\treturn &token, err\n}", "title": "" }, { "docid": "9bb738a33f8a24df78808a1146101cfb", "score": "0.58259994", "text": "func CreateUser(w http.ResponseWriter, r *http.Request) {\n\n\tuser := pogos.User{}\n\terr := json.NewDecoder(r.Body).Decode(&user)\n\n\tif user.Username == \"\" {\n\t\thttp.Error(w, \"Empty username not permitted\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tpass, err := bcrypt.GenerateFromPassword([]byte(user.Pass), bcrypt.DefaultCost)\n\tif err != nil {\n\t\tl.Println(err)\n\t\thttp.Error(w, \"Error trying to encrypt password\", http.StatusInternalServerError)\n\t}\n\n\tuser.Pass = string(pass)\n\n\terr = database.WriteUser(&user)\n\n\tif err != nil {\n\t\tl.Println(\"Error while writing user\", err)\n\t\thttp.Error(w, \"Error while writing user\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tcookie, done := setToken(w, user)\n\tif !done {\n\t\treturn\n\t}\n\thttp.SetCookie(w, &cookie)\n\tjson.NewEncoder(w).Encode(user.Username)\n}", "title": "" }, { "docid": "1dc8ade88d5fea91f16ca6e0fae6905c", "score": "0.58235353", "text": "func (h *Handlers) Create(w http.ResponseWriter, r *http.Request) {\n\tif r.Method == \"POST\" {\n\t\tdecoder := json.NewDecoder(r.Body)\n\t\tvar data OriginalToken\n\t\terr := decoder.Decode(&data)\n\t\tif err != nil {\n\t\t\th.logger.Fatal(err)\n\t\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\t}\n\n\t\tourKeys := keys.LoadKeys()\n\t\tourJWT, err := magictoken.Create(data.GithubToken, data.Scopes, ourKeys)\n\t\tif err != nil {\n\t\t\th.logger.Fatal(err)\n\t\t\thttp.Error(w, \"Error JWT\", http.StatusBadRequest)\n\t\t}\n\t\ttokenResponse := &TokenResponse{\n\t\t\tJWT: ourJWT,\n\t\t}\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tw.WriteHeader(http.StatusOK)\n\t\tjson.NewEncoder(w).Encode(tokenResponse)\n\t\treturn\n\t}\n\n\thttp.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed)\n}", "title": "" }, { "docid": "8f6cb6ee7ee5e7de93e34b863fdc9228", "score": "0.5820652", "text": "func CreateUserHandler(c *gin.Context) {\n\tvar user Create\n\terr := c.BindJSON(&user)\n\tif err != nil {\n\t\tc.AbortWithStatus(http.StatusInternalServerError)\n\t}\n\n\tif CreateUser(user) {\n\t\tc.JSON(http.StatusOK, gin.H{\"status\": \"accepted\"})\n\t} else {\n\t\tc.JSON(http.StatusBadRequest, gin.H{\"status\": \"failed\", \"message\": \"Duplicate entry of unique ID\"})\n\t}\n}", "title": "" }, { "docid": "2f7052f2284652ae3ccc157eb9a6ac91", "score": "0.5817238", "text": "func JwtHAMACTokenHandler(username string) (string, error) {\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{\n\t\t\"user\": username,\n\t\t\"exp\": time.Now().Add(time.Hour * time.Duration(5)).Unix(),\n\t\t\"iat\": time.Now().Unix(),\n\t})\n\n\ttokenString, err := token.SignedString([]byte(\"SECRET_API_KEY\"))\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn tokenString, nil\n}", "title": "" }, { "docid": "753d5b8b876fcfd4d006424e0f73dac3", "score": "0.58117163", "text": "func (ap ApiToken) NewToken(owner string, level string) (status bool, token string) {\n\tap.owner = owner\n\tap.level = level\n\tap.token = uuid.New()\n\tap.requests = 0\n\t// insert it into database here\n\t// check for errors\n\t// if err {\n\t// \t\treturn (false, 
message)\n\t// } else {\n\t// \t\treturn (true, token)\n\t// }\n\ttoken = ap.token\n\tstatus = true\n\treturn\n}", "title": "" }, { "docid": "64822fffd5f1ae147675da74f0010f09", "score": "0.5804824", "text": "func HandleCreateUser(request *restful.Request, response *restful.Response) {\n\tlog.Printf(\"call HandleCreateUser\")\n\trequestRaw := new(model.RequestUserCreate)\n\terr := request.ReadEntity(requestRaw)\n\tif err != nil {\n\t\thandleInternalError(response, err)\n\t\treturn\n\t}\n\tresult, err := users.CreateUser(requestRaw)\n\tif err != nil {\n\t\thandleInternalError(response, err)\n\t\treturn\n\t}\n\tresponse.WriteHeaderAndEntity(http.StatusOK, result)\n}", "title": "" }, { "docid": "77324b6c57c5547f407b6456ab922c1e", "score": "0.57860386", "text": "func Registration(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-type\", \"application/json\")\n\tvar user models.User\n\tjson.NewDecoder(r.Body).Decode(&user)\n\tif len(user.Username) == 0 || len(user.Password) == 0 || len(user.FirstName) == 0 || len(user.LastName) == 0 || len(user.Email) == 0 {\n\t\tvar res models.StatusRes\n\t\tres.Status = 400\n\t\tres.Msg = \"All Fields are Required\"\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\tjson.NewEncoder(w).Encode(res)\n\t\treturn\n\t}\n\n\tstmt, err := createUser(user)\n\tif err == nil {\n\t\ttoken, err := helper.GetToken(user.Username, user.Password)\n\t\tif err != nil {\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\tvar res models.StatusRes\n\t\t\tres.Status = 500\n\t\t\tres.Msg = \"Error generating JWT token\"\n\n\t\t\tjson.NewEncoder(w).Encode(res)\n\t\t} else {\n\t\t\tw.WriteHeader(http.StatusOK)\n\t\t\tvar res models.UserStatusResSuccss\n\t\t\tres.Status = 200\n\t\t\tres.Msg = \"User has been created successfully\"\n\t\t\tres.Data = stmt\n\t\t\tres.Data.Token = token\n\t\t\tjson.NewEncoder(w).Encode(res)\n\t\t}\n\t} else {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tvar res models.StatusRes\n\t\tres.Status = 500\n\t\tres.Msg = \"Something wont wrong\"\n\t\tjson.NewEncoder(w).Encode(res)\n\t}\n}", "title": "" }, { "docid": "fa112cf936c8fac0de7bd7aa34d30aa0", "score": "0.57839966", "text": "func (a *AutoLoginHandler)HandleUserTokensListing(w http.ResponseWriter, r *http.Request){\n\tusername, err := mv(r, \"username\", false)\n\tif err != nil{\n\t\tsendErrorResponse(w, \"Invalid username\");\n\t\treturn\n\t}\n\n\tif !a.userHandler.GetAuthAgent().UserExists(username){\n\t\tsendErrorResponse(w, \"User not exists!\")\n\t\treturn\n\t}\n\n\ttokens := a.userHandler.GetAuthAgent().GetTokensFromUsername(username)\n\ttokensOnly := []string{}\n\tfor _, token := range tokens{\n\t\ttokensOnly = append(tokensOnly, token.Token)\n\t}\n\tjsonString, _ := json.Marshal(tokensOnly)\n\tsendJSONResponse(w, string(jsonString))\n}", "title": "" }, { "docid": "64240eea71ce633a0f776e0020e58441", "score": "0.5770868", "text": "func tokenCreate(w http.ResponseWriter, r *http.Request, t auth.Token) (err error) {\n\tctx := r.Context()\n\tvar args authTypes.TeamTokenCreateArgs\n\terr = ParseInput(r, &args)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif args.Team == \"\" {\n\t\targs.Team, err = autoTeamOwner(ctx, t, permission.PermTeamTokenCreate)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tallowed := permission.Check(t, permission.PermTeamTokenCreate,\n\t\tpermission.Context(permTypes.CtxTeam, args.Team),\n\t)\n\tif !allowed {\n\t\treturn permission.ErrUnauthorized\n\t}\n\tevt, err := event.New(&event.Opts{\n\t\tTarget: teamTarget(args.Team),\n\t\tKind: 
permission.PermTeamTokenCreate,\n\t\tOwner: t,\n\t\tRemoteAddr: r.RemoteAddr,\n\t\tCustomData: event.FormToCustomData(InputFields(r)),\n\t\tAllowed: event.Allowed(permission.PermTeamReadEvents, permission.Context(permTypes.CtxTeam, args.Team)),\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer func() { evt.Done(err) }()\n\ttoken, err := servicemanager.TeamToken.Create(ctx, args, t)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif err != nil {\n\t\tif err == authTypes.ErrTeamTokenAlreadyExists {\n\t\t\treturn &errors.HTTP{\n\t\t\t\tCode: http.StatusConflict,\n\t\t\t\tMessage: err.Error(),\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n\tw.WriteHeader(http.StatusCreated)\n\treturn json.NewEncoder(w).Encode(token)\n}", "title": "" }, { "docid": "32ae3d8a774db3e4e853840b690f8366", "score": "0.57695395", "text": "func CreateAccount(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\tusername, password := vars[\"username\"], vars[\"password\"]\n\tfmt.Printf(\"Username = %v , password = %v\\n\", username, password)\n\tuser, err := data.CreateUser(username, password)\n\tif err != nil {\n\t\t//TODO: how to marshall the AuthenticationToken ?\n\t\tjson.NewEncoder(w).Encode(err)\n\t\treturn\n\t}\n\tif err := json.NewEncoder(w).Encode(*user); err != nil {\n\t\tlog.Fatal(err)\n\t\tpanic(err)\n\t}\n}", "title": "" }, { "docid": "41c3c96f2694eeac0691dc5d2ed031e3", "score": "0.57646316", "text": "func (a *Authentication) CreateTokenEndpoint(response http.ResponseWriter, request *http.Request) {\n\n\tdefer catchAuthError(response)\n\n\tvar creds Credentials\n\tif err := json.NewDecoder(request.Body).Decode(&creds); err != nil {\n\t\tpanic(fmt.Errorf(\"payload inválido: %v\", err))\n\t}\n\n\tuser, err := dao.Authenticate(creds.Email, creds.Password)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttokenString, err := a.GenerateToken(user)\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"erro ao criar token JWT usuario %s: %v\", user.Username, err))\n\t}\n\n\tSetAuthToken(response, tokenString)\n\n\tresponse.Header().Set(\"content-type\", \"application/json\")\n\t_, _ = response.Write([]byte(`{ \"token\": \"` + tokenString + `\" }`))\n}", "title": "" }, { "docid": "a3707ce73b73434f97e3d2155447dfd3", "score": "0.5759107", "text": "func HandleCreateUser(w http.ResponseWriter, r *http.Request) {\n\ttype UserBody struct {\n\t\tID \tint64 `jsonapi:\"primary,users\" json:\"id\"`\n\t\tEmail \t\t\tstring `jsonapi:\"attr,email\" json:\"email\" validate:\"required, email\"`\n\t\tPassword \t\tstring `jsonapi:\"attr,password\" json:\"password\"`\n\t\tName \t\tstring `jsonapi:\"attr,name\" json:\"name\"`\n\t}\n\tvar user UserBody\n\tvar response helpers.ResponseData\n\n\tdb := helpers.Connect()\n\tdefer db.Close()\n\n\terr := jsonapi.UnmarshalPayload(r.Body, &user)\n\n\tif err != nil {\n\t\tlog.Print(err)\n\t\treturn\n\t}\n\thash, err := bcrypt.GenerateFromPassword([]byte(user.Password), bcrypt.MinCost)\n\n\tif err != nil {\n\t\tlog.Print(err)\n\t\treturn\n\t}\n\n\tquery, err := db.Prepare(\"INSERT INTO users (name, email, password) VALUES (?, ?, ?)\")\n\t\n\tif err != nil {\n\t\tlog.Print(err)\n\t\treturn\n\t}\n\n\tresult, err := query.Exec(user.Name, user.Email, hash)\n\n\tif err != nil {\n\t\tlog.Print(err)\n\t\treturn\n\t}\n\n\tlastID, err := result.LastInsertId()\n\tuser.Password = \"\"\n\n\tif err != nil {\n\t\tlog.Print(err)\n\t\treturn\n\t}\n\n\tuser.ID = lastID\n\n\tresponse.Status = true\n\tresponse.Message = \"User created\"\n\tresponse.Data = &user\n\n\tw.Header().Set(\"Content-Type\", 
\"application/json\")\n\tw.WriteHeader(200)\n\tjson.NewEncoder(w).Encode(response)\n}", "title": "" }, { "docid": "91c7b9c7f8106a7d7cc2f59b118e7034", "score": "0.5757336", "text": "func (s SessionCollection) TokenFromUsername(u string) string {\n\treturn Redis.HGet(\"lan/session_lookup_tables/username/\"+string(s), u).Val()\n}", "title": "" }, { "docid": "c8ed086f7fb13189b72411350696a376", "score": "0.57543314", "text": "func (s *JWTService) CreateToken(authSecret, user string) (string, error) {\n\tclaims := &Claims{\n\t\tStandardClaims: jwt.StandardClaims{\n\t\t\tExpiresAt: time.Now().Add(time.Minute * time.Duration(s.Config.AuthTimeoutInMinutes)).Unix(),\n\t\t\tIssuer: JWTIssuer,\n\t\t},\n\t\tUser: user,\n\t}\n\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)\n\treturn token.SignedString([]byte(authSecret))\n}", "title": "" }, { "docid": "e060d9ff4c0ffdcb731f5970b68e1239", "score": "0.5751319", "text": "func (ur *UserRouter) CreateHandler(w http.ResponseWriter, r *http.Request) {\n\tvar u user.User\n\terr := json.NewDecoder(r.Body).Decode(&u)\n\tif err != nil {\n\t\tresponse.HTTPError(w, r, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\n\tdefer r.Body.Close()\n\n\tctx := r.Context()\n\terr = ur.Repository.Create(ctx, &u)\n\tif err != nil {\n\t\tresponse.HTTPError(w, r, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\n\tu.Password = \"\"\n\tw.Header().Add(\"Location\", fmt.Sprintf(\"%s%d\", r.URL.String(), u.ID))\n\tresponse.JSON(w, r, http.StatusCreated, response.Map{\"user\": u})\n}", "title": "" }, { "docid": "8bcc05eaa51445f22b37f9deaadb6a6c", "score": "0.5740546", "text": "func CreateUser(c *gin.Context) {\n\tvar newUser models.PFUser\n\n\terr := c.BindJSON(&newUser)\n\tif err != nil {\n\t\tc.JSON(http.StatusBadRequest,\n\t\t\tgin.H{\"error\": \"Could not read request\"})\n\t\treturn\n\t}\n\n\terr = data.CreateUser(&newUser)\n\tif err != nil {\n\t\tc.JSON(http.StatusInternalServerError,\n\t\t\tgin.H{\"error\": \"Failed to create the user\"})\n\t\treturn\n\t}\n\n\ttkn, err := data.GetSession(&newUser)\n\tif err != nil {\n\t\tc.JSON(http.StatusInternalServerError,\n\t\t\tgin.H{\"error\": \"Could not create session token\"})\n\t\treturn\n\t}\n\tc.JSON(http.StatusOK, gin.H{\n\t\t\"token\": &tkn,\n\t})\n}", "title": "" }, { "docid": "775b38dcfd6f3a20d8bfe56650411a0e", "score": "0.5739416", "text": "func (a *prTokenApp) createToken(w http.ResponseWriter, r *http.Request) {\n\t// New token structure\n\tToken := PrToken{}\n\n\t// Read URI variables\n\t//vars := mux.Vars(r)\n\t//fmt.Println(vars)\n\n\thtmlData, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\tos.Exit(1)\n\t}\n\n\terr = json.Unmarshal(htmlData, &Token)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\tos.Exit(1)\n\t}\n\n\t//var out bytes.Buffer\n\t//json.Indent(&out, htmlData, \"\", \"\\t\")\n\t//out.WriteTo(os.Stdout)\n\n\tct := time.Now().UTC()\n\tToken.Created = ct.Format(time.RFC3339)\n\tToken.Updated = ct.Format(time.RFC3339)\n\n\tif err := Token.createToken(a.DB); err != nil {\n\t\trespondWithError(w, http.StatusBadRequest, \"Invalid request payload\")\n\t\treturn\n\t}\n\n\trespondWithJSON(w, http.StatusCreated, Token)\n}", "title": "" }, { "docid": "a082256c955fc2c23fdecba2cb8c8401", "score": "0.57321256", "text": "func (app *App) apiCreateUserHandler(w http.ResponseWriter, r *http.Request) {\n\tuser := &models.User{}\n\tjson.NewDecoder(r.Body).Decode(user)\n\n\tpass, err := bcrypt.GenerateFromPassword([]byte(user.Password), bcrypt.DefaultCost)\n\tif err != nil 
{\n\t\thttp.Error(w, \"Password encryption failed!\", http.StatusInternalServerError)\n\t\tlog.Error(err)\n\t\treturn\n\t}\n\n\tuser.Password = string(pass)\n\n\tres := app.DataBase.Create(&user)\n\tif res.Error != nil {\n\t\thttp.Error(w, \"Failed to create user!\", http.StatusInternalServerError)\n\t\tlog.Error(res.Error)\n\t\treturn\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(user)\n}", "title": "" }, { "docid": "cfea05ea8354e0dc0ec84da548c75388", "score": "0.57320005", "text": "func HandleLogin(c *gin.Context) (string, error) {\n\tvar credentials Credentials\n\tc.Bind(&credentials)\n\n\t// Look up the user\n\tuser := model.User{}\n\terr := database.DB.Get(&user, \"SELECT * FROM user WHERE email = ? LIMIT 1\", credentials.Email)\n\n\t// Fail immediately if any error occurs\n\tif err != nil {\n\t\tlog.Println(\"Error retrieving user:\", err)\n\t\tc.JSON(401, gin.H{\n\t\t\t\"status\": \"Not authorized\",\n\t\t\t\"message\": \"The email address or password was incorrect.\",\n\t\t})\n\t\tc.Abort()\n\t\treturn \"\", err\n\t}\n\n\t// Compare password with bcrypt\n\terr = bcrypt.CompareHashAndPassword([]byte(*user.Password), []byte(credentials.Password))\n\tif err != nil {\n\t\tlog.Println(\"Error authenticating user:\", err)\n\t\tc.JSON(401, gin.H{\n\t\t\t\"status\": \"Not authorized\",\n\t\t\t\"message\": \"The email address or password was incorrect.\",\n\t\t})\n\t\tc.Abort()\n\t\treturn \"\", err\n\t}\n\n\t// TODO: consider storing JWTs in Redis for token revocation\n\t// uuid := uuid.NewV4()\n\t// fmt.Println(\"UUID: \", uuid)\n\n\t// Build the token\n\ttoken := jwt_lib.New(jwt_lib.GetSigningMethod(\"HS256\"))\n\ttoken.Claims[\"userid\"] = user.GetID()\n\ttoken.Claims[\"exp\"] = time.Now().Add(time.Hour*72).Unix() * 1000\n\ttokenString, err := token.SignedString([]byte(os.Getenv(\"JWT_SECRET\")))\n\tif err != nil {\n\t\tc.JSON(500, gin.H{\n\t\t\t\"status\": \"Error\",\n\t\t\t\"message\": \"Could not log user in\",\n\t\t})\n\t\tc.Abort()\n\t\treturn \"\", err\n\t}\n\treturn tokenString, nil\n}", "title": "" }, { "docid": "1672d57edd0a11563492cbe79f90388f", "score": "0.57277936", "text": "func (u *User) GenToken() string {\n\ttk := &utils.Token{\n\t\tUserId: u.IDUser,\n\t}\n\ttoken := jwt.NewWithClaims(jwt.GetSigningMethod(\"HS256\"), tk)\n\ttokenString, _ := token.SignedString([]byte(os.Getenv(\"token_password\")))\n\treturn tokenString\n}", "title": "" }, { "docid": "8e1b624fd20da0f18cf9ba85f3fa830f", "score": "0.57215226", "text": "func (da PostgresDataAccess) TokenGenerate(ctx context.Context, username string, duration time.Duration) (rest.Token, error) {\n\ttr := otel.GetTracerProvider().Tracer(telemetry.ServiceName)\n\tctx, sp := tr.Start(ctx, \"postgres.TokenGenerate\")\n\tdefer sp.End()\n\n\texists, err := da.UserExists(ctx, username)\n\tif err != nil {\n\t\treturn rest.Token{}, err\n\t}\n\tif !exists {\n\t\treturn rest.Token{}, errs.ErrNoSuchUser\n\t}\n\n\t// If a token already exists for this user, automatically invalidate it.\n\ttoken, err := da.TokenRetrieveByUser(ctx, username)\n\tif err == nil {\n\t\tda.TokenInvalidate(ctx, token.Token)\n\t}\n\n\ttokenString, err := data.GenerateRandomToken(64)\n\tif err != nil {\n\t\treturn rest.Token{}, err\n\t}\n\n\tvalidFrom := time.Now().UTC()\n\tvalidUntil := validFrom.Add(duration)\n\n\ttoken = rest.Token{\n\t\tDuration: duration,\n\t\tToken: tokenString,\n\t\tUser: username,\n\t\tValidFrom: validFrom,\n\t\tValidUntil: validUntil,\n\t}\n\n\tdb, err := da.connect(ctx, \"gort\")\n\tif err 
!= nil {\n\t\treturn rest.Token{}, err\n\t}\n\tdefer db.Close()\n\n\tquery := `INSERT INTO tokens (token, username, valid_from, valid_until)\n\tVALUES ($1, $2, $3, $4);`\n\t_, err = db.ExecContext(ctx, query, token.Token, token.User, token.ValidFrom, token.ValidUntil)\n\tif err != nil {\n\t\treturn rest.Token{}, gerr.Wrap(errs.ErrDataAccess, err)\n\t}\n\n\treturn token, nil\n}", "title": "" }, { "docid": "949cf3e0bd4cf3c585f47ad138c08f1e", "score": "0.57210565", "text": "func (ur *UserRouter) CreateHandler(w http.ResponseWriter, r *http.Request) {\n\tvar user model.User\n\terr := json.NewDecoder(r.Body).Decode(&user)\n\tif err != nil {\n\t\tmiddleware.HTTPError(w, r, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tdefer r.Body.Close()\n\tif err := user.HashPassword(); err != nil {\n\t\tmiddleware.HTTPError(w, r, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tctx := r.Context()\n\terr = ur.Repo.Create(ctx, &user)\n\tif err != nil {\n\t\tmiddleware.HTTPError(w, r, http.StatusConflict, err.Error())\n\t\treturn\n\t}\n\tuser.Password = \"\"\n\tw.Header().Add(\"Location\", fmt.Sprintf(\"%s%d\", r.URL.String(), user.ID))\n\tmiddleware.JSON(w, r, http.StatusCreated, user)\n}", "title": "" }, { "docid": "be5b4f37e93bc439a1a1fe3edbf190d2", "score": "0.5716163", "text": "func GenerateToken(username string) ([]byte, error) {\n\ttoken := jwt.New(jwt.SigningMethodHS256)\n\n\tclaims := token.Claims.(jwt.MapClaims)\n\n\tclaims[\"username\"] = username\n\tclaims[\"exp\"] = time.Now().Add(time.Hour * 24).Unix()\n\n\ttokenString, err := token.SignedString(secret)\n\tif err != nil {\n\t\tlog.Println(`Error generating a signed token string!`)\n\t\treturn nil, err\n\t}\n\n\tjsonToken, _ := json.Marshal(tokenRes{TokenString: tokenString})\n\n\treturn jsonToken, nil\n}", "title": "" }, { "docid": "59a57b65240d0735ff1ef76e77a7777f", "score": "0.5712774", "text": "func (s Server) CreateToken(addr *net.UDPAddr) string {\n\treturn s.createToken(addr, s.timeNow())\n}", "title": "" }, { "docid": "f6d55b42f1d3a084a0ef1df43a998fbe", "score": "0.5709522", "text": "func handleCreateToken(file, ttl string) (string, error) {\n\tencode := false\n\n\tif os.Getenv(\"VAULT_TOKEN\") == \"\" {\n\t\tlog.Fatal(\"no VAULT_TOKEN environment variable set\")\n\t}\n\n\tv, err := vaultInit()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tsecretText, err := askSecret(file)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\t// set the secret in a cubbyhole with specified ttl\n\tif file != \"\" || isPiped() {\n\t\tencode = true\n\t}\n\t// set the secret in a cubbyhole with specified ttl\n\ttoken, err := setCubby(v, secretText, ttl, encode)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn token, nil\n}", "title": "" }, { "docid": "0179b1be311bdd1372f91c437674c20c", "score": "0.5692789", "text": "func (ct *CreateToken) Execute(user entities.User, tokenType string, duration int) (token string, err error) {\n\tif tokenType == \"user\" {\n\t\ttoken, err = ct.usersProxy.CreateToken(user)\n\t} else if tokenType == \"app\" {\n\t\ttoken, err = ct.authProxy.CreateAppToken(user, duration)\n\t} else {\n\t\terr = entities.ErrInvalidTokenType\n\t}\n\n\tif err != nil {\n\t\tct.logger.Errorf(\"failed to create a token: %s\", err.Error())\n\t\treturn \"\", err\n\t}\n\n\treturn token, nil\n}", "title": "" }, { "docid": "9467ec22b283cd915114c55ab426b831", "score": "0.5692591", "text": "func RegisterToken(userID int64, duration int) string {\n\ttoken := utils.GenerateToken()\n\n\t// remove the older token\n\t_, err := 
db.DB.Exec(\"delete from t_login_token where c_user_id = ?\", userID)\n\tif err != nil {\n\t\tlogrus.Error(err.Error())\n\t}\n\n\t// insert new token into database\n\t_, err = db.DB.Exec(\"insert into t_login_token (c_user_id, c_token, c_expire_time) values\"+\n\t\t\" (?, ?, now() + interval ? day)\",\n\t\tuserID, token, duration)\n\tif err != nil {\n\t\tlogrus.Error(err.Error())\n\t}\n\n\treturn token\n}", "title": "" }, { "docid": "97f9ee430cd9b6077f0698dca12d610d", "score": "0.5686668", "text": "func (o *Okta) oktaCallbackHandler(writer rest.ResponseWriter, request *rest.Request) {\n\tvar idToken map[string]string\n\tif err := request.DecodeJsonPayload(&idToken); err != nil {\n\t\trest.Error(writer, fmt.Sprintf(\"No id token received: %#v\", idToken), http.StatusBadRequest)\n\t\treturn\n\t}\n\ttoken, err := jwt.Parse(idToken[\"idToken\"], o.getKey)\n\n\t// JWT Successfull\n\tif token.Valid {\n\t\toktaClaims := token.Claims.(jwt.MapClaims)\n\t\temail := oktaClaims[\"email\"].(string)\n\t\tname := oktaClaims[\"name\"].(string)\n\t\tgroups := oktaClaims[\"groups\"].([]interface{})\n\n\t\tuser := User{\n\t\t\tName: name,\n\t\t\tUsername: email,\n\t\t\tEmail: email,\n\t\t\tIsAdmin: false,\n\t\t\tIsEngineer: false,\n\t\t}\n\n\t\tusersCol := db.Collection(\"users\")\n\t\tif err := usersCol.FindOne(bson.M{\"email\": user.Email}, &user); err != nil {\n\t\t\tif _, ok := err.(*bongo.DocumentNotFoundError); ok {\n\t\t\t\tlog.Printf(\"Users::FindOne::DocumentNotFound: email: `%v`\", user.Email)\n\t\t\t\tlog.Printf(\"Creating new user: email: `%v`\", user.Email)\n\t\t\t\tupdateUserPermissions(&user, groups)\n\t\t\t\tif err := usersCol.Save(&user); err != nil {\n\t\t\t\t\tlog.Printf(\"Users::Save::Error: %v\", err.Error())\n\t\t\t\t\trest.Error(writer, err.Error(), http.StatusInternalServerError)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tlog.Printf(\"Users::FindOne::Error: %v\", err.Error())\n\t\t\t\trest.Error(writer, err.Error(), http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\t\t} else {\n\t\t\tupdateUserPermissions(&user, groups)\n\t\t\tif err := usersCol.Save(&user); err != nil {\n\t\t\t\tlog.Printf(\"Users::Save::Error: %v\", err.Error())\n\t\t\t\trest.Error(writer, err.Error(), http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\ttoken := jwt.New(jwt.GetSigningMethod(o.JWTMiddleware.SigningAlgorithm))\n\t\tclaims := make(jwt.MapClaims)\n\n\t\tif o.JWTMiddleware.PayloadFunc != nil {\n\t\t\tfor key, value := range o.JWTMiddleware.PayloadFunc(user.Username) {\n\t\t\t\tclaims[key] = value\n\t\t\t}\n\t\t}\n\n\t\tclaims[\"sub\"] = user.Username\n\t\tclaims[\"exp\"] = time.Now().Add(o.JWTMiddleware.Timeout).Unix()\n\n\t\tif o.JWTMiddleware.MaxRefresh != 0 {\n\t\t\tclaims[\"orig_iat\"] = time.Now().Unix()\n\t\t}\n\n\t\ttoken.Claims = claims\n\t\ttokenString, err := token.SignedString(o.JWTMiddleware.Key)\n\n\t\tif err != nil {\n\t\t\to.unauthorized(writer)\n\t\t\treturn\n\t\t}\n\n\t\twriter.WriteJson(resultToken{Token: tokenString})\n\t\treturn\n\t} else if ve, ok := err.(*jwt.ValidationError); ok {\n\t\tif ve.Errors&jwt.ValidationErrorMalformed != 0 {\n\t\t\trest.Error(writer, fmt.Sprintf(\"ID Token is malformed\"), http.StatusBadRequest)\n\t\t\treturn\n\t\t} else if ve.Errors&(jwt.ValidationErrorExpired|jwt.ValidationErrorNotValidYet) != 0 {\n\t\t\trest.Error(writer, fmt.Sprintf(\"ID Token is expired or not active yet: %s\", err), http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t\trest.Error(writer, fmt.Sprintf(\"Could not handle ID Token: %s\", err), 
http.StatusBadRequest)\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "76dcd21b304014e71e048ac5d5f37e19", "score": "0.56794053", "text": "func generateToken(u string) (string, time.Time, error) {\n\tvar token *jwt.Token\n\tvar expirationTime time.Time\n\n\t// Declare the expiration time of the token\n\t// here, we have kept it as 60 minutes\n\texpirationTime = time.Now().Add(60 * time.Minute)\n\n\t// Create the JWT claims, which includes the username and expiry time\n\tclaims := &Claims{\n\t\tUname: u,\n\t\tStandardClaims: jwt.StandardClaims{\n\t\t\t// In JWT, the expiry time is expressed as unix milliseconds\n\t\t\tExpiresAt: expirationTime.Unix(),\n\t\t},\n\t}\n\n\t// Declare the token with the algorithm used for signing, and the claims\n\ttoken = jwt.NewWithClaims(jwt.SigningMethodHS256, claims)\n\n\t// Create the JWT string\n\ttokenString, err := token.SignedString(JwtSecretKey)\n\n\treturn tokenString, expirationTime, err\n}", "title": "" }, { "docid": "b00be79bed22dc88c1b7d3e9e139bc0f", "score": "0.56773025", "text": "func HandleCreateUser(c *gin.Context) {\n\tvar user models.User\n\tif err := c.ShouldBindJSON(&user); err != nil {\n\t\trestErr := errors.NewBadRequestError(\"Invalid JSON body\")\n\t\tc.JSON(restErr.Status, restErr)\n\t\treturn\n\t}\n\tcreatedUser, err := services.CreateUser(&user)\n\tif err != nil {\n\t\tc.JSON(err.Status, err)\n\t\treturn\n\t}\n\tc.JSON(http.StatusCreated, createdUser)\n}", "title": "" }, { "docid": "b48583c87ee2517a9d5b7b937dc0c742", "score": "0.5664161", "text": "func (fn CreateTokenHandlerFunc) Handle(params CreateTokenParams, principal interface{}) middleware.Responder {\n\treturn fn(params, principal)\n}", "title": "" }, { "docid": "acbc32dfb052cd92c3bb952733e7fb4e", "score": "0.5659101", "text": "func (server *Server) UserCreate(w http.ResponseWriter, r *http.Request) {\n\tvar (\n\t\tmessage, mail, pass, token string\n\t\terr error\n\t\trequestParams map[string]interface{}\n\t\titem interface{}\n\t\tctx context.Context\n\t\tisExist, ok bool\n\t\tuser User\n\t)\n\n\tctx = r.Context()\n\trequestParams = ctx.Value(\"requestParams\").(map[string]interface{})\n\n\titem, isExist = requestParams[\"mail\"]\n\tif !isExist {\n\t\tserver.Logger.LogWarning(r, \"mail not exist\")\n\t\tserver.error(w, errors.NoArgument.WithArguments(\"Поле mail отсутствует\", \"mail field expected\"))\n\t\treturn\n\t}\n\n\tmail, ok = item.(string)\n\tif !ok {\n\t\tserver.Logger.LogWarning(r, \"mail has wrong type\")\n\t\tserver.error(w, errors.InvalidArgument.WithArguments(\"Поле mail имеет неверный тип\", \"mail field has wrong type\"))\n\t\treturn\n\t}\n\n\titem, isExist = requestParams[\"pass\"]\n\tif !isExist {\n\t\tserver.Logger.LogWarning(r, \"password not exist\")\n\t\tserver.error(w, errors.NoArgument.WithArguments(\"Поле pass отсутствует\", \"pass field expected\"))\n\t\treturn\n\t}\n\n\tpass, ok = item.(string)\n\tif !ok {\n\t\tserver.Logger.LogWarning(r, \"password has wrong type\")\n\t\tserver.error(w, errors.InvalidArgument.WithArguments(\"Поле pass имеет неверный тип\", \"pass field has wrong type\"))\n\t\treturn\n\t}\n\n\tmessage = \"request was recieved, mail: \" + BLUE + mail + NO_COLOR + \" password: hidden\"\n\tserver.Logger.Log(r, message)\n\n\tif mail == \"\" || pass == \"\" {\n\t\tserver.Logger.LogWarning(r, \"mail or password is empty\")\n\t\tserver.error(w, errors.InvalidArgument.WithArguments(\"логин или пароль пусты\", \"login or password is empty\"))\n\t\treturn\n\t}\n\n\terr = handlers.CheckMail(mail)\n\tif err != nil 
{\n\t\tserver.Logger.LogWarning(r, \"mail - \"+err.Error())\n\t\tserver.error(w, errors.InvalidArgument.WithArguments(err))\n\t\treturn\n\t}\n\n\terr = handlers.CheckPass(pass)\n\tif err != nil {\n\t\tserver.Logger.LogWarning(r, \"password - \"+err.Error())\n\t\tserver.error(w, errors.InvalidArgument.WithArguments(err))\n\t\treturn\n\t}\n\n\tisExist, err = server.Db.IsUserExistsByMail(mail)\n\tif err != nil {\n\t\tserver.Logger.LogError(r, \"IsUserExistsByMail returned error \"+err.Error())\n\t\tserver.error(w, errors.DatabaseError)\n\t\treturn\n\t}\n\tif isExist {\n\t\tserver.Logger.LogWarning(r, \"user \"+BLUE+mail+NO_COLOR+\" alredy exists\")\n\t\tserver.error(w, errors.RegFailUserExists)\n\t\treturn\n\t}\n\n\tuser, err = server.Db.SetNewUser(mail, handlers.PassHash(pass))\n\tif err != nil {\n\t\tserver.Logger.LogError(r, \"SetNewUser returned error \"+err.Error())\n\t\tserver.error(w, errors.DatabaseError)\n\t\treturn\n\t}\n\n\ttoken, err = handlers.TokenMailEncode(mail)\n\tif err != nil {\n\t\tserver.Logger.LogError(r, \"TokenMailEncode returned error \"+err.Error())\n\t\tserver.error(w, errors.MarshalError)\n\t\treturn\n\t}\n\n\terr = server.Db.SetNewDevice(user.Uid, r.UserAgent())\n\tif err != nil {\n\t\tserver.Logger.LogError(r, \"SetNewDevice returned error \"+err.Error())\n\t\tserver.error(w, errors.DatabaseError)\n\t\treturn\n\t}\n\n\tw.WriteHeader(201)\n\tw.Write([]byte(`{\"uid\":` + strconv.Itoa(user.Uid) + `}`))\n\tserver.Logger.LogSuccess(r, \"user \"+BLUE+mail+NO_COLOR+\" was created successfully. Uid #\"+BLUE+strconv.Itoa(user.Uid)+NO_COLOR)\n\n\tgo func(mail string, xRegToken string, r *http.Request, mailConf *config.Mail) {\n\t\terr := handlers.SendMail(mail, xRegToken, mailConf)\n\t\tif err != nil {\n\t\t\tserver.Logger.LogError(r, \"SendMail returned error \"+err.Error())\n\t\t} else {\n\t\t\tserver.Logger.LogSuccess(r, \"Confirm mail for user \"+BLUE+mail+NO_COLOR+\" was send successfully\")\n\t\t}\n\t}(mail, token, r, &server.mailConf)\n}", "title": "" }, { "docid": "0d4451786de182551b794c52780aef30", "score": "0.56401736", "text": "func UserCreate(fn core.UserCreateFunc) Handler {\n\treturn func(ctx context.Context, w http.ResponseWriter, r *http.Request) {\n\t\tvar (\n\t\t\tcurrentApp = appFromContext(ctx)\n\t\t\tdeviceID = deviceIDFromContext(ctx)\n\t\t\tp = payloadUser{}\n\t\t\ttokenType = tokenTypeFromContext(ctx)\n\n\t\t\torigin = createOrigin(deviceID, tokenType, 0)\n\t\t)\n\n\t\terr := json.NewDecoder(r.Body).Decode(&p)\n\t\tif err != nil {\n\t\t\trespondError(w, 0, wrapError(ErrBadRequest, err.Error()))\n\t\t\treturn\n\t\t}\n\n\t\tu, err := fn(currentApp, origin, p.user)\n\t\tif err != nil {\n\t\t\trespondError(w, 0, err)\n\t\t\treturn\n\t\t}\n\n\t\trespondJSON(w, http.StatusCreated, &payloadUser{user: u})\n\t}\n}", "title": "" }, { "docid": "1fbd9c6939dedce1bb4d5b9369c45ffd", "score": "0.5638737", "text": "func Login(w http.ResponseWriter, r *http.Request) {\n\ttype login struct {\n\t\tUsername string `json:\"username,omitempty\"`\n\t}\n\n\treqBody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tfmt.Fprintf(w, \"There is an issue with the user creation\")\n\t}\n\tloginParams := login{}\n\tjson.Unmarshal(reqBody, &loginParams)\n\n\tif loginParams.Username == \"mike\" || loginParams.Username == \"rama\" {\n\t\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{\n\t\t\t\"user\": loginParams.Username,\n\t\t\t\"nbf\": time.Date(2018, 01, 01, 12, 0, 0, 0, time.UTC).Unix(),\n\t\t})\n\n\t\ttokenStr, err := 
token.SignedString([]byte(\"supersaucysecret\"))\n\t\tif err != nil {\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\tjson.NewEncoder(w).Encode(UnsignedResponse{\n\t\t\t\tMessage: err.Error(),\n\t\t\t})\n\t\t\treturn\n\t\t}\n\n\t\tw.WriteHeader(http.StatusCreated)\n\t\tjson.NewEncoder(w).Encode(SignedResponse{\n\t\t\tToken: tokenStr,\n\t\t\tMessage: \"logged in\",\n\t\t})\n\t\treturn\n\t}\n\tjson.NewEncoder(w).Encode(UnsignedResponse{\n\t\tMessage: \"bad username : \" + loginParams.Username,\n\t})\n}", "title": "" }, { "docid": "ba348f90306477aaada0536850ac9ff9", "score": "0.5638355", "text": "func CreateUserHandler(ctx echo.Context) error {\n\n\tuser := models.User{}\n\tif ctx.Bind(&user) != nil {\n\t\treturn ctx.JSON(http.StatusBadRequest, errors.GetBadRequestBodyError())\n\t}\n\n\tif userserv.CreateUser(user) != nil {\n\t\treturn ctx.JSON(http.StatusInternalServerError, errors.GetInternalServerError())\n\t}\n\n\treturn ctx.NoContent(http.StatusCreated)\n\n}", "title": "" }, { "docid": "35f13270f6716997a0bd08c29432d569", "score": "0.5633254", "text": "func generateToken(username string, gold int, secret string) string {\n\n\texpireTime := time.Now().Add(time.Hour * 1)\n\n\tclaims := UserStateClaims{\n\t\tusername,\n\t\tgold,\n\t\tjwt.StandardClaims{\n\t\t\tExpiresAt: expireTime.Unix(),\n\t\t},\n\t}\n\n\t// Create a signed JWT with the user's state to validate their gold amount\n\ttoken := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)\n\tsignedToken, err := token.SignedString([]byte(secret))\n\tif err != nil {\n\t\tllog.Error(\"Failed to create and sign token because of error: \" + err.Error())\n\t}\n\treturn signedToken\n}", "title": "" }, { "docid": "44eb13daed0e221bcaf8358da59ca720", "score": "0.5629545", "text": "func NewUserHandler(w http.ResponseWriter, r *http.Request) {\n\n\temail := r.FormValue(\"email\")\n\tpassword := r.FormValue(\"password\")\n\n\tlog.Println(email)\n\tlog.Println(password)\n\n\tapi.ResourceHandler(newUser(uuid.New()), w, r)\n}", "title": "" }, { "docid": "520c2b38c8adfbf577f62b3495d9b018", "score": "0.56279296", "text": "func (a *AutoLoginHandler)HandleUserTokenRemoval(w http.ResponseWriter, r *http.Request){\n\ttoken, err := mv(r, \"token\", false)\n\tif err != nil{\n\t\tsendErrorResponse(w, \"Invalid username\");\n\t\treturn\n\t}\n\n\tauthAgent := a.userHandler.GetAuthAgent();\n\tauthAgent.RemoveAutologinToken(token)\n\n\tsendOK(w)\n\n}", "title": "" } ]
2fd450c5295b5634897f39a4b5e03982
Close the store service in a proper way
[ { "docid": "3be5778e5c9945df68e7378b969145dc", "score": "0.724651", "text": "func (s *Service) Close() error {\n\tif s.store != nil { // is used?\n\t\treturn s.store.Close()\n\t}\n\treturn nil\n}", "title": "" } ]
[ { "docid": "d1ff398fe5eb7c72566502b15fbf5e74", "score": "0.7240257", "text": "func (s BoltSystemStore) Close() {\n s.db.Close()\n}", "title": "" }, { "docid": "b13f02247fea01b13cab20a036a61afc", "score": "0.72343165", "text": "func (s *MockStoreProvider) CloseStore(name string) error {\n\treturn nil\n}", "title": "" }, { "docid": "a7aae8ef03ba892ab80e7618126386ec", "score": "0.7130254", "text": "func (p *Provider) CloseStore(name string) error {\n\tp.mux.Lock()\n\tdefer p.mux.Unlock()\n\n\tif p.dbPrefix != \"\" {\n\t\tname = p.dbPrefix + \"_\" + name\n\t}\n\n\tstore, exists := p.dbs[name]\n\tif !exists {\n\t\treturn storage.ErrStoreNotFound\n\t}\n\n\tdelete(p.dbs, name)\n\n\terr := store.db.Close(context.Background())\n\tif err != nil {\n\t\treturn fmt.Errorf(failureDuringCouchDBCloseCall, err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "3fd3a7d91100b11f1ad0b81b6275cfd6", "score": "0.7110185", "text": "func (r *RESTProvider) CloseStore(string) error {\n\treturn nil\n}", "title": "" }, { "docid": "2dcbc904d4db9f9aa5daaa4bad9f8ac7", "score": "0.70354795", "text": "func (s *MockStoreProvider) Close() error {\n\treturn nil\n}", "title": "" }, { "docid": "83a83ccacababd8e34bd81c64456e688", "score": "0.6925982", "text": "func (es *eStore) Close() error {\n\terr := cpa(es.pathmh, es.path)\n\tos.Remove(es.pathwrk)\n\tos.Remove(es.pathmh)\n\treturn err\n}", "title": "" }, { "docid": "2f8535dbc6dfe7fdef18b856672c9b0b", "score": "0.6912156", "text": "func (ds *AppEngineDatastore) Close() {}", "title": "" }, { "docid": "ea894cf48894141bc9f7da667a86b62a", "score": "0.6903633", "text": "func (s *Store) Close() {\n\t// Nothing to close.\n}", "title": "" }, { "docid": "83d5e98dcf7fcb0e8e7e835c28e75437", "score": "0.6862577", "text": "func (s *Store) Close() error {\n\treturn s.handle.Close()\n}", "title": "" }, { "docid": "0a2f90b82c7cac58d34eff45b7ca81c5", "score": "0.68194413", "text": "func (s *storeManager) CloseStore(name string) error {\n\tstore, ok := s.GetStoreByName(name)\n\tif !ok {\n\t\treturn nil\n\t}\n\tkvLogger.Info(\"close kv store\", logger.String(\"kv\", name))\n\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\t// remove store from cache\n\tdelete(s.stores, name)\n\tif err := store.close(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "d5b4b2a9c8cf92e334c8bf185838c01a", "score": "0.67875767", "text": "func (ms *MemoryStore) Close() error {\n\treturn nil\n}", "title": "" }, { "docid": "76b23e44fe92dc5d8b4d3da7db63229b", "score": "0.6760514", "text": "func (s *localStore) Close(ctx context.Context) {\n\ts.manager.Close()\n}", "title": "" }, { "docid": "9a2f83d581ac99c2daf0401644f0fa91", "score": "0.6757414", "text": "func (store *MetricStore) Close() error {\n\tatomic.AddInt64(&store.closing, 1)\n\t_ = store.egressServer.Shutdown(context.Background())\n\t// if store.discoveryAgent != nil {\n\t// \tstore.discoveryAgent.Stop()\n\t// }\n\t// if store.scrapeManager != nil {\n\t// \tstore.scrapeManager.Stop()\n\t// }\n\t_ = store.promRuleManagers.DeleteAll()\n\t_ = store.replicatedStorage.Close()\n\tstore.ingressListener.Close()\n\tstore.internodeListener.Close()\n\treturn nil\n}", "title": "" }, { "docid": "a8e58576e3c69875e7f8b42e18717597", "score": "0.67322826", "text": "func (s *Store) Close() error {\n\ts.logger.Info(\"closing store\")\n\terr := s.ml.Leave(1 * time.Second)\n\tif err != nil {\n\t\ts.logger.Error(\"error leaving member list\", zap.Error(err))\n\t}\n\tfor k, v := range s.replicatorChans {\n\t\tclose(v)\n\t\tdelete(s.replicatorChans, 
k)\n\t}\n\ts.stopGRPC()\n\tif s.vlogTicker != nil {\n\t\ts.vlogTicker.Stop()\n\t}\n\tif s.mandatoryVlogTicker != nil {\n\t\ts.mandatoryVlogTicker.Stop()\n\t}\n\n\terr = s.db.Close()\n\tif err != nil {\n\t\ts.logger.Error(\"error closing DB\", zap.Error(err))\n\t}\n\treturn err\n}", "title": "" }, { "docid": "c5ce3f81de7bd7f33834d56e936574f0", "score": "0.6708776", "text": "func (s *Store) Close() {\n\ts.ticker.Stop()\n\ts.db.Db.Close()\n}", "title": "" }, { "docid": "a153a954a0f5bd0786bbb25c0339f0f9", "score": "0.66761523", "text": "func (s *store) Close() error {\n\tif !atomic.CompareAndSwapUint32(&s.stopped, 0, 1) {\n\t\treturn nil\n\t}\n\n\t// Close the connection pool.\n\ts.pool.close()\n\treturn nil\n}", "title": "" }, { "docid": "2d4229748823010cf2e667138840067c", "score": "0.6673602", "text": "func (s *Store) Close() {\n\ts.ticker.Stop()\n}", "title": "" }, { "docid": "2d4229748823010cf2e667138840067c", "score": "0.6673602", "text": "func (s *Store) Close() {\n\ts.ticker.Stop()\n}", "title": "" }, { "docid": "e0b7e5ee54e6e17177ec197608e11339", "score": "0.6645078", "text": "func (store *DiskStore) Close() error {\n\tstore.th.Call(func() { C.spdk_put_io_channel(store.c.ch) })\n\teal.Free(store.c)\n\treturn store.bd.Close()\n}", "title": "" }, { "docid": "4cd7f32e6927264827b4112fd37dc5b6", "score": "0.66376114", "text": "func (s *Store) Close() {\n\tif err := s.db.Sync(); err != nil {\n\t\tlog.Println(\"Sync Store failed: \", err)\n\t}\n\n\tif err := s.db.Close(); err != nil {\n\t\tlog.Println(\"Close Store failed: \", err)\n\t}\n}", "title": "" }, { "docid": "6e30d2a7097eb52410154839e5ae731f", "score": "0.6617103", "text": "func (s *Store) Close() error {\n\treturn s.client.Close()\n}", "title": "" }, { "docid": "d47633321a529d1506fbbd32a14c8b98", "score": "0.66038704", "text": "func (s *kvstore) Close() {\n\ts.db.Close()\n}", "title": "" }, { "docid": "85a6272dd037199f90adc80de7e4120c", "score": "0.66010314", "text": "func (s *Store) Close() {\n\ts.db.Close()\n}", "title": "" }, { "docid": "055b13bd14b235091bbe5083dd2c5ccf", "score": "0.6598541", "text": "func (mavls *MavlStore) Close() {\n\tmavl.ClosePrune()\n\tkmlog.Info(\"store mavl closed\")\n}", "title": "" }, { "docid": "efca26bce708bb998d1cc45bc7b8c07e", "score": "0.659563", "text": "func (p *FormattedProvider) CloseStore(name string) error {\n\terr := p.provider.CloseStore(name)\n\tif err != nil {\n\t\treturn fmt.Errorf(failCloseUnderlyingStore, err)\n\t}\n\n\tif p.cacheProvider != nil {\n\t\tif err := p.cacheProvider.CloseStore(name); err != nil {\n\t\t\treturn fmt.Errorf(failCloseUnderlyingStore, err)\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "01a8141fc75f84a5d3027b130ffc94ce", "score": "0.65943897", "text": "func (s *Store) Close() error {\n\tdefer os.RemoveAll(s.Path())\n\treturn s.Store.Close()\n}", "title": "" }, { "docid": "f1cc18f0ea38257944bd5e9b87b20622", "score": "0.6581852", "text": "func (s *RediStore) Close() {\n\ts.Pool.Close()\n}", "title": "" }, { "docid": "0a9f9fb190535962eda2c288bfa75863", "score": "0.6561211", "text": "func (s *Store) Close() error {\n\ts.fileLocks = nil\n\treturn nil\n}", "title": "" }, { "docid": "605414d97e8a8feb5a3980e53f6a8ac5", "score": "0.6553621", "text": "func (s S3StoreBase) Close() error { return nil }", "title": "" }, { "docid": "874c6773efb3cbe27ef0d9348450e510", "score": "0.6545832", "text": "func (m *mongoStore) Close() error {\n\treturn m.client.Disconnect(context.TODO())\n}", "title": "" }, { "docid": "317bc23f1e6faf05f9c559af5840d0d4", "score": 
"0.6543115", "text": "func (d *DataStore) Close() {\n\td.s.Close()\n}", "title": "" }, { "docid": "b1539991d3073f1ae26f2260475613f6", "score": "0.6521878", "text": "func (st *Store) Close() error {\n\tif st.db != nil {\n\t\tst.db.Close()\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "67706f674c5a95012f2abe2646203c36", "score": "0.65176135", "text": "func (mong *MongoStore) Close() {\n\t// mong.Session.Close()\n}", "title": "" }, { "docid": "b3bafcdfb8ca844aa116f565353eb39d", "score": "0.6510921", "text": "func CloseKvStore() error {\n\tif badgerDB == nil {\n\t\treturn nil\n\t}\n\n\terr := badgerDB.Close()\n\tLogIfError(err, nil)\n\tbadgerDB = nil\n\treturn err\n}", "title": "" }, { "docid": "5b90038364f1df9b0a2b3960ea3ccf95", "score": "0.6484462", "text": "func (s *InmemoryStore) Close() error {\n\treturn nil\n}", "title": "" }, { "docid": "1627fc7a6b2580cdaf13ac3fb47e430d", "score": "0.6456643", "text": "func (s *Service) Close() error { return s.db.Close() }", "title": "" }, { "docid": "8dec4708f5ff5c4accf656dc31312a20", "score": "0.6445928", "text": "func (machine *ShardKVMachine) Close() {\n\tmachine.kvstore.Close()\n}", "title": "" }, { "docid": "46f15bc6843644ca9323d14017140788", "score": "0.64218944", "text": "func (p *Provider) Close() {\n\tp.blkStoreProvider.Close()\n}", "title": "" }, { "docid": "88817429968a4c3ae567d7e31690a021", "score": "0.6420943", "text": "func (mong *MongoStore) Close() {\n\tmong.Session.Close()\n\n}", "title": "" }, { "docid": "9018c4718a4c4590c94fae38e9a8daed", "score": "0.64184165", "text": "func (s *Store) Close() error {\n\treturn s.closeFunc()\n}", "title": "" }, { "docid": "98e0c94a0f6ed2b114f4f0d2c1991dd7", "score": "0.64097244", "text": "func (s *boltStore) Close() {\n\ts.db.Close()\n}", "title": "" }, { "docid": "d8eded9bb20bbc611c41c5a2bc67cc32", "score": "0.639977", "text": "func (store *LocalStore) Close() {\n\tif store.db != nil {\n\t\tstore.db.Close()\n\t\tstore.db = nil\n\t}\n\tcloseFilePtr(&store.currSegmentFile)\n}", "title": "" }, { "docid": "7b604c5cf1a4df39f3dba0f2b491e7ff", "score": "0.63929874", "text": "func (store *Store) Close() error {\n\tif store.db != nil {\n\t\treturn store.db.Close()\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "cf87052e853d51c611a6c9cd1394e2c0", "score": "0.63889086", "text": "func (p *Provider) Close() {\n\tp.blkStoreProvider.Close()\n\tp.pvtdataStoreProvider.Close()\n}", "title": "" }, { "docid": "38e341ecbd252f89f811e6d38507fdd8", "score": "0.63572955", "text": "func (store *Storage) Close() error {\n\tstore.session.Close()\n\treturn nil\n}", "title": "" }, { "docid": "41b1307f1453a4bf70ff1d52ed39534b", "score": "0.63509864", "text": "func (s *Store) Close() {\n\tsetnil := func() interface{} {\n\t\treturn nil\n\t}\n\n\ttable.MigrateTables(&s.table, nil)\n\ttable.MigrateCaches(&s.cache, setnil)\n\n\ts.mainDb.Close()\n\ts.async.Close()\n}", "title": "" }, { "docid": "a3cf0904bc5462524fddacda0e4a2ae7", "score": "0.63508505", "text": "func (f *Factory) Close() error {\n\tclose(f.maintenanceDone)\n\terr := f.store.Close()\n\treturn err\n}", "title": "" }, { "docid": "72157d260d649eaca63b4fa9bd42f0f5", "score": "0.634706", "text": "func (s *BoltMetaStore) Close() {\n\ts.db.Close()\n}", "title": "" }, { "docid": "c1db2220acacead84196a03afc795f46", "score": "0.63424414", "text": "func (s *Store) Close() error {\n\treturn s.storage.Close()\n}", "title": "" }, { "docid": "2f0b718d5ec4816a316df63e9c89f617", "score": "0.63284546", "text": "func (s Store) Close() error {\n\treturn s.db.Close()\n}", "title": "" 
}, { "docid": "e2795b21713e25fae589312c99514c8c", "score": "0.63076025", "text": "func (s *store) Close() error {\n\treturn s.db.Close()\n}", "title": "" }, { "docid": "a350721f4c2ea4f0252fcfb2a531fbbc", "score": "0.6282431", "text": "func (s *Shard) close() error {\n\treturn s.store.Close()\n}", "title": "" }, { "docid": "9d0bd68e309fcdf7a158d53777e639f0", "score": "0.6281208", "text": "func (s *LevelDBStore) Close() {\n\t_ = s.db.Close()\n}", "title": "" }, { "docid": "46fdc09cd89eaff2dcfcabccf0d47817", "score": "0.62728816", "text": "func (_m *DataStoreTxnProvider) Close() {\n\t_m.Called()\n}", "title": "" }, { "docid": "58ffb2d752a69d99e0781dd6e2cbc48c", "score": "0.62647897", "text": "func (w *BigSegmentStoreWrapper) Close() {\n\tw.lock.Lock()\n\tif w.pollCloser != nil {\n\t\tclose(w.pollCloser)\n\t\tw.pollCloser = nil\n\t}\n\tif w.contextCache != nil {\n\t\tw.contextCache.Stop()\n\t\tw.contextCache = nil\n\t}\n\tw.lock.Unlock()\n\n\t_ = w.store.Close()\n}", "title": "" }, { "docid": "8a04cb2316f8c9ae1cd2bad7ec2eec9e", "score": "0.62600785", "text": "func (kvs *KVStore) Close() error {\n\treturn kvs.db.Close()\n}", "title": "" }, { "docid": "16de242d56a84d232610d1d0bc28f460", "score": "0.6248198", "text": "func (s *Datastore) Close() error {\n\n\treturn nil\n}", "title": "" }, { "docid": "17daf97606a9a4bd38fcd01ce82a4791", "score": "0.6224712", "text": "func (s *RethinkStore) Close() {\n\ts.Rethink.Close()\n}", "title": "" }, { "docid": "b34ab2329a07572468f1762c4a7b86e7", "score": "0.6185649", "text": "func (s *Store) Close() error {\n\treturn s.db.Close()\n}", "title": "" }, { "docid": "72e9d4cbe8ea696de1e472f4dbbc54d4", "score": "0.6183176", "text": "func (p *Provider) Close() error {\n\tp.mux.Lock()\n\tdefer p.mux.Unlock()\n\n\tfor _, store := range p.dbs {\n\t\terr := store.db.Close(context.Background())\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(failureDuringCouchDBCloseCall, err)\n\t\t}\n\t}\n\n\terr := p.couchDBClient.Close(context.Background())\n\tif err != nil {\n\t\treturn fmt.Errorf(failureWhileClosingKivikClient, err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "7800e372aae1c21bef44e2aeca83781f", "score": "0.6178149", "text": "func (store *Store) Close() error {\n\tif err := store.db.Close(); err != nil {\n\t\treturn fmt.Errorf(\"failed to close database: %v\", err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "066bb94f1bff1765072bba692511d3f6", "score": "0.6174629", "text": "func (s *Service) Close() {\n\tif s.dao != nil {\n\t\ts.dao.Close()\n\t}\n\tif s.mgr != nil {\n\t\ts.mgr.Close()\n\t}\n}", "title": "" }, { "docid": "db0ebf4866fe8f861bdb5fa8269ddad1", "score": "0.6167035", "text": "func (s *ServiceRepository) Close() (err error) {\r\n\tlog.Println(\"service repository cleanup\")\r\n\ts.Saver.Close()\r\n\ts.Fetcher.Close()\r\n\treturn\r\n}", "title": "" }, { "docid": "b096f8189ab7d071ef82e6145fe18e18", "score": "0.6161459", "text": "func (s nopAttrStore) Close() error { return nil }", "title": "" }, { "docid": "57fc900be14c865a2929ff7fb0b1755f", "score": "0.61472476", "text": "func (s *Store) Close() error {\n\treturn s.DB.Close()\n}", "title": "" }, { "docid": "5f41d825d88d5e9b23da8f515c186734", "score": "0.61278856", "text": "func CloseDB() {\n\tstorageService.CloseDB()\n}", "title": "" }, { "docid": "9a66e1e278ff0615ad0ea896911a08fd", "score": "0.61025816", "text": "func (s *Store) Close() {\n\tif s.coprCache != nil {\n\t\ts.coprCache.cache.Close()\n\t}\n}", "title": "" }, { "docid": "6e689c8b263c2b2dbfcb458f788604d0", "score": "0.60963947", "text": 
"func (s *CertStore) Close() error {\n\tif C.CertCloseStore(s.hStore, C.CERT_CLOSE_STORE_CHECK_FLAG) == 0 {\n\t\treturn getErr(\"Error closing cert store\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "43bc15c6dbc8ed4d601576dc427b4b16", "score": "0.6094826", "text": "func (store *EntryStore) Close() error {\n\treturn store.db.Close()\n}", "title": "" }, { "docid": "03a26fdd8e8fdc4197941a94c93483f6", "score": "0.6093652", "text": "func (s *storeServiceImpl) Stop() {\n\ts.doStop(s.mu)\n}", "title": "" }, { "docid": "cdd3084d07bb94bf47d41164517f3371", "score": "0.60931706", "text": "func (m *MySQLMetaStore) Close() {\n\tdefer m.client.Close()\n\treturn\n}", "title": "" }, { "docid": "b3c7c4c7ee99358461375b2da86be755", "score": "0.6091813", "text": "func (s *Service) Close() {\n}", "title": "" }, { "docid": "b3c7c4c7ee99358461375b2da86be755", "score": "0.6091813", "text": "func (s *Service) Close() {\n}", "title": "" }, { "docid": "4c33d70912a5ae7a79f90c3d1e5695a4", "score": "0.6084959", "text": "func (s *StoreStruct) Close() error {\n\treturn s.db.Close()\n}", "title": "" }, { "docid": "30b2705c9bb2972c6fc9dd84ffdd2c00", "score": "0.60689706", "text": "func (s *service) Close() {\n}", "title": "" }, { "docid": "8e89a302d99911a3db6bb07b395fc4f0", "score": "0.60603267", "text": "func (b *BadgerStore) Close() error {\n\treturn b.db.Close()\n}", "title": "" }, { "docid": "2a2b622c554d27e68ab0c04c00608361", "score": "0.60327417", "text": "func (s *Service) Close() {\n\tif s.leaderShip != nil {\n\t\tfor k, v := range s.autoIDMap {\n\t\t\tif v.base > 0 {\n\t\t\t\terr := v.forceRebase(context.Background(), s.store, k.dbID, k.tblID, v.base, v.isUnsigned)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlogutil.BgLogger().Warn(\"save cached ID fail when service exit\", zap.String(\"category\", \"autoid service\"),\n\t\t\t\t\t\tzap.Int64(\"db id\", k.dbID),\n\t\t\t\t\t\tzap.Int64(\"table id\", k.tblID),\n\t\t\t\t\t\tzap.Int64(\"value\", v.base),\n\t\t\t\t\t\tzap.Error(err))\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\ts.leaderShip.Cancel()\n\t}\n}", "title": "" }, { "docid": "93f48a370d8efcc65294b8067635ecfd", "score": "0.6030859", "text": "func (mds *MockDatastoreClient) Close() error {\n\treturn nil\n}", "title": "" }, { "docid": "0eb7cd8acb598ab0ab69a5e0a58b8226", "score": "0.6029394", "text": "func (storage *profileStorageService) Close() {\n\tstorage.db.Close()\n}", "title": "" }, { "docid": "c0f5a4c0982ad0bcf57b5f8370cd5c0c", "score": "0.60203046", "text": "func (s *Store) Stop() {\n\tclose(s.donec)\n}", "title": "" }, { "docid": "50c9e81c496e5b6c12334185016b364c", "score": "0.6018148", "text": "func (s *DedupService) Close() {\n\ts.db.Close()\n}", "title": "" }, { "docid": "f48ed57990b86a0a9819f9c00ee96a2c", "score": "0.60054225", "text": "func (s *EtcdStore) Close() error {\n\treturn s.client.Close()\n}", "title": "" }, { "docid": "1db74d0372cde8ea40ff7c989fac8e33", "score": "0.5991883", "text": "func (store *DB) Close() error {\n\treturn store.db.Close()\n}", "title": "" }, { "docid": "ba89ebd1b17e486697febee40a15391f", "score": "0.5971784", "text": "func (s *Store) Teardown() {\n\ts.mgoSession.Close()\n}", "title": "" }, { "docid": "7b4aa918cec715c67b9666349eef877d", "score": "0.59588206", "text": "func (p *PostgresStore) Close() error {\n\treturn nil\n}", "title": "" }, { "docid": "7bb9a60ff62332f5662bda0c3b36b42a", "score": "0.59530926", "text": "func closeDB() {\n\tlog.Println(\"Closing datastore...\")\n\tnaprrql.GetDB().Close()\n\tlog.Println(\"Datastore closed.\")\n}", "title": "" }, { "docid": 
"78d5ec4255513e36a169d435b4777299", "score": "0.5943746", "text": "func (svc ItemCatalogService) Close() {\n\terr := svc.CacheFinder.Close()\n\tif err != nil {\n\t\tsvc.Logger.Log(\"failed to close cache connection: [error=%s]\", err.Error())\n\t}\n\terr = svc.ItemDao.Close()\n\tif err != nil {\n\t\tsvc.Logger.Log(\"failed to close mongo connection: [error=%s]\", err.Error())\n\t}\n}", "title": "" }, { "docid": "c3dc242d5834e92d77703238fcda9ccd", "score": "0.5943366", "text": "func (store *MongoStorage) Close() error {\n\tstore.session.Close()\n\treturn nil\n}", "title": "" }, { "docid": "1be54133fd940e9ee84546d11eb5cd83", "score": "0.5939367", "text": "func (s *SFTPStoreBase) Close() error {\n\tif s.cancel != nil {\n\t\tdefer s.cancel()\n\t}\n\treturn s.client.Close()\n}", "title": "" }, { "docid": "202b03d233ae6553ab6124e83c6546a4", "score": "0.5931438", "text": "func (us *UserStorage) Close() {}", "title": "" }, { "docid": "a55486d5f244d1b43023b38a65e3b25c", "score": "0.5927998", "text": "func (s *CassandraStore) Close() {\n\ts.session.Close()\n}", "title": "" }, { "docid": "edaca80fd72e40de97bfeaaed3e91bfe", "score": "0.5925897", "text": "func (ds *DataStore) Close(ctx context.Context, txn Transaction) {\n\t// TODO(tsandall):\n}", "title": "" }, { "docid": "b9cba48270e2104facfa3ff82ea5ffd3", "score": "0.59229225", "text": "func (_m *Storage) Close() {\n\t_m.Called()\n}", "title": "" }, { "docid": "779225198914b36241dac8fa2437a53c", "score": "0.59137064", "text": "func (t *Store) Close() error {\n\tdefer t.log.Debugf(\"Store closed\")\n\tt.wg.Wait()\n\tt.tree.Close()\n\treturn t.db.Close()\n}", "title": "" }, { "docid": "9cbf7e0c5550f9d42e9f0fabd57dcde9", "score": "0.5906259", "text": "func (provider AVQuoteProvider) Close() {\n}", "title": "" }, { "docid": "83ddd2318a0b2a2ada5d63a55724557f", "score": "0.5895395", "text": "func (_m *Store) TerminateEmbededStorage() {\n\t_m.Called()\n}", "title": "" }, { "docid": "7753c80fce68cf8733b7a0a6a473b865", "score": "0.5873844", "text": "func (s *storeServiceImpl) Shutdown() {\n\tselect {\n\tcase <-s.shutdownCtx.Done():\n\t\t// the Store is already shutdown\n\t\treturn\n\tdefault:\n\t}\n\ts.Stop()\n\ts.ingestwg.Wait() // wait until we are done ingesting new messages\n\ts.shutdownStore()\n\ts.logger.Debug(\"Store service waits for end of store goroutines\")\n\ts.store.WaitFinished()\n\t_ = s.pipe.Close()\n}", "title": "" }, { "docid": "ef8371d9d0dcd9118d7f8a75f4703b6a", "score": "0.58700854", "text": "func (sp *SaseServiceProcessor) Close() error {\n\treturn nil\n}", "title": "" }, { "docid": "e9c8c14f577ef265612c5b02366a7d14", "score": "0.58606815", "text": "func closeTrxDatabase(dbsvc *dbService) {\n\tdbsvc.lvldb.Close()\n\tos.RemoveAll(dbsvc.dir)\n}", "title": "" }, { "docid": "0cc1ad9a9d56f0c399ff204c4550eff0", "score": "0.5856052", "text": "func (s *Services) Close() error {\n\treturn s.db.Close()\n}", "title": "" } ]
e9e9f683156f6283587893eb4c6cecbc
DeleteFile: Invalidate the file in local cache.
[ { "docid": "23fcf6002daec34a400db189bda14860", "score": "0.71577185", "text": "func (fc *FileCache) DeleteFile(options internal.DeleteFileOptions) error {\n\tlog.Trace(\"FileCache::DeleteFile : name=%s\", options.Name)\n\n\tflock := fc.fileLocks.Get(options.Name)\n\tflock.Lock()\n\tdefer flock.Unlock()\n\n\terr := fc.NextComponent().DeleteFile(options)\n\terr = fc.validateStorageError(options.Name, err, \"DeleteFile\", false)\n\tif err != nil {\n\t\tlog.Err(\"FileCache::DeleteFile : error %s [%s]\", options.Name, err.Error())\n\t\treturn err\n\t}\n\n\tlocalPath := filepath.Join(fc.tmpPath, options.Name)\n\terr = deleteFile(localPath)\n\tif err != nil && !os.IsNotExist(err) {\n\t\tlog.Err(\"FileCache::DeleteFile : failed to delete local file %s [%s]\", localPath, err.Error())\n\t}\n\n\tfc.policy.CachePurge(localPath)\n\n\treturn nil\n}", "title": "" } ]
[ { "docid": "fc6162b70ec45fe9f32340f53c9ef266", "score": "0.67942065", "text": "func (r *Renter) DeleteFile(siaPath modules.SiaPath) error {\n\tif err := r.tg.Add(); err != nil {\n\t\treturn err\n\t}\n\tdefer r.tg.Done()\n\n\t// Call threadedBubbleMetadata on the old directory to make sure the system\n\t// metadata is updated to reflect the move\n\tdefer func() error {\n\t\tdirSiaPath, err := siaPath.Dir()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tgo r.threadedBubbleMetadata(dirSiaPath)\n\t\treturn nil\n\t}()\n\n\treturn r.staticFileSet.Delete(siaPath)\n}", "title": "" }, { "docid": "a925001c9ef68763f4c4b5de5277d80e", "score": "0.67666054", "text": "func (s *MemStorage) DeleteFile(ctx context.Context, name string) error {\n\tselect {\n\tcase <-ctx.Done():\n\t\treturn ctx.Err()\n\tdefault:\n\t\t// continue on\n\t}\n\tif !path.IsAbs(name) {\n\t\treturn errors.Errorf(\"file name is not an absolute path: %s\", name)\n\t}\n\ts.rwm.Lock()\n\tdefer s.rwm.Unlock()\n\tif _, ok := s.dataStore[name]; !ok {\n\t\treturn errors.Errorf(\"cannot find the file: %s\", name)\n\t}\n\tdelete(s.dataStore, name)\n\treturn nil\n}", "title": "" }, { "docid": "8f838d5d4e1419eb86c01e80212f21fd", "score": "0.67333496", "text": "func (r *Renter) DeleteFile(nickname string) error {\n\tlockID := r.mu.Lock()\n\tf, exists := r.files[nickname]\n\tif !exists {\n\t\tr.mu.Unlock(lockID)\n\t\treturn ErrUnknownPath\n\t}\n\tdelete(r.files, nickname)\n\tdelete(r.persist.Tracking, nickname)\n\n\terr := persist.RemoveFile(filepath.Join(r.persistDir, f.SiaPath()+ShareExtension))\n\tif err != nil {\n\t\tr.log.Println(\"WARN: couldn't remove file :\", err)\n\t}\n\n\tr.saveSync()\n\tr.mu.Unlock(lockID)\n\n\t// mark the file as deleted\n\tf.Delete()\n\n\t// TODO: delete the sectors of the file as well.\n\n\treturn nil\n}", "title": "" }, { "docid": "c8d4b8eef6513e780c42f456a4dbbc73", "score": "0.66264015", "text": "func (d *Driver) DeleteFile(name string) (status vi.Status) {\r\n\tb := fmt.Sprintf(\"MMEM:DEL '%s'\", name)\r\n\t_, status = d.Write([]byte(b), uint32(len(b)))\r\n\treturn\r\n}", "title": "" }, { "docid": "c4f36cfcc82461f0faeef1ed94cc6882", "score": "0.66207457", "text": "func FileDelete(filename string) error {\r\n\treturn os.Remove(filename)\r\n}", "title": "" }, { "docid": "2fd3f38f034ace1b48caa83e3e528470", "score": "0.66037583", "text": "func (backend *Backend) DeleteFile(id string) error {\n\treturn os.Remove(path.Join(backend.BaseDir, id))\n}", "title": "" }, { "docid": "72f6d1814d10357f896a251a3e2d2a0d", "score": "0.65740085", "text": "func (sf *settingsFile) deleteFile() error {\n\tif sf.fileExists(sf.filePath) {\n\t\treturn os.Remove(sf.filePath)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "9f468dabc20cd05d9a48115921f40156", "score": "0.6545044", "text": "func (l *LocalStorage) Delete(filePath string) error {\n\tsrcFileFullPath := filepath.Join(l.rootFolder, filePath)\n\n\t// make sure the source file exists\n\ts, err := os.Stat(srcFileFullPath)\n\tif os.IsNotExist(err) {\n\t\treturn err\n\t}\n\tif !s.Mode().IsRegular() {\n\t\treturn errors.New(\"File is not in regular mode\")\n\t}\n\n\tos.Remove(srcFileFullPath)\n\n\treturn err\n}", "title": "" }, { "docid": "a27ef2518e9e6949c957f8f843795b1d", "score": "0.65185374", "text": "func (futl *Fileutil) DeleteFile(filepath string) (err error) {\n\treturn futl.fs.Remove(filepath)\n}", "title": "" }, { "docid": "f735bc3446c8f9942f907acdb122c3a9", "score": "0.6499769", "text": "func (s *SafeFile) SafeDeleteFile() {\n\ts.mu.Lock()\n\tdefer 
s.mu.Unlock()\n\tDeleteFile(s.path)\n}", "title": "" }, { "docid": "42687f489b7865c46f95b01aae44b34c", "score": "0.6490859", "text": "func (fc *FileCache) Delete(id string) error {\n\tfc.cacheLock.Lock()\n\tdefer fc.cacheLock.Unlock()\n\n\tdelete(fc.stateCache, id)\n\tfc.isDirty = true\n\tif err := fc.saveToFile(fc.filename, fc.stateCache); err != nil {\n\t\treturn err\n\t}\n\tfc.isDirty = false\n\treturn nil\n}", "title": "" }, { "docid": "d62c063c51f82b83c6434af308ca051a", "score": "0.6482644", "text": "func DeleteFile(file *entity.File, id string, client *statsd.Client) (err error) {\n\tt := client.NewTiming()\n\tif config.DB.Where(\"id = ?\", id).First(&file); file.ID == \"\" {\n\t\treturn errors.New(\"the file doesn't exist!!!\")\n\t}\n\tconfig.DB.Where(\"id = ?\", id).Delete(&file)\n\tt.Send(\"delete_file.query_time\")\n\treturn nil\n}", "title": "" }, { "docid": "82ca7208c9a7fc6981ea678cf48992d2", "score": "0.64436674", "text": "func DeleteFile(filepath string) (err error) {\n\treturn fs.Remove(filepath)\n}", "title": "" }, { "docid": "349b992f774c4810e548e16aa7bc3a25", "score": "0.6435445", "text": "func (driver *MainDriver) DeleteFile(cc server.ClientContext, path string) error {\n\treturn nil\n}", "title": "" }, { "docid": "3f3c41cf4c273f6d4c262b8e4aa5c5ee", "score": "0.6434394", "text": "func DeleteFile(p string) error {\n\terr := os.Remove(p)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn err\n\t}\n\tfmt.Println(\"++> done deleting File\")\n\treturn nil\n}", "title": "" }, { "docid": "ff9bddb28ad3309996e40d3bba1e55a6", "score": "0.6425328", "text": "func deleteFile(file string) {\n\tvar err = os.Remove(file)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}", "title": "" }, { "docid": "a472a8e1a7880a28a53b7c63fc1295a7", "score": "0.6407019", "text": "func (r *FsRepo) Delete(key string) {\n\tr.fileCache.Delete(key)\n}", "title": "" }, { "docid": "5fa4573afae3204e24255c6fa19d9944", "score": "0.6386335", "text": "func (entry *localFileEntry) Delete() error {\n\tvar persist metadata.Persist\n\tif err := entry.GetMetadata(&persist); err != nil {\n\t\tif !os.IsNotExist(err) {\n\t\t\treturn fmt.Errorf(\"get persist metadata: %s\", err)\n\t\t}\n\t} else {\n\t\tif persist.Value {\n\t\t\treturn ErrFilePersisted\n\t\t}\n\t}\n\n\t// Remove files.\n\treturn os.RemoveAll(filepath.Dir(entry.GetPath()))\n}", "title": "" }, { "docid": "26f8ed0b8eae83d4b515528a980d5471", "score": "0.6379842", "text": "func DeleteFile(c *gin.Context) {\n\t// 获取file_id和user_id\n\tfileId, userId, ok := getFileIdAndUserId(c)\n\tif !ok {\n\t\treturn\n\t}\n\n\t// 获取文件信息\n\tfileM, status := db.FindFileOfUserByID(userId, fileId)\n\tif !status.Success() {\n\t\tutils.Response(c, http.StatusOK, errno.RespCode{Code: errno.RespFileNotFound}, nil)\n\t\treturn\n\t}\n\n\tassert.IsEqual(fileM.ID, fileId)\n\tif fileM.Type == models.FileType {\n\t\t// 在数据库中软删除记录\n\t\tstatus = db.DeleteFileOnly(fileId)\n\t\tif !status.Success() {\n\t\t\tutils.Response(c, http.StatusOK, errno.RespCode{Code: errno.RespFileDeleteFailed}, nil)\n\t\t\treturn\n\t\t}\n\n\t\t// 如果是文件则将文件移入\"回收站\", 目录格式 : <回收站>/<精确到天的格式化时间YYYY-MM-DD>/<fileId>\n\t\t// 以后会编写工具, 定期删除超时的文件夹\n\t\tvar srcAddr string = fileM.LocalAddr\n\t\tvar destAddr string\n\n\t\tdestDirAddr := filepath.Join(\n\t\t\tconfig.Cfg.LocalSaveCfg.TrashPath,\n\t\t\ttime.Now().Format(\"2006-01-02\"),\n\t\t)\n\t\tdestFilename := strconv.FormatInt(fileId, 10)\n\t\tdestAddr = filepath.Join(destDirAddr, destFilename)\n\t\terr := os.MkdirAll(destDirAddr, 0755)\n\t\tif err != nil 
{\n\t\t\tlg.Logger.Println(\"Server Error, Mkdir %s Failed When Calling DeleteFile\\n\", destDirAddr)\n\t\t\tutils.Response(c, http.StatusInternalServerError, errno.RespCode{Code: errno.RespFileDeleteFailed}, nil)\n\t\t\treturn\n\t\t}\n\n\t\terr = os.Rename(srcAddr, destAddr) // 移入回收站\n\t\tif err != nil {\n\t\t\tlg.Logger.Println(\"Server Error, Move(Rename) %s Failed When Calling DeleteFile\\n\", destDirAddr)\n\t\t\tutils.Response(c, http.StatusInternalServerError, errno.RespCode{Code: errno.RespFileDeleteFailed}, nil)\n\t\t\treturn\n\t\t}\n\n\t\t// 成功应答\n\t\tutils.Response(c, http.StatusOK, errno.RespCode{Code: errno.RespSuccess}, gin.H{\n\t\t\t\"count\": 1,\n\t\t})\n\t} else {\n\t\t// 如果是目录, 则需要递归的删除其下的所有文件和文件夹(软删除其记录, 将所有文件移入回收站)\n\t\t//// 递归的找到该目录下的所有文件\n\t\tfiles, status := db.FindFilesRecursively(fileId)\n\t\tif !status.Success() {\n\t\t\tutils.Response(c, http.StatusOK, errno.RespCode{Code: errno.RespFileDeleteFailed}, nil)\n\t\t\treturn\n\t\t}\n\n\t\t//// 删除记录\n\t\tids := make([]int64, len(files))\n\t\tfor i, item := range files {\n\t\t\tids[i] = item.ID\n\t\t}\n\t\tif s := db.DeleteFilesOnly(ids); !s.Success() {\n\t\t\tutils.Response(c, http.StatusOK, errno.RespCode{Code: errno.RespFileDeleteFailed}, nil)\n\t\t\treturn\n\t\t}\n\t\t//// 移入回收站, 出错打日志并向前端报告\n\t\tdestDirAddr := filepath.Join(\n\t\t\tconfig.Cfg.LocalSaveCfg.TrashPath,\n\t\t\ttime.Now().Format(\"2006-01-02\"),\n\t\t)\n\t\tokNum := mvFilesToTrans(destDirAddr, files)\n\n\t\t// 成功应答, 报告成功删除的个数\n\t\tutils.Response(c, http.StatusOK, errno.RespCode{Code: errno.RespSuccess}, gin.H{\n\t\t\t\"count\": okNum,\n\t\t})\n\t\treturn\n\t}\n}", "title": "" }, { "docid": "9ab1fb397977f95e742ecd92f07c2f56", "score": "0.63750595", "text": "func DeleteFile(path string) error {\n\treturn fs.DeleteFile(path)\n}", "title": "" }, { "docid": "9ab1fb397977f95e742ecd92f07c2f56", "score": "0.63750595", "text": "func DeleteFile(path string) error {\n\treturn fs.DeleteFile(path)\n}", "title": "" }, { "docid": "bae48bdee92e02c63a83256d4f61693d", "score": "0.6349603", "text": "func (f *File) Delete() error {\n\terr := f.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\tc_status := C.int(0)\n\tC.fits_delete_file(f.c, &c_status)\n\treturn to_err(c_status)\n}", "title": "" }, { "docid": "8c2096f8d5789f99e275c95df88334b2", "score": "0.6326203", "text": "func (l *Local) DeleteFile(ctx context.Context, project, path string) error {\n\tp := l.rootPath + \"/\" + project + path\n\tif isPathDir(p) {\n\t\treturn errors.New(\"Local: Provided path is not a directory\")\n\t}\n\n\treturn os.Remove(p)\n}", "title": "" }, { "docid": "4c25abc40dd572a9ad8558480fd877f6", "score": "0.62879837", "text": "func (fs *localFS) DeleteFile(path string) error {\n\tvar err error\n\tpath, err = fs.Abs(path)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif exists, isDir := fs.Exists(path); exists && !isDir {\n\t\tif isDir {\n\t\t\treturn ErrIsDir\n\t\t}\n\n\t\treturn file.Remove(path)\n\t}\n\n\treturn os.ErrNotExist\n}", "title": "" }, { "docid": "bfefecfdbcd721ed547a75143e48cfd7", "score": "0.6265438", "text": "func Delete(filename string) error {\n\treturn os.Remove(getFilePath(filename))\n}", "title": "" }, { "docid": "a2db4c918f50be6a19fc6bbf7f55245e", "score": "0.62587446", "text": "func (fc *FileCache) CloseFile(options internal.CloseFileOptions) error {\n\tlog.Trace(\"FileCache::CloseFile : name=%s, handle=%d\", options.Handle.Path, options.Handle.ID)\n\n\tlocalPath := filepath.Join(fc.tmpPath, options.Handle.Path)\n\n\tif options.Handle.Dirty() 
{\n\t\tlog.Info(\"FileCache::CloseFile : name=%s, handle=%d dirty. Flushing the file.\", options.Handle.Path, options.Handle.ID)\n\t\terr := fc.FlushFile(internal.FlushFileOptions{Handle: options.Handle}) //nolint\n\t\tif err != nil {\n\t\t\tlog.Err(\"FileCache::CloseFile : failed to flush file %s\", options.Handle.Path)\n\t\t\treturn err\n\t\t}\n\t}\n\n\tf := options.Handle.GetFileObject()\n\tif f == nil {\n\t\tlog.Err(\"FileCache::CloseFile : error [missing fd in handle object] %s\", options.Handle.Path)\n\t\treturn syscall.EBADF\n\t}\n\n\t// Reduce the open handle counter here as file is being closed now\n\tflock := fc.fileLocks.Get(options.Handle.Path)\n\tflock.Lock()\n\tdefer flock.Unlock()\n\n\terr := f.Close()\n\tif err != nil {\n\t\tlog.Err(\"FileCache::CloseFile : error closing file %s(%d) [%s]\", options.Handle.Path, int(f.Fd()), err.Error())\n\t\treturn err\n\t}\n\tflock.Dec()\n\n\t// If it is an fsync op then purge the file\n\tif options.Handle.Fsynced() {\n\t\tlog.Trace(\"FileCache::CloseFile : fsync/sync op, purging %s\", options.Handle.Path)\n\t\tlocalPath := filepath.Join(fc.tmpPath, options.Handle.Path)\n\n\t\terr = deleteFile(localPath)\n\t\tif err != nil && !os.IsNotExist(err) {\n\t\t\tlog.Err(\"FileCache::CloseFile : failed to delete local file %s [%s]\", localPath, err.Error())\n\t\t}\n\n\t\tfc.policy.CachePurge(localPath)\n\t\treturn nil\n\t}\n\n\tfc.policy.CacheInvalidate(localPath) // Invalidate the file from the local cache.\n\treturn nil\n}", "title": "" }, { "docid": "be505695f92cffbc7dfd5a7bbdf4f106", "score": "0.62501836", "text": "func (f *Fake) Delete(file string) error {\n\tfp := filepath.Join(f.Dir, filepath.FromSlash(file))\n\treturn os.RemoveAll(fp)\n}", "title": "" }, { "docid": "cc45f0a428f0b50e041c0226cbaeebf4", "score": "0.61922836", "text": "func deletefile(t *testing.T, f *os.File) {\n\tt.Helper()\n\tif err := os.Remove(f.Name()); err != nil {\n\t\tt.Fatal(err)\n\t}\n}", "title": "" }, { "docid": "55111af1a040b4a63526291f88f00bc0", "score": "0.61610717", "text": "func (s *SeedrAPI) DeleteFile(id int) error {\n\treturn s.client.DeleteFile(id)\n}", "title": "" }, { "docid": "ae83a7f44d29e07480dac9ff8680d8d3", "score": "0.61581635", "text": "func (adp *fileStorage) Delete(ctx context.Context, filename string) error {\n\tpath := adp.dsn.Join(filename)\n\treturn os.Remove(path)\n}", "title": "" }, { "docid": "a13cfb7e2b226736047eef93e4d090a0", "score": "0.6132002", "text": "func Delete(fileName string) {\n\tos.Remove(fileName)\n}", "title": "" }, { "docid": "b707b71c7855ac8f10b05beeece48c97", "score": "0.61065227", "text": "func (h VideoUploadHandler) deleteFile(path string) {\n\tif path != \"\" {\n\t\tabsPath := filepath.Join(h.webroot, \"videos\", path)\n\t\t_, err := os.Stat(absPath)\n\t\tif err == nil {\n\t\t\terr = os.Remove(absPath)\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Unable to cleanup file %v. 
%v\\n\", absPath, err)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "b64e3ba708faab122fff0c9fcf749524", "score": "0.6094789", "text": "func (c *ForgettingCache) Delete(key string) {\n\tkey = keyToFilename(key)\n\tc.d.Erase(key)\n}", "title": "" }, { "docid": "fae5a36d0d532b5a7e2ea87bd7f5ae4c", "score": "0.60800767", "text": "func deleteTempFile(filename string) {\n\t_ = os.Remove(filename)\n}", "title": "" }, { "docid": "383a8f249cfc3833f92c3fec90c7f6ec", "score": "0.60637105", "text": "func (ws *Workspace) DeleteFile(fileName string) error {\n\tmanifestDirPath, err := ws.manifestDirectoryPath()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tmanifestFileName := ws.AppManifestFileName(fileName)\n\tmanifestPath := filepath.Join(manifestDirPath, manifestFileName)\n\tmanifestFileExists, err := ws.fsUtils.Exists(manifestPath)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif !manifestFileExists {\n\t\treturn &ErrManifestNotFound{ManifestName: manifestFileName}\n\t}\n\n\treturn ws.fsUtils.Remove(manifestPath)\n}", "title": "" }, { "docid": "af01afa4da67502f15454dcafcb7e35a", "score": "0.6059575", "text": "func (m *Media) DeleteFile() error {\n\tfiles := m.Thumbs()\n\tfiles[\"0x0\"] = m.RealPath()\n\n\tfor _, file := range files {\n\t\tif _, err := os.Stat(file); !os.IsNotExist(err) {\n\t\t\tif removeErr := os.Remove(file); removeErr != nil {\n\t\t\t\treturn removeErr\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "f091b0365bb516e49ceb97d3c59518dc", "score": "0.6049084", "text": "func (svc *FileSvc) DeleteFile(w http.ResponseWriter, r *http.Request) {\n\tctx := r.Context()\n\tproject, ok := ctx.Value(earthworks.ContextKey{Name: \"ProjectContext\"}).(earthworks.Project)\n\tif !ok {\n\t\thttp.Error(w, http.StatusText(422), 422)\n\t\treturn\n\t}\n\n\tfileID, err := strconv.Atoi(chi.URLParam(r, \"fileID\"))\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), 500)\n\t}\n\n\terr = svc.Repo.DeleteFile(fileID, project.ID)\n\tif err != nil {\n\t\thttp.Error(w, http.StatusText(404), 404)\n\t}\n\n\tw.WriteHeader(http.StatusNoContent)\n}", "title": "" }, { "docid": "d755f00cb6d68361976f6304950cc4a3", "score": "0.6028665", "text": "func DeleteFile(fileName string, location string) {\n\tos.Remove(ConcatenateStrings(location, fileName))\n}", "title": "" }, { "docid": "287c39b6c5c212ee0761ad59fc36bb51", "score": "0.6021606", "text": "func DeleteFile(allocationID, remotePath string) error {\n\ta, err := getAllocation(allocationID)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn a.DeleteFile(remotePath)\n}", "title": "" }, { "docid": "8d465abbb8e193e4f9be0eff9c0a1fd9", "score": "0.6011604", "text": "func (f *delFile) Delete() error {\n\tf.Close()\n\treturn os.Remove(f.Name())\n}", "title": "" }, { "docid": "42a360d5c3464ad0689cf24e5e219734", "score": "0.5977489", "text": "func (s *S3Uploader) DeleteFile(name string, remoteRoot string) {\n\tif s.config.Versioning {\n\t\ts.deleteVersionedObject(s.buildObjectKey(name, remoteRoot))\n\t} else {\n\t\ts.deleteObject(s.buildObjectKey(name, remoteRoot))\n\t}\n}", "title": "" }, { "docid": "6180ebd70076db54c66e471ba5d69d75", "score": "0.597703", "text": "func Delete(client client.Client, fileName string) (err error) {\n\tclient.Logger.Debugf(\"Deleting file %s\", fileName)\n\n\terr = os.Remove(fileName)\n\tif err != nil {\n\t\treturn\n\t}\n\n\treturn\n}", "title": "" }, { "docid": "1bcaf416469432e82b6f7a962437dd85", "score": "0.59340686", "text": "func (self *fdbFile) Delete() error {\n\tself.mutex.Lock()\n\tdefer self.mutex.Unlock()\n\n\tif self.fd != nil 
{\n\t\tself.fdc.Remove(self)\n\t}\n\n\treturn os.Remove(self.path)\n}", "title": "" }, { "docid": "7367f40b3bc6d0a016d92d5f56bdc7bd", "score": "0.5931805", "text": "func (a *api) DeleteFile(fileID int64) error {\n\tparams := url.Values{}\n\tvar out interface{}\n\tif err := a.Request(\"DELETE\", \"/api/v3/files/\"+fmt.Sprint(fileID)+\"\", params, nil, &out); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c8e6235272b09f0c6ce8cbdf66c40e6d", "score": "0.59302604", "text": "func (s *Server) DeleteFile(ctx context.Context, cmdDelFile *CmdDelFile) (*Empty, error) {\n\terr := slowpoke.DeleteFile(cmdDelFile.File)\n\tif err != nil {\n\t\treturn &Empty{}, status.Errorf(codes.Unknown, err.Error())\n\t}\n\treturn &Empty{}, nil\n}", "title": "" }, { "docid": "dedd3f9e1d300a8b186bb4b67b40cbfc", "score": "0.59195614", "text": "func Delete(filePath string) error {\n\tlog.I(\"Delete file: \", filePath)\n\te := os.Remove(filePath)\n\tif e != nil {\n\t\tlog.E(e)\n\t\treturn e\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "601ffa87c776d52158ee1f25608644de", "score": "0.5914961", "text": "func (c *Client) DeleteFile(ctx context.Context, fileID string) (err error) {\n\treq, err := c.requestBuilder.build(ctx, http.MethodDelete, c.fullURL(\"/files/\"+fileID), nil)\n\tif err != nil {\n\t\treturn\n\t}\n\n\terr = c.sendRequest(req, nil)\n\treturn\n}", "title": "" }, { "docid": "0d854a2b30c5ab3514195e83fe115700", "score": "0.59143704", "text": "func (d *Db) Delete(file string) error {\n\tsession, err := mgo.Dial(d.Address)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer session.Close()\n\tg := session.DB(d.Database).GridFS(PREFIX)\n\terr = g.Remove(file)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "83afc31138ab48854f7ac4de14f529bd", "score": "0.59140563", "text": "func (cl *clientChronos) DeleteFile(bucket, path string) error {\n\t_, err := cl.http.Delete().\n\t\tAddPath(fmt.Sprintf(pathToFile, cl.appName, bucket, path)).\n\t\tSend()\n\n\treturn err\n}", "title": "" }, { "docid": "384c04be4770187dfb4eec5ce7876c2d", "score": "0.58795166", "text": "func (h invalidateFileHandler) InvalidateFile(\n\tctx context.Context,\n\turl scalar.URI,\n) (err error) {\n\treturn h.uow.Do(ctx, func(ctx context.Context) (err error) {\n\t\treturn h.media.InvalidateFile(ctx, url)\n\t})\n}", "title": "" }, { "docid": "580b736e01b187c71363cddcd29ba0cb", "score": "0.5844754", "text": "func (l *level) deleteFile(fileNumber int64) {\n\tdelete(l.files, fileNumber)\n}", "title": "" }, { "docid": "307e46693e1ad2052532c60ef64c8228", "score": "0.58385813", "text": "func (g *Gerrit) DeleteFile(ctx context.Context, ci *ChangeInfo, filepath string) error {\n\treturn g.delete(ctx, fmt.Sprintf(\"/changes/%s/edit/%s\", ci.Id, url.QueryEscape(filepath)))\n}", "title": "" }, { "docid": "8992f790c48c0590cd246eb6a1af583f", "score": "0.5831629", "text": "func (fs *FSStore) Delete(key string) error {\n\treturn os.RemoveAll(fs.filePathFor(key))\n}", "title": "" }, { "docid": "30234d15b443a7238e1d5b7734a341b7", "score": "0.58290136", "text": "func RemoveFile(name string) { c.RemoveFile(name) }", "title": "" }, { "docid": "19879414bb16c597daf1dee7f77118f5", "score": "0.5828804", "text": "func DeleteTokenFile() {\n\ttoken, err := settings.NewToken()\n\tutils.CheckError(err)\n\terr = token.Delete()\n\tutils.CheckError(err)\n}", "title": "" }, { "docid": "7f854b10ed3b077747ece05ba42fd584", "score": "0.58260775", "text": "func (fs *MockFilesystem) Delete(filename string) error 
{\n\tdelete(fs.Files, filename)\n\treturn nil\n}", "title": "" }, { "docid": "377ca326ef6420e41c5078f24740e2a9", "score": "0.5821866", "text": "func (fs *FileService) DeleteFile(path string) error {\n\tpath = strings.Trim(path, \"/\")\n\n\t_, err := fs.client.DoRequestDecoding(\"DELETE\", strings.Join([]string{filesTopLevelSuffix, path}, \"/\"), nil, nil)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "6b65207bd96b2702a4482257121a38d4", "score": "0.57961184", "text": "func (f *FileSystem) Delete(ctx context.Context, name string) error {\n\treturn util.Retry(ctx, func(ctx context.Context) error {\n\t\t_, err := f.store.db.Exec(ctx,\n\t\t\t\"UPDATE files SET dtime = now() WHERE tenant = $1 AND path = $2 AND dtime IS NULL\",\n\t\t\tf.tenant, name)\n\t\treturn err\n\t})\n}", "title": "" }, { "docid": "8b67863ad34844cc3d36d072db4657d4", "score": "0.5771781", "text": "func DeleteFile(nameToDelete, lynkName string) error {\n\t// Need to delete the local file too - so parseMeta properly picks it up\n\tlynk := lynxutil.GetLynk(lynks, lynkName)\n\tvar err error\n\n\tif lynk == nil {\n\t\terr = errors.New(\"Could not delete file\")\n\t}\n\n\ti := 0\n\tfor i < len(lynk.Files) {\n\t\tif nameToDelete == lynk.Files[i].Name {\n\t\t\tlynk.Files = append(lynk.Files[:i], lynk.Files[i+1:]...)\n\t\t}\n\t\ti++\n\t}\n\n\treturn err\n}", "title": "" }, { "docid": "e62313a80f86c8e9f14953bb43e331b0", "score": "0.57620186", "text": "func (s *GCStorage) Delete(filepath string) error {\n\treturn s.DeleteCtx(s.ctx, filepath)\n}", "title": "" }, { "docid": "3898002e2f9c0b44ac36f651a9cb46fe", "score": "0.5750536", "text": "func (i *File) Delete() error {\n\tdir := filepath.Dir(i.Path)\n\tif dir == \".\" {\n\t\tdir = \"\"\n\t}\n\treturn os.RemoveAll(dir)\n}", "title": "" }, { "docid": "db27be96e5cc00c11ddc8ba7b2036adf", "score": "0.57469803", "text": "func (rdb *SQLiteFileDatasource) Delete(fileID int) error {\n\tif _, err := rdb.SQLConn.Exec(\"DELETE FROM file WHERE id=?\", fileID); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "82da013ac62e156fa8e9e887f03d65eb", "score": "0.57164645", "text": "func DeleteFile(\n\tdb *proxy.DB,\n\tname string,\n\tpath string,\n\tmessage string,\n\tauthor string,\n\temail string,\n\titemHash string,\n\trepoHash string,\n\tbranch string) (*FileInfo, error) {\n\n\tpath = strings.TrimPrefix(path, \"/\")\n\n\trepo, err := openRepo(db, true, name)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = compareHash(repo, path, itemHash, repoHash)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tw, err := repo.Worktree()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t_, err = w.Remove(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tc, err := commitChanges(repo, w, message, author, email)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &FileInfo{\n\t\tRepoHash: c.Hash.String(),\n\t}, nil\n}", "title": "" }, { "docid": "cc6eee48b8c0cfb048305e9d7e7f2e8a", "score": "0.57093036", "text": "func (t *Token) DeleteFile(fileID string) error {\n\tapi := fmt.Sprintf(deleteFileURL, t.Token, fileID)\n\tres := deleteFileResponse{}\n\tif err := getAPIResponse(api, &res); err != nil {\n\t\treturn err\n\t}\n\n\tif !res.Ok {\n\t\treturn errors.New(res.Error)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "ff540c0bb00d4e243109d0300243d80d", "score": "0.5699587", "text": "func (p *Peer) DeleteFile(ctx context.Context, c cid.Cid) error {\n\tfound, err := p.BlockStore().Has(c)\n\tif err != nil 
{\n\t\tlog.Error(\"Unable to find block \", err)\n\t\treturn err\n\t}\n\tif !found {\n\t\tlog.Warn(\"Content not found in datastore\")\n\t\treturn errors.New(\"content not found in datastore\")\n\t}\n\n\tgetLinks := func(ctx context.Context, cid cid.Cid) ([]*ipld.Link, error) {\n\t\tlinks, err := ipld.GetLinks(ctx, p, c)\n\t\tif err != nil {\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn nil, ctx.Err()\n\t\t\t}\n\t\t}\n\t\treturn links, nil\n\t}\n\tgcs := cid.NewSet()\n\terr = descendants(ctx, getLinks, gcs, []cid.Cid{c})\n\tif err != nil {\n\t\tlog.Error(\"Descendants failed \", err)\n\t\treturn err\n\t}\n\terr = gcs.ForEach(func(c cid.Cid) error {\n\t\tlog.Debug(c)\n\t\terr = p.BlockStore().DeleteBlock(c)\n\t\tif err != nil {\n\t\t\tlog.Error(\"Unable to remove block \", err)\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\tlog.Error(\"Block removal failed \", err)\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "7ba4f2df96a5845050fbaab7a0141540", "score": "0.56980217", "text": "func (s *FileStationSession) DeleteFile(path string) (bool, error) {\n\treturn s.deleteFileInternal(path, false)\n}", "title": "" }, { "docid": "09e4a58ceadf09f5f35e6e1b62d62eee", "score": "0.56947553", "text": "func DeleteFile(appID string, objectID string) error {\n\tappInfo, err := GetAppInfo(appID)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tregion, err := apps.GetAppRegion(appID)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn DeleteFileEx(appInfo.AppID, appInfo.AppKey, region, objectID)\n}", "title": "" }, { "docid": "8a3e4018e2d80cc290478792e72910b0", "score": "0.56919014", "text": "func (cloud *CloudClient) deleteFile(path string) error {\n\terr := cloud.conn.ObjectDelete(cloud.container, filepath.Base(path))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete file error: 'BackupClient:deleteFile' %s to storage: %q\\nHeaders\", path, err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "c64535df0e5a3a39c6d573f8a49cd013", "score": "0.5689316", "text": "func (l *TmpWriter) DeleteCurrentFile() (err error) {\n\tif l.file != nil && fileExists(l.file.Name()) {\n\t\tif err = os.Remove(l.file.Name()); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tl.file = nil\n\tl.fileOpen = false\n\treturn nil\n}", "title": "" }, { "docid": "d443ea8a2705be390760dae6e252c9a1", "score": "0.56884223", "text": "func CleanFile(file *os.File) {\n\tif file == nil {\n\t\treturn\n\t}\n\t// the following operation won't failed regularly, if failed, log it\n\terr := file.Close()\n\tif err != nil && err != os.ErrClosed {\n\t\tlogger.Warn(err)\n\t}\n\terr = os.Remove(file.Name())\n\tif err != nil {\n\t\tlogger.Warn(err)\n\t}\n}", "title": "" }, { "docid": "37c0af3749c88b2b635579c56821c996", "score": "0.5681692", "text": "func (sf *Source) Delete(client *http.Client) error {\n\t// if the fileID is nil, then upload function hasn't been called yet\n\tif sf.fileID == \"\" {\n\t\treturn errNothingToDelete\n\t}\n\tsrv, err := drive.New(client)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif err := srv.Files.Delete(sf.fileID).Do(); err != nil {\n\t\treturn err\n\t}\n\t// clearing the file ID so that consequent calls would now that the file\n\t// does not exist\n\tsf.fileID = \"\"\n\treturn nil\n}", "title": "" }, { "docid": "0e0875443765fbdb488a37fc5cfa9cc5", "score": "0.5671724", "text": "func deleting() {\n\tfmt.Println(\"=== file.deleting ===\")\n\terr := os.Remove(\"/tmp/dat\")\n\tcheck(err)\n}", "title": "" }, { "docid": "f35670b1aaa990e95d714c17be7e8140", "score": "0.566439", "text": 
"func (fs *localFS) Delete(path string, force bool) error {\n\tvar err error\n\tpath, err = fs.Abs(path)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif !force {\n\t\treturn file.Remove(path)\n\t} else {\n\t\treturn file.RemoveAll(path)\n\t}\n}", "title": "" }, { "docid": "218eac46eacecfce35da281f2557c3d1", "score": "0.56638724", "text": "func (*FileOperation) Delete(fs533FileName string) {\n\tvar err = os.Remove(fs533FileName)\n\n\tif err != nil {\n\t\tlog.Printf(\"Error: it is unable to remove file %s due to error %s\\n\", fs533FileName, err.Error())\n\t\treturn\n\t}\n\n\tlog.Printf(\"File %s is removed\\n\", fs533FileName)\n}", "title": "" }, { "docid": "f120ae11a68b2a99763d85fe1fa921be", "score": "0.5656144", "text": "func (f FileService) Delete(ctx context.Context, request model.DeleteFileRequest) (string, error) {\n\tid, err := f.File.Delete(ctx, request.ID)\n\tif err != nil {\n\t\treturn \"\", errors.Wrap(err, \"couldn't delete file\")\n\t}\n\n\treturn id, nil\n}", "title": "" }, { "docid": "e6430075a2e619564921e9a5c2d6204b", "score": "0.56440413", "text": "func (s *Storage) Delete(key string, kind interface{}) error {\n\tfileName := s.buildFileName(key, kind)\n\tif err := s.disk.Erase(fileName); err != nil {\n\t\treturn fmt.Errorf(\"failed to delete file: %v\", err)\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "c611c93d78156834fd25199b1c17d669", "score": "0.56399167", "text": "func (p *Pkg) RemoveFile(relPath string) error {\n\tif p.ReadOnly() {\n\t\treturn errors.New(\"Package is readonly\")\n\t}\n\tif !p.Has(relPath) {\n\t\treturn nil\n\t}\n\tpath := filepath.Join(p.AbsolutePath(), relPath)\n\treturn os.Remove(path)\n}", "title": "" }, { "docid": "06244f7042eadb36dd8b92f26a290f31", "score": "0.56291056", "text": "func (c *file) Invalidate(key string) {\n\tos.Remove(filepath.Join(c.dirname, key))\n}", "title": "" }, { "docid": "b792ddeec1dc07f5237ac4f79561ddc6", "score": "0.56179243", "text": "func DeleteCredentialFile() error {\n\treturn helper.DeleteFile(GetAppInfo().CredentialsPath)\n}", "title": "" }, { "docid": "fbe93bfb4a4bf677ff2922615e9c4829", "score": "0.5617496", "text": "func DeleteImageFile(filename string) error {\n\tpath := GetFileStoreFullPath(filename)\n\treturn core.DeleteFile(path)\n}", "title": "" }, { "docid": "a9fbd4a4493f886004cd0a289856ed9f", "score": "0.56104296", "text": "func DeleteDatabaseFile(t *testing.T) {\n\tt.Logf(\"Deleting database-file: '%s'\", databaseFile)\n\t_ = os.Remove(databaseFile)\n}", "title": "" }, { "docid": "5cf8d0ad8a434ac44387654acef5899d", "score": "0.56098896", "text": "func (p *Partitions) removeFile(off uint64) error {\n\tif p.tempDir == \"\" {\n\t\treturn errors.New(\"Partitions.Remove: temp dir not set\")\n\t}\n\n\t// we just remove it. 
if it's not removed, it's not in the tempdir\n\tppath := partitionPath(p.conf, p.topic, off)\n\tfullpath := filepath.Join(p.tempDir, ppath)\n\t// fmt.Println(\"removeFile\", ppath, fullpath)\n\n\tif _, err := os.Stat(fullpath); err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\tinternal.DiscardError(err)\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t}\n\tgo func() {\n\t\tinternal.Debugf(p.conf, \"removing %s\", fullpath)\n\t\terr := os.Remove(fullpath)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"error removing %s: %+v\", fullpath, err)\n\t\t}\n\t}()\n\tdelete(p.pathCache[p.tempDir], off)\n\treturn nil\n}", "title": "" }, { "docid": "fc8a231a7ff89d6235dfb943c23bb88b", "score": "0.56087923", "text": "func (s *fs) Delete(name string) error {\n\ts.Lock()\n\tdefer s.Unlock()\n\n\tif err := os.Remove(s.contentFile(name)); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8d3df7dc735d428e99c7da5c61109345", "score": "0.5606835", "text": "func (f *Fs) DeleteFile(path string) error {\n\tcf, name, err := f.currentDir.walkToParent(path)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif _, ok := cf.children[name]; !ok {\n\t\treturn ErrDeleteFileNotExist\n\t}\n\n\tdelete(cf.children, name)\n\n\treturn nil\n}", "title": "" }, { "docid": "16026c42243597d02c7562638b0ba15d", "score": "0.5603998", "text": "func RemoveCacheByFile(mdPath string) {\n\tif _, found := articleCache[mdPath]; found {\n\t\tmu.Lock()\n\t\tdelete(articleCache, mdPath)\n\t\tmu.Unlock()\n\t\tlog.Infof(\"Removed from cache: %s\", mdPath)\n\t} else {\n\t\tlog.Warnf(\"Remove: File not found: %s\", mdPath)\n\t}\n}", "title": "" }, { "docid": "16026c42243597d02c7562638b0ba15d", "score": "0.5603998", "text": "func RemoveCacheByFile(mdPath string) {\n\tif _, found := articleCache[mdPath]; found {\n\t\tmu.Lock()\n\t\tdelete(articleCache, mdPath)\n\t\tmu.Unlock()\n\t\tlog.Infof(\"Removed from cache: %s\", mdPath)\n\t} else {\n\t\tlog.Warnf(\"Remove: File not found: %s\", mdPath)\n\t}\n}", "title": "" }, { "docid": "b49b32373c892bbd5f08cabded5c7820", "score": "0.56032586", "text": "func ClearVersion(versionFile string) error {\n\tmutex.Lock()\n\tdefer mutex.Unlock()\n\tfInfo, err := os.Lstat(versionFile)\n\tif err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\tlog.Infof(\"version file %s does not exist, no need to clear.\", versionFile)\n\t\t\treturn nil\n\t\t}\n\n\t\tmsg := fmt.Sprintf(\"Check version file %s error %v\", versionFile, err)\n\t\tlog.Errorln(msg)\n\t\treturn errors.New(msg)\n\t}\n\n\tif fInfo.IsDir() {\n\t\tlog.Warningf(\"Version file %v exists and is a directory.\", versionFile)\n\t\treturn nil\n\t}\n\n\terr = os.Remove(versionFile)\n\tif err != nil {\n\t\tif !os.IsNotExist(err) {\n\t\t\treturn fmt.Errorf(\"failed to remove version file [%s]. 
%s\", versionFile, err)\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "402c8ac718b9586782df2e114434a634", "score": "0.5602469", "text": "func deleteFileSafely(file string) (err error) {\n\terr = nil\n\tif _, e := os.Stat(file); os.IsNotExist(e) {\n\t\treturn\n\t}\n\n\tif err = os.Remove(file); err != nil {\n\t\treturn\n\t}\n\n\treturn\n\n}", "title": "" }, { "docid": "a530d99c3a7a4662ceecfa718fa51fe3", "score": "0.56024134", "text": "func (storage *HubicStorage) DeleteFile(threadIndex int, filePath string) (err error) {\n err = storage.client.DeleteFile(storage.storageDir + \"/\" + filePath)\n if e, ok := err.(HubicError); ok && e.Status == 404 {\n LOG_DEBUG(\"HUBIC_DELETE\", \"Ignore 404 error\")\n return nil\n }\n return err\n}", "title": "" }, { "docid": "dd433b5019eda82f610a897d7f805bd1", "score": "0.5591397", "text": "func (fs *fileSystem) DeleteDxFile(dxPath storage.DxPath) error {\n\treturn fs.fileSet.Delete(dxPath)\n}", "title": "" }, { "docid": "4a2173a7f327907424b9a1058c3ea2aa", "score": "0.55909187", "text": "func (pf *ProcessedFiles) Delete(filename string) {\n\tpf.mux.Lock()\n\tdefer pf.mux.Unlock()\n\tdelete(pf.Files, filename)\n}", "title": "" }, { "docid": "6736ce21ded25822620d1431e8a5ec2b", "score": "0.5575758", "text": "func (f FileStore) Delete(accessToken string) error {\n\tif err := validateToken(accessToken); err != nil {\n\t\treturn &NotFoundError{accessToken, err}\n\t}\n\n\tif err := os.Remove(filepath.Join(string(f), accessToken)); err != nil && !os.IsNotExist(err) {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "800e90ec83f15ecec9a0748abd5b1852", "score": "0.5568938", "text": "func (file *File) Close() error {\n\treturn os.RemoveAll(file.tempDir)\n}", "title": "" }, { "docid": "bd2ba43534db6c39e4da570a2f6f4585", "score": "0.5558688", "text": "func (chustorage *Chustorage) DeleteFile(ctx context.Context, bucket, bucketFilename string) error {\n\tobj, err := chustorage.getObject(ctx, bucket, bucketFilename)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := obj.Delete(ctx); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "3c2b8283c8d78cf930bbf777e3667528", "score": "0.55584353", "text": "func deleteFile(t *testing.T, host string, key string, apiKey string) int {\n\n\tendpoint := \"http://\" + host + \"/api/v1/object/\" + key\n\n\tstatusCode, _ := httpCall(t, http.MethodDelete, endpoint, \"\", apiKey, -1, nil, \"\")\n\n\treturn statusCode\n}", "title": "" }, { "docid": "0e47ab2bda11cc59dfb16199e2cda913", "score": "0.5544789", "text": "func (r *Receiver) Delete(ctx context.Context, req *FileRequest) (*EmptyResponse, error) {\n\terr := os.RemoveAll(req.Path)\n\treturn &EmptyResponse{}, err\n}", "title": "" }, { "docid": "c26e1b127b30583b5dfe9d140e146fe4", "score": "0.553457", "text": "func (lfs *FS) Delete(ctx context.Context, path string) (err error) {\n\t// TODO (b5):\n\treturn fmt.Errorf(\"deleting local files via qfs.Localfs is not finished\")\n}", "title": "" }, { "docid": "db588d23d485df7f40df85cd9de778cf", "score": "0.552723", "text": "func (fs *Filesystem) Del(k string) error {\n\tp, err := fs.cleanPath(k)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif _, err := os.Stat(p); os.IsNotExist(err) {\n\t\treturn db.ErrNoValue\n\t}\n\n\tif err := os.Remove(p); err != nil {\n\t\treturn err\n\t}\n\n\tfs.fireEventForKey(k, eventDelete)\n\treturn nil\n}", "title": "" }, { "docid": "bc0e3ca3b1b4196d9da13a33c007b5c6", "score": "0.55224717", "text": "func (fod *FileOnDisk) Remove(path string) error 
{\n\tpath = filepath.FromSlash(path)\n\n\treturn os.RemoveAll(path)\n}", "title": "" }, { "docid": "c42c571a9345ffc95b49f1f82c650486", "score": "0.55206347", "text": "func (gr *GitResource) Delete() error {\n\terr := os.Remove(gr.path)\n\tif os.IsNotExist(err) {\n\t\t// already deleted\n\t\treturn nil\n\t}\n\treturn err\n}", "title": "" } ]
d70446178009bfdf12193b8353bf2ed2
+kubebuilder:rbac:groups="",resources=pods,verbs=get;list;watch +kubebuilder:rbac:groups="",resources=pods/status,verbs=get +kubebuilder:rbac:groups="",resources=services,verbs=get;list;watch;create;update;patch;delete +kubebuilder:rbac:groups="",resources=services/status,verbs=get;update;patch +kubebuilder:rbac:groups=apps,resources=deployments,verbs=get;list;watch;create;update;patch;delete +kubebuilder:rbac:groups=apps,resources=deployments/status,verbs=get;update;patch +kubebuilder:rbac:groups=apps,resources=statefulsets,verbs=get;list;watch;create;update;patch;delete +kubebuilder:rbac:groups=apps,resources=statefulsets/status,verbs=get;update;patch +kubebuilder:rbac:groups=extensions,resources=ingresses,verbs=get;list;watch;create;update;patch;delete +kubebuilder:rbac:groups=extensions,resources=ingresses/status,verbs=get;update;patch +kubebuilder:rbac:groups="",resources=configmaps,verbs=get;list;watch;create;update;patch;delete +kubebuilder:rbac:groups="",resources=configmaps/status,verbs=get;update;patch +kubebuilder:rbac:groups=solr.bloomberg.com,resources=solrclouds,verbs=get;list;watch;create;update;patch;delete +kubebuilder:rbac:groups=solr.bloomberg.com,resources=solrclouds/status,verbs=get;update;patch +kubebuilder:rbac:groups=zookeeper.pravega.io,resources=zookeeperclusters,verbs=get;list;watch;create;update;patch;delete +kubebuilder:rbac:groups=zookeeper.pravega.io,resources=zookeeperclusters/status,verbs=get;update;patch +kubebuilder:rbac:groups=etcd.database.coreos.com,resources=etcdclusters,verbs=get;list;watch;create;update;patch;delete +kubebuilder:rbac:groups=etcd.database.coreos.com,resources=etcdclusters/status,verbs=get;update;patch
[ { "docid": "2e9a2b7ef0288a10347c1b942db2a58f", "score": "0.0", "text": "func (r *SolrCloudReconciler) Reconcile(req ctrl.Request) (ctrl.Result, error) {\n\t_ = context.Background()\n\t_ = r.Log.WithValues(\"solrcloud\", req.NamespacedName)\n\n\t// Fetch the SolrCloud instance\n\tinstance := &solr.SolrCloud{}\n\terr := r.Get(context.TODO(), req.NamespacedName, instance)\n\tif err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\t// Object not found, return. Created objects are automatically garbage collected.\n\t\t\t// For additional cleanup logic use finalizers.\n\t\t\treturn reconcile.Result{}, nil\n\t\t}\n\t\t// Error reading the object - requeue the req.\n\t\treturn reconcile.Result{}, err\n\t}\n\n\tchanged := instance.WithDefaults(IngressBaseUrl)\n\tif changed {\n\t\tr.Log.Info(\"Setting default settings for solr-cloud\", \"namespace\", instance.Namespace, \"name\", instance.Name)\n\t\tif err := r.Update(context.TODO(), instance); err != nil {\n\t\t\treturn reconcile.Result{}, err\n\t\t}\n\t\treturn reconcile.Result{Requeue: true}, nil\n\t}\n\n\t// When working with the clouds, some actions outside of kube may need to be retried after a few seconds\n\trequeueOrNot := reconcile.Result{}\n\n\tnewStatus := solr.SolrCloudStatus{}\n\n\tbusyBoxImage := *instance.Spec.BusyBoxImage\n\n\tblockReconciliationOfStatefulSet := false\n\n\tif err := reconcileZk(r, req, instance, busyBoxImage, &newStatus); err != nil {\n\t\treturn requeueOrNot, err\n\t}\n\n\t// Generate Common Service\n\tcommonService := util.GenerateCommonService(instance)\n\tif err := controllerutil.SetControllerReference(instance, commonService, r.scheme); err != nil {\n\t\treturn requeueOrNot, err\n\t}\n\n\t// Check if the Common Service already exists\n\tfoundCommonService := &corev1.Service{}\n\terr = r.Get(context.TODO(), types.NamespacedName{Name: commonService.Name, Namespace: commonService.Namespace}, foundCommonService)\n\tif err != nil && errors.IsNotFound(err) {\n\t\tr.Log.Info(\"Creating Common Service\", \"namespace\", commonService.Namespace, \"name\", commonService.Name)\n\t\terr = r.Create(context.TODO(), commonService)\n\t} else if err == nil {\n\t\tif util.CopyServiceFields(commonService, foundCommonService) {\n\t\t\t// Update the found Service and write the result back if there are any changes\n\t\t\tr.Log.Info(\"Updating Common Service\", \"namespace\", commonService.Namespace, \"name\", commonService.Name)\n\t\t\terr = r.Update(context.TODO(), foundCommonService)\n\t\t}\n\t} else {\n\t\treturn requeueOrNot, err\n\t}\n\n\tsolrNodeNames := instance.GetAllSolrNodeNames()\n\n\thostNameIpMap := make(map[string]string)\n\t// Generate a service for every Node\n\tif instance.UsesIndividualNodeServices() {\n\t\tfor _, nodeName := range solrNodeNames {\n\t\t\terr, ip := reconcileNodeService(r, instance, nodeName)\n\t\t\tif err != nil {\n\t\t\t\treturn requeueOrNot, err\n\t\t\t}\n\t\t\t// This IP Address only needs to be used in the hostname map if the SolrCloud is advertising the external address.\n\t\t\tif instance.Spec.SolrAddressability.External.UseExternalAddress {\n\t\t\t\tif ip == \"\" {\n\t\t\t\t\t// If we are using this IP in the hostAliases of the statefulSet, it needs to be set for every service before trying to update the statefulSet\n\t\t\t\t\tblockReconciliationOfStatefulSet = true\n\t\t\t\t} else {\n\t\t\t\t\thostNameIpMap[instance.AdvertisedNodeHost(nodeName)] = ip\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Generate HeadlessService\n\tif instance.UsesHeadlessService() {\n\t\theadless := 
util.GenerateHeadlessService(instance)\n\t\tif err := controllerutil.SetControllerReference(instance, headless, r.scheme); err != nil {\n\t\t\treturn requeueOrNot, err\n\t\t}\n\n\t\t// Check if the HeadlessService already exists\n\t\tfoundHeadless := &corev1.Service{}\n\t\terr = r.Get(context.TODO(), types.NamespacedName{Name: headless.Name, Namespace: headless.Namespace}, foundHeadless)\n\t\tif err != nil && errors.IsNotFound(err) {\n\t\t\tr.Log.Info(\"Creating HeadlessService\", \"namespace\", headless.Namespace, \"name\", headless.Name)\n\t\t\terr = r.Create(context.TODO(), headless)\n\t\t} else if err == nil && util.CopyServiceFields(headless, foundHeadless) {\n\t\t\t// Update the found HeadlessService and write the result back if there are any changes\n\t\t\tr.Log.Info(\"Updating HeadlessService\", \"namespace\", headless.Namespace, \"name\", headless.Name)\n\t\t\terr = r.Update(context.TODO(), foundHeadless)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn requeueOrNot, err\n\t\t}\n\t}\n\n\t// Generate ConfigMap\n\tconfigMap := util.GenerateConfigMap(instance)\n\tif err := controllerutil.SetControllerReference(instance, configMap, r.scheme); err != nil {\n\t\treturn requeueOrNot, err\n\t}\n\n\t// Check if the ConfigMap already exists\n\tfoundConfigMap := &corev1.ConfigMap{}\n\terr = r.Get(context.TODO(), types.NamespacedName{Name: configMap.Name, Namespace: configMap.Namespace}, foundConfigMap)\n\tif err != nil && errors.IsNotFound(err) {\n\t\tr.Log.Info(\"Creating ConfigMap\", \"namespace\", configMap.Namespace, \"name\", configMap.Name)\n\t\terr = r.Create(context.TODO(), configMap)\n\t} else if err == nil && util.CopyConfigMapFields(configMap, foundConfigMap) {\n\t\t// Update the found ConfigMap and write the result back if there are any changes\n\t\tr.Log.Info(\"Updating ConfigMap\", \"namespace\", configMap.Namespace, \"name\", configMap.Name)\n\t\terr = r.Update(context.TODO(), foundConfigMap)\n\t}\n\tif err != nil {\n\t\treturn requeueOrNot, err\n\t}\n\n\t// Only create stateful set if zkConnectionString can be found (must contain host and port)\n\tif !strings.Contains(newStatus.ZkConnectionString(), \":\") {\n\t\tblockReconciliationOfStatefulSet = true\n\t}\n\n\tif !blockReconciliationOfStatefulSet {\n\t\t// Generate StatefulSet\n\t\tstatefulSet := util.GenerateStatefulSet(instance, &newStatus, hostNameIpMap)\n\t\tif err := controllerutil.SetControllerReference(instance, statefulSet, r.scheme); err != nil {\n\t\t\treturn requeueOrNot, err\n\t\t}\n\n\t\t// Check if the StatefulSet already exists\n\t\tfoundStatefulSet := &appsv1.StatefulSet{}\n\t\terr = r.Get(context.TODO(), types.NamespacedName{Name: statefulSet.Name, Namespace: statefulSet.Namespace}, foundStatefulSet)\n\t\tif err != nil && errors.IsNotFound(err) {\n\t\t\tr.Log.Info(\"Creating StatefulSet\", \"namespace\", statefulSet.Namespace, \"name\", statefulSet.Name)\n\t\t\terr = r.Create(context.TODO(), statefulSet)\n\t\t} else if err == nil {\n\t\t\tif util.CopyStatefulSetFields(statefulSet, foundStatefulSet) {\n\t\t\t\t// Update the found StatefulSet and write the result back if there are any changes\n\t\t\t\tr.Log.Info(\"Updating StatefulSet\", \"namespace\", statefulSet.Namespace, \"name\", statefulSet.Name)\n\t\t\t\terr = r.Update(context.TODO(), foundStatefulSet)\n\t\t\t}\n\t\t\tnewStatus.Replicas = foundStatefulSet.Status.Replicas\n\t\t\tnewStatus.ReadyReplicas = foundStatefulSet.Status.ReadyReplicas\n\t\t}\n\t\tif err != nil {\n\t\t\treturn requeueOrNot, err\n\t\t}\n\t}\n\n\terr = reconcileCloudStatus(r, instance, 
&newStatus)\n\tif err != nil {\n\t\treturn requeueOrNot, err\n\t}\n\n\textAddressabilityOpts := instance.Spec.SolrAddressability.External\n\tif extAddressabilityOpts != nil && extAddressabilityOpts.Method == solr.Ingress {\n\t\t// Generate Ingress\n\t\tingress := util.GenerateIngress(instance, solrNodeNames, IngressBaseUrl)\n\t\tif err := controllerutil.SetControllerReference(instance, ingress, r.scheme); err != nil {\n\t\t\treturn requeueOrNot, err\n\t\t}\n\n\t\t// Check if the Ingress already exists\n\t\tfoundIngress := &extv1.Ingress{}\n\t\terr = r.Get(context.TODO(), types.NamespacedName{Name: ingress.Name, Namespace: ingress.Namespace}, foundIngress)\n\t\tif err != nil && errors.IsNotFound(err) {\n\t\t\tr.Log.Info(\"Creating Common Ingress\", \"namespace\", ingress.Namespace, \"name\", ingress.Name)\n\t\t\terr = r.Create(context.TODO(), ingress)\n\t\t} else if err == nil && util.CopyIngressFields(ingress, foundIngress) {\n\t\t\t// Update the found Ingress and write the result back if there are any changes\n\t\t\tr.Log.Info(\"Updating Common Ingress\", \"namespace\", ingress.Namespace, \"name\", ingress.Name)\n\t\t\terr = r.Update(context.TODO(), foundIngress)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn requeueOrNot, err\n\t\t}\n\t}\n\n\tif !reflect.DeepEqual(instance.Status, newStatus) {\n\t\tinstance.Status = newStatus\n\t\tr.Log.Info(\"Updating SolrCloud Status: \", \"namespace\", instance.Namespace, \"name\", instance.Name)\n\t\terr = r.Status().Update(context.TODO(), instance)\n\t\tif err != nil {\n\t\t\treturn requeueOrNot, err\n\t\t}\n\t}\n\n\treturn requeueOrNot, nil\n}", "title": "" } ]
[ { "docid": "0788ab1a9581a6a6927810db7d6f9355", "score": "0.5565196", "text": "func main() {\n\tif len(os.Args) < 5 {\n\t\tlog.Fatalln(\"usage: k8stest kube_token user_namespace space_name cluster_api_server\")\n\t}\n\n\ttoken := os.Args[1]\n\tuserNamespace := os.Args[2]\n\tspaceName := os.Args[3]\n\tapiServer := os.Args[4]\n\n\tkc, err := NewKubeClient(apiServer, token, userNamespace)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\n\tspace, err := kc.GetSpace(spaceName)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\tfor _, appn := range space.Applications {\n\t\tfmt.Println(\"Application:\", *appn.Name)\n\t\tfor _, env := range appn.Pipeline {\n\t\t\tfmt.Println(\"\\tEnvironment:\", *env.Name)\n\t\t\tfmt.Println(\"\\t\\tCPU Usage:\", *env.Stats.Cpucores.Used)\n\t\t\tfmt.Println(\"\\t\\tMemory Usage:\", *env.Stats.Memory.Used, *env.Stats.Memory.Units)\n\t\t\tfmt.Println(\"\\t\\tPodsStarting:\", *env.Stats.Pods.Starting)\n\t\t\tfmt.Println(\"\\t\\tPodsRunning:\", *env.Stats.Pods.Running)\n\t\t\tfmt.Println(\"\\t\\tPodsStopping:\", *env.Stats.Pods.Stopping)\n\t\t}\n\t}\n\n\tenvs, err := kc.GetEnvironments()\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\tfor _, env := range envs {\n\t\tfmt.Println(\"Environment:\", *env.Name)\n\t\tfmt.Println(\"\\tCPU Used:\", *env.Quota.Cpucores.Used)\n\t\tfmt.Println(\"\\tCPU Limit:\", *env.Quota.Cpucores.Quota)\n\t\tfmt.Println(\"\\tMemory Used:\", *env.Quota.Memory.Used, *env.Quota.Memory.Units)\n\t\tfmt.Println(\"\\tMemory Limit:\", *env.Quota.Memory.Quota)\n\t}\n}", "title": "" }, { "docid": "2ae97d95de50e59e654aa725692ad22a", "score": "0.55288416", "text": "func desiredClusterRole(name string, contour *operatorv1alpha1.Contour) *rbacv1.ClusterRole {\n\tgroupAll := []string{corev1.GroupName}\n\tgroupNet := []string{networkingv1.GroupName}\n\tgroupGateway := []string{gatewayv1alpha1.GroupName}\n\tgroupExt := []string{apiextensionsv1.GroupName}\n\tgroupContour := []string{contourV1GroupName}\n\tverbCGU := []string{\"create\", \"get\", \"update\"}\n\tverbGLW := []string{\"get\", \"list\", \"watch\"}\n\tverbGLWU := []string{\"get\", \"list\", \"watch\", \"update\"}\n\n\tcfgMap := rbacv1.PolicyRule{\n\t\tVerbs: verbCGU,\n\t\tAPIGroups: groupAll,\n\t\tResources: []string{\"configmaps\"},\n\t}\n\tendPt := rbacv1.PolicyRule{\n\t\tVerbs: verbGLW,\n\t\tAPIGroups: groupAll,\n\t\tResources: []string{\"endpoints\"},\n\t}\n\tns := rbacv1.PolicyRule{\n\t\tVerbs: verbGLW,\n\t\tAPIGroups: groupAll,\n\t\tResources: []string{\"namespaces\"},\n\t}\n\tsecret := rbacv1.PolicyRule{\n\t\tVerbs: verbGLW,\n\t\tAPIGroups: groupAll,\n\t\tResources: []string{\"secrets\"},\n\t}\n\tsvc := rbacv1.PolicyRule{\n\t\tVerbs: verbGLW,\n\t\tAPIGroups: groupAll,\n\t\tResources: []string{\"services\"},\n\t}\n\tcrd := rbacv1.PolicyRule{\n\t\tVerbs: []string{\"list\"},\n\t\tAPIGroups: groupExt,\n\t\tResources: []string{\"customresourcedefinitions\"},\n\t}\n\tgateway := rbacv1.PolicyRule{\n\t\tVerbs: verbGLWU,\n\t\tAPIGroups: groupGateway,\n\t\tResources: []string{\"gatewayclasses\", \"gateways\", \"backendpolicies\", \"httproutes\", \"tlsroutes\"},\n\t}\n\tgatewayStatus := rbacv1.PolicyRule{\n\t\tVerbs: verbCGU,\n\t\tAPIGroups: groupGateway,\n\t\tResources: []string{\"gatewayclasses/status\", \"gateways/status\", \"backendpolicies/status\", \"httproutes/status\",\n\t\t\t\"tlsroutes/status\"},\n\t}\n\tunsupported := rbacv1.PolicyRule{\n\t\tVerbs: verbGLW,\n\t\tAPIGroups: groupGateway,\n\t\tResources: []string{\"tcproutes\", \"udproutes\"},\n\t}\n\tunsupportedStatus := 
rbacv1.PolicyRule{\n\t\tVerbs: []string{\"update\"},\n\t\tAPIGroups: groupGateway,\n\t\tResources: []string{\"tcproutes/status\", \"udproutes/status\"},\n\t}\n\ting := rbacv1.PolicyRule{\n\t\tVerbs: verbGLW,\n\t\tAPIGroups: groupNet,\n\t\tResources: []string{\"ingresses\", \"ingressclasses\"},\n\t}\n\tingStatus := rbacv1.PolicyRule{\n\t\tVerbs: verbCGU,\n\t\tAPIGroups: groupNet,\n\t\tResources: []string{\"ingresses/status\"},\n\t}\n\tcntr := rbacv1.PolicyRule{\n\t\tVerbs: verbGLW,\n\t\tAPIGroups: groupContour,\n\t\tResources: []string{\"httpproxies\", \"tlscertificatedelegations\", \"extensionservices\"},\n\t}\n\tcntrStatus := rbacv1.PolicyRule{\n\t\tVerbs: verbCGU,\n\t\tAPIGroups: groupContour,\n\t\tResources: []string{\"httpproxies/status\", \"extensionservices/status\"},\n\t}\n\n\tcr := &rbacv1.ClusterRole{\n\t\tTypeMeta: metav1.TypeMeta{\n\t\t\tKind: \"Role\",\n\t\t},\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: name,\n\t\t},\n\t}\n\tcr.Labels = map[string]string{\n\t\toperatorv1alpha1.OwningContourNameLabel: contour.Name,\n\t\toperatorv1alpha1.OwningContourNsLabel: contour.Namespace,\n\t}\n\tcr.Rules = []rbacv1.PolicyRule{cfgMap, endPt, secret, svc, gateway, gatewayStatus, ing, ingStatus, cntr, cntrStatus,\n\t\tcrd, ns, unsupported, unsupportedStatus}\n\treturn cr\n}", "title": "" }, { "docid": "99cc0c5f7760c39c9cd4e5c7aaecc4fc", "score": "0.545141", "text": "func main() {\n\n\tservice_labels := make(map[string]map[string]string)\n\n\tservice_labels[\"serviceA\"]=map[string]string{\"key1\":\"a\", \"key2\":\"b\", \"key3\":\"c\", \"key4\":\"d\", \"key5\":\"e\",\"can-access-serviceX\":\"true\"}\n\tservice_labels[\"serviceB\"]=map[string]string{\"key1\":\"a\", \"key2\":\"b\", \"key3\":\"c\", \"key4\":\"d\"}\n\tservice_labels[\"serviceC\"]=map[string]string{\"key1\":\"A\", \"key2\":\"B\", \"key3\":\"C\"}\n\tservice_labels[\"serviceD\"]=map[string]string{\"key1\":\"A\", \"key2\":\"B\",\"can-access-serviceX\":\"true\"}\n\tservice_labels[\"serviceE\"]=map[string]string{\"key1\":\"abc\", \"key2\":\"def\",\"can-access-serviceY\":\"true\"}\n\tservice_labels[\"serviceX\"]=map[string]string{}\n\tservice_labels[\"serviceY\"]=map[string]string{}\n\n\trulesFilename := \"examples/rules_for_testing_rule_translation.yaml\"\n\tvar rules= MAPL_engine.YamlReadRulesFromFile(rulesFilename)\n\n\t/*messagesFilename := \"examples/messages_for_rule_translation.yaml\"\n\tvar messages = MAPL_engine.YamlReadMessagesFromFile(messagesFilename)\n\tmessage_attributes := messages.Messages[0]\n\t*/\n\tservice_labels_explicit:=map[string]map[string]string{}\n\terr:=extras.RemoveLabelConditionsFromRules(&rules,service_labels,service_labels_explicit)\n\tfmt.Println(err)\n\tfilename := \"outputs/translated_rules.yaml\"\n\textras.OutputRulesToFile(&rules,filename)\n}", "title": "" }, { "docid": "139fc4ce5acdf532e13a537e12c9f29d", "score": "0.5307753", "text": "func getKubemarkMasterComponentsResourceUsage(ctx context.Context) map[string]*kubemarkResourceUsage {\n\tresult := make(map[string]*kubemarkResourceUsage)\n\t// Get kubernetes component resource usage\n\tsshResult, err := getMasterUsageByPrefix(ctx, \"kube\")\n\tif err != nil {\n\t\tframework.Logf(\"Error when trying to SSH to master machine. Skipping probe. 
%v\", err)\n\t\treturn nil\n\t}\n\tscanner := bufio.NewScanner(strings.NewReader(sshResult))\n\tfor scanner.Scan() {\n\t\tvar cpu float64\n\t\tvar mem uint64\n\t\tvar name string\n\t\tfmt.Sscanf(strings.TrimSpace(scanner.Text()), \"%f %d /usr/local/bin/kube-%s\", &cpu, &mem, &name)\n\t\tif name != \"\" {\n\t\t\t// Gatherer expects pod_name/container_name format\n\t\t\tfullName := name + \"/\" + name\n\t\t\tresult[fullName] = &kubemarkResourceUsage{Name: fullName, MemoryWorkingSetInBytes: mem * 1024, CPUUsageInCores: cpu / 100}\n\t\t}\n\t}\n\t// Get etcd resource usage\n\tsshResult, err = getMasterUsageByPrefix(ctx, \"bin/etcd\")\n\tif err != nil {\n\t\tframework.Logf(\"Error when trying to SSH to master machine. Skipping probe\")\n\t\treturn nil\n\t}\n\tscanner = bufio.NewScanner(strings.NewReader(sshResult))\n\tfor scanner.Scan() {\n\t\tvar cpu float64\n\t\tvar mem uint64\n\t\tvar etcdKind string\n\t\tfmt.Sscanf(strings.TrimSpace(scanner.Text()), \"%f %d /bin/sh -c /usr/local/bin/etcd\", &cpu, &mem)\n\t\tdataDirStart := strings.Index(scanner.Text(), \"--data-dir\")\n\t\tif dataDirStart < 0 {\n\t\t\tcontinue\n\t\t}\n\t\tfmt.Sscanf(scanner.Text()[dataDirStart:], \"--data-dir=/var/%s\", &etcdKind)\n\t\tif etcdKind != \"\" {\n\t\t\t// Gatherer expects pod_name/container_name format\n\t\t\tfullName := \"etcd/\" + etcdKind\n\t\t\tresult[fullName] = &kubemarkResourceUsage{Name: fullName, MemoryWorkingSetInBytes: mem * 1024, CPUUsageInCores: cpu / 100}\n\t\t}\n\t}\n\treturn result\n}", "title": "" }, { "docid": "9bd978691c1ee84c01861478c32584be", "score": "0.51780957", "text": "func Components(ctx context.Context, logger log.Logger, kubeClient client.Client, namespace string, dryRun bool) error {\n\tvar aggregateErr []error\n\topts, dryRunStr := getDeleteOptions(dryRun)\n\tselector := client.MatchingLabels{manifestgen.PartOfLabelKey: manifestgen.PartOfLabelValue}\n\t{\n\t\tvar list appsv1.DeploymentList\n\t\tif err := kubeClient.List(ctx, &list, client.InNamespace(namespace), selector); err == nil {\n\t\t\tfor _, r := range list.Items {\n\t\t\t\tif err := kubeClient.Delete(ctx, &r, opts); err != nil {\n\t\t\t\t\tlogger.Failuref(\"Deployment/%s/%s deletion failed: %s\", r.Namespace, r.Name, err.Error())\n\t\t\t\t\taggregateErr = append(aggregateErr, err)\n\t\t\t\t} else {\n\t\t\t\t\tlogger.Successf(\"Deployment/%s/%s deleted %s\", r.Namespace, r.Name, dryRunStr)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t{\n\t\tvar list corev1.ServiceList\n\t\tif err := kubeClient.List(ctx, &list, client.InNamespace(namespace), selector); err == nil {\n\t\t\tfor _, r := range list.Items {\n\t\t\t\tif err := kubeClient.Delete(ctx, &r, opts); err != nil {\n\t\t\t\t\tlogger.Failuref(\"Service/%s/%s deletion failed: %s\", r.Namespace, r.Name, err.Error())\n\t\t\t\t\taggregateErr = append(aggregateErr, err)\n\t\t\t\t} else {\n\t\t\t\t\tlogger.Successf(\"Service/%s/%s deleted %s\", r.Namespace, r.Name, dryRunStr)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t{\n\t\tvar list networkingv1.NetworkPolicyList\n\t\tif err := kubeClient.List(ctx, &list, client.InNamespace(namespace), selector); err == nil {\n\t\t\tfor _, r := range list.Items {\n\t\t\t\tif err := kubeClient.Delete(ctx, &r, opts); err != nil {\n\t\t\t\t\tlogger.Failuref(\"NetworkPolicy/%s/%s deletion failed: %s\", r.Namespace, r.Name, err.Error())\n\t\t\t\t\taggregateErr = append(aggregateErr, err)\n\t\t\t\t} else {\n\t\t\t\t\tlogger.Successf(\"NetworkPolicy/%s/%s deleted %s\", r.Namespace, r.Name, dryRunStr)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t{\n\t\tvar list 
corev1.ServiceAccountList\n\t\tif err := kubeClient.List(ctx, &list, client.InNamespace(namespace), selector); err == nil {\n\t\t\tfor _, r := range list.Items {\n\t\t\t\tif err := kubeClient.Delete(ctx, &r, opts); err != nil {\n\t\t\t\t\tlogger.Failuref(\"ServiceAccount/%s/%s deletion failed: %s\", r.Namespace, r.Name, err.Error())\n\t\t\t\t\taggregateErr = append(aggregateErr, err)\n\t\t\t\t} else {\n\t\t\t\t\tlogger.Successf(\"ServiceAccount/%s/%s deleted %s\", r.Namespace, r.Name, dryRunStr)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t{\n\t\tvar list rbacv1.ClusterRoleList\n\t\tif err := kubeClient.List(ctx, &list, selector); err == nil {\n\t\t\tfor _, r := range list.Items {\n\t\t\t\tif err := kubeClient.Delete(ctx, &r, opts); err != nil {\n\t\t\t\t\tlogger.Failuref(\"ClusterRole/%s deletion failed: %s\", r.Name, err.Error())\n\t\t\t\t\taggregateErr = append(aggregateErr, err)\n\t\t\t\t} else {\n\t\t\t\t\tlogger.Successf(\"ClusterRole/%s deleted %s\", r.Name, dryRunStr)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t{\n\t\tvar list rbacv1.ClusterRoleBindingList\n\t\tif err := kubeClient.List(ctx, &list, selector); err == nil {\n\t\t\tfor _, r := range list.Items {\n\t\t\t\tif err := kubeClient.Delete(ctx, &r, opts); err != nil {\n\t\t\t\t\tlogger.Failuref(\"ClusterRoleBinding/%s deletion failed: %s\", r.Name, err.Error())\n\t\t\t\t\taggregateErr = append(aggregateErr, err)\n\t\t\t\t} else {\n\t\t\t\t\tlogger.Successf(\"ClusterRoleBinding/%s deleted %s\", r.Name, dryRunStr)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn errors.Reduce(errors.Flatten(errors.NewAggregate(aggregateErr)))\n}", "title": "" }, { "docid": "1169fcb9939bdb3f949c86442f86f42a", "score": "0.5160529", "text": "func TestCreateAndListWithLabel(t *testing.T) {\n\tt.Parallel()\n\n\tttKubectlOptions, kubectlOptions := GetKubectlOptions(t)\n\n\tnamespace := strings.ToLower(random.UniqueId())\n\tk8s.CreateNamespace(t, ttKubectlOptions, namespace)\n\tdefer k8s.DeleteNamespace(t, ttKubectlOptions, namespace)\n\n\ttestRules := []rbacv1.PolicyRule{\n\t\trbacv1.PolicyRule{\n\t\t\tVerbs: []string{\"get\", \"list\"},\n\t\t\tAPIGroups: []string{\"\"},\n\t\t\tResources: []string{\"pods\"},\n\t\t},\n\t}\n\troleName := getTestRoleName(namespace)\n\trole := PrepareRole(\n\t\tnamespace,\n\t\troleName,\n\t\tgetTestLabels(),\n\t\tmap[string]string{},\n\t\ttestRules,\n\t)\n\terr := CreateRole(kubectlOptions, role)\n\trequire.NoError(t, err)\n\n\tlabels := LabelsToListOptions(getTestLabels())\n\troles, err := ListRoles(kubectlOptions, namespace, labels)\n\trequire.NoError(t, err)\n\tassert.NotEmpty(t, roles)\n\tassert.Equal(t, roleName, roles[0].Name)\n}", "title": "" }, { "docid": "9d01c5dd11e647c0552b2d3d747051d6", "score": "0.5152841", "text": "func (i *CockroachDBManifests) GetCockroachDBOperator(namespace string) string {\n\treturn `kind: Namespace\napiVersion: v1\nmetadata:\n name: ` + namespace + `\n---\napiVersion: rbac.authorization.k8s.io/v1beta1\nkind: ClusterRole\nmetadata:\n name: rook-cockroachdb-operator\nrules:\n- apiGroups:\n - \"\"\n resources:\n - pods\n verbs:\n - get\n - list\n- apiGroups:\n - \"\"\n resources:\n - services\n verbs:\n - get\n - list\n - create\n - update\n- apiGroups:\n - apps\n resources:\n - statefulsets\n verbs:\n - create\n- apiGroups:\n - policy\n resources:\n - poddisruptionbudgets\n verbs:\n - create\n- apiGroups:\n - cockroachdb.rook.io\n resources:\n - \"*\"\n verbs:\n - \"*\"\n---\napiVersion: v1\nkind: ServiceAccount\nmetadata:\n name: rook-cockroachdb-operator\n namespace: ` + namespace + `\n---\nkind: 
ClusterRoleBinding\napiVersion: rbac.authorization.k8s.io/v1beta1\nmetadata:\n name: rook-cockroachdb-operator\n namespace: ` + namespace + `\nroleRef:\n apiGroup: rbac.authorization.k8s.io\n kind: ClusterRole\n name: rook-cockroachdb-operator\nsubjects:\n- kind: ServiceAccount\n name: rook-cockroachdb-operator\n namespace: ` + namespace + `\n---\napiVersion: apps/v1beta1\nkind: Deployment\nmetadata:\n name: rook-cockroachdb-operator\n namespace: ` + namespace + `\nspec:\n replicas: 1\n template:\n metadata:\n labels:\n app: rook-cockroachdb-operator\n spec:\n serviceAccountName: rook-cockroachdb-operator\n containers:\n - name: rook-cockroachdb-operator\n image: rook/cockroachdb:master\n args: [\"cockroachdb\", \"operator\"]\n env:\n - name: POD_NAME\n valueFrom:\n fieldRef:\n fieldPath: metadata.name\n - name: POD_NAMESPACE\n valueFrom:\n fieldRef:\n fieldPath: metadata.namespace\n`\n}", "title": "" }, { "docid": "50ddf9a97adb88866e712175aaf19008", "score": "0.5148725", "text": "func GetK8Components(config []byte, ctx string) (*manifests.Component, error) {\n\tcli, err := kubernetes.New(config)\n\tif err != nil {\n\t\treturn nil, ErrGetK8sComponents(err)\n\t}\n\treq := cli.KubeClient.RESTClient().Get().RequestURI(\"/openapi/v2\")\n\tk8version, err := cli.KubeClient.ServerVersion()\n\tif err != nil {\n\t\treturn nil, ErrGetK8sComponents(err)\n\t}\n\tres := req.Do(context.Background())\n\tcontent, err := res.Raw()\n\tif err != nil {\n\t\treturn nil, ErrGetK8sComponents(err)\n\t}\n\tapiResources, err := getAPIRes(cli)\n\tif err != nil {\n\t\treturn nil, ErrGetK8sComponents(err)\n\t}\n\tmanifest := string(content)\n\tman, err := manifests.GenerateComponents(manifest, manifests.K8s, manifests.Config{\n\t\tName: \"Kubernetes\",\n\t\tFilter: manifests.CrdFilter{\n\t\t\tIsJson: true,\n\t\t\tOnlyRes: apiResources, //When crd or api-resource names are directly given, we dont need NameFilter\n\t\t\tRootFilter: []string{\"$.definitions\"},\n\t\t\tVersionFilter: []string{\"$[0]\"},\n\t\t\tGroupFilter: []string{\"$[0]\"},\n\t\t\tItrFilter: []string{\"$..[\\\"x-kubernetes-group-version-kind\\\"][?(@.kind\"},\n\t\t\tItrSpecFilter: []string{\"$[0][?(@[\\\"x-kubernetes-group-version-kind\\\"][0][\\\"kind\\\"]\"},\n\t\t\tResolveFilter: []string{\"--resolve\", \"$\"},\n\t\t\tGField: \"group\",\n\t\t\tVField: \"version\",\n\t\t},\n\t\tK8sVersion: k8version.String(),\n\t\tModifyDefSchema: func(s1, s2 *string) {\n\t\t\tvar schema map[string]interface{}\n\t\t\terr := json.Unmarshal([]byte(*s2), &schema)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tprop, ok := schema[\"properties\"].(map[string]interface{})\n\t\t\tif !ok {\n\t\t\t\treturn\n\t\t\t}\n\t\t\t// The schema generated has few fields that are not required and can break things, so they are removed here\n\t\t\tdelete(prop, \"apiVersion\")\n\t\t\tdelete(prop, \"metadata\")\n\t\t\tdelete(prop, \"kind\")\n\t\t\tdelete(prop, \"status\")\n\t\t\tschema[\"properties\"] = prop\n\t\t\tschema[\"$schema\"] = \"http://json-schema.org/draft-04/schema\"\n\t\t\tb, err := json.Marshal(schema)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t\t*s2 = string(b)\n\t\t},\n\t})\n\tif err != nil {\n\t\treturn nil, ErrGetK8sComponents(err)\n\t}\n\treturn man, nil\n}", "title": "" }, { "docid": "78954fe6ea6b7d202679a43afaf8eaf1", "score": "0.512665", "text": "func ListResource() {\n\tnsList, err := client.Client.K8sClient.\n\t\tCoreV1().\n\t\tNamespaces().\n\t\tList(metav1.ListOptions{})\n\tif err != nil {\n\t\tlog.Errorf(\"list and watch k8s's namespace err: %v\", 
err)\n\t\treturn\n\t} else {\n\t\tif len(nsList.Items) > 0 {\n\t\t\tloop(nsList, \"\")\n\t\t}\n\t}\n\tfor k, v := range Store.NamespaceCache.List {\n\t\tsvcList, err := client.Client.K8sClient.\n\t\t\tCoreV1().\n\t\t\tServices(v[k].ObjectMeta.Name).\n\t\t\tList(metav1.ListOptions{})\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"list and watch k8s's service of namespace [%v] err: %v\", v[k].Name, err)\n\t\t} else {\n\t\t\tloop(svcList, v[k].ObjectMeta.Name)\n\t\t}\n\n\t\tstfList, err := client.Client.K8sClient.\n\t\t\tStatefulSets(v[k].ObjectMeta.Name).\n\t\t\tList(metav1.ListOptions{})\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"list and watch k8s's stfList of namespace [%v] err: %v\", v[k].Name, err)\n\t\t} else {\n\t\t\tloop(stfList, v[k].ObjectMeta.Name)\n\t\t}\n\n\t\tdpList, err := client.Client.K8sClient.\n\t\t\tExtensionsV1beta1().\n\t\t\tDeployments(v[k].ObjectMeta.Name).\n\t\t\tList(metav1.ListOptions{})\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"list and watch k8s's deployment of namespace [%v] err: %v\", v[k].Name, err)\n\t\t} else {\n\t\t\tloop(dpList, v[k].ObjectMeta.Name)\n\t\t}\n\n\t\tcfgMapList, err := client.Client.K8sClient.\n\t\t\tCoreV1().\n\t\t\tConfigMaps(v[k].ObjectMeta.Name).\n\t\t\tList(metav1.ListOptions{})\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"list and watch k8s's configMap of namespace [%v] err: %v\", v[k].Name, err)\n\t\t} else {\n\t\t\tloop(cfgMapList, v[k].ObjectMeta.Name)\n\t\t}\n\n\t\tpodList, err := client.Client.K8sClient.\n\t\t\tCoreV1().\n\t\t\tPods(v[k].ObjectMeta.Name).\n\t\t\tList(metav1.ListOptions{})\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"list and watch k8s's configMap of namespace [%v] err: %v\", v[k].Name, err)\n\t\t} else {\n\t\t\tloop(podList, v[k].ObjectMeta.Name)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "6acd168bb03fec2816fdb512c3e47ffb", "score": "0.5071078", "text": "func Kubeinfo(c *gin.Context) {\n\tvar ns = service.ServiceInfo.Namespaces(\"\").Exclude(\"kube-system\", bootstrap.Args.Namespace)\n\tvar retNs = make([]string, 0, len(ns))\n\tfor _, n := range ns {\n\t\tretNs = append(retNs, n.Name)\n\t}\n\tc.JSON(200, gin.H{\n\t\t\"code\": 0,\n\t\t\"data\": map[string]interface{}{\n\t\t\t\"namespaces\": retNs,\n\t\t},\n\t})\n}", "title": "" }, { "docid": "7bbdfec2ace6a605ffe6779acd30064b", "score": "0.5036402", "text": "func (set RoleSet) CheckKubeGroupsAndUsers(ttl time.Duration, overrideTTL bool, matchers ...RoleMatcher) ([]string, []string, error) {\n\tgroups := make(map[string]struct{})\n\tusers := make(map[string]struct{})\n\tvar matchedTTL bool\n\tfor _, role := range set {\n\t\tok, err := RoleMatchers(matchers).MatchAll(role, types.Allow)\n\t\tif err != nil {\n\t\t\treturn nil, nil, trace.Wrap(err)\n\t\t}\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\n\t\tmaxSessionTTL := role.GetOptions().MaxSessionTTL.Value()\n\t\tif overrideTTL || (ttl <= maxSessionTTL && maxSessionTTL != 0) {\n\t\t\tmatchedTTL = true\n\t\t\tfor _, group := range role.GetKubeGroups(types.Allow) {\n\t\t\t\tgroups[group] = struct{}{}\n\t\t\t}\n\t\t\tfor _, user := range role.GetKubeUsers(types.Allow) {\n\t\t\t\tusers[user] = struct{}{}\n\t\t\t}\n\t\t}\n\t}\n\tfor _, role := range set {\n\t\tok, _, err := RoleMatchers(matchers).MatchAny(role, types.Deny)\n\t\tif err != nil {\n\t\t\treturn nil, nil, trace.Wrap(err)\n\t\t}\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\t\tfor _, group := range role.GetKubeGroups(types.Deny) {\n\t\t\tdelete(groups, group)\n\t\t}\n\t\tfor _, user := range role.GetKubeUsers(types.Deny) {\n\t\t\tdelete(users, user)\n\t\t}\n\t}\n\tif !matchedTTL {\n\t\treturn 
nil, nil, trace.AccessDenied(\"this user cannot request kubernetes access for %v\", ttl)\n\t}\n\tif len(groups) == 0 && len(users) == 0 {\n\t\treturn nil, nil, trace.NotFound(\"this user cannot request kubernetes access, has no assigned groups or users\")\n\t}\n\treturn utils.StringsSliceFromSet(groups), utils.StringsSliceFromSet(users), nil\n}", "title": "" }, { "docid": "654b701b656ada87f4100bfd6c025864", "score": "0.49857682", "text": "func deleteUserNamespaces(kubeConfig []byte, logger *logrus.Entry) error {\n\tclient, err := k8sclient.NewClientFromKubeConfig(kubeConfig)\n\tif err != nil {\n\t\treturn err\n\t}\n\tnamespaces, err := client.CoreV1().Namespaces().List(metav1.ListOptions{})\n\tif err != nil {\n\t\treturn emperror.Wrap(err, \"could not list namespaces to delete\")\n\t}\n\n\tfor _, ns := range namespaces.Items {\n\t\tswitch ns.Name {\n\t\tcase \"default\", \"kube-system\", \"kube-public\":\n\t\t\tcontinue\n\t\t}\n\t\terr := retry(func() error {\n\t\t\tlogger.Infof(\"deleting kubernetes namespace %q\", ns.Name)\n\t\t\terr := client.CoreV1().Namespaces().Delete(ns.Name, &metav1.DeleteOptions{})\n\t\t\tif err != nil {\n\t\t\t\treturn emperror.Wrapf(err, \"failed to delete %q namespace\", ns.Name)\n\t\t\t}\n\t\t\treturn nil\n\t\t}, 3, 1)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\terr = retry(func() error {\n\t\tnamespaces, err := client.CoreV1().Namespaces().List(metav1.ListOptions{})\n\t\tif err != nil {\n\t\t\treturn emperror.Wrap(err, \"could not list remaining namespaces\")\n\t\t}\n\t\tleft := []string{}\n\t\tfor _, ns := range namespaces.Items {\n\t\t\tswitch ns.Name {\n\t\t\tcase \"default\", \"kube-system\", \"kube-public\":\n\t\t\t\tcontinue\n\t\t\tdefault:\n\t\t\t\tlogger.Infof(\"namespace %q still %s\", ns.Name, ns.Status)\n\t\t\t\tleft = append(left, ns.Name)\n\t\t\t}\n\t\t}\n\t\tif len(left) > 0 {\n\t\t\treturn emperror.With(errors.Errorf(\"namespaces remained after deletion: %v\", left), \"namespaces\", left)\n\t\t}\n\t\treturn nil\n\t}, 20, 30)\n\treturn err\n}", "title": "" }, { "docid": "82a1c8016a7198e3a1f65b76c99f12a4", "score": "0.49721232", "text": "func kubeadmConfigV1Beta2Template() string {\n\tvar tmpl = \"apiVersion: kubeadm.k8s.io/v1beta2\\n\" +\n\t\t\"kind: InitConfiguration\\n\" +\n\t\t\"{{ if .APIServerAdvertiseAddress }}\\n\" +\n\t\t\"localAPIEndpoint:\\n\" +\n\t\t\" advertiseAddress: \\\"{{ .APIServerAdvertiseAddress }}\\\"\\n\" +\n\t\t\" bindPort: {{ .APIServerBindPort }}{{end}}\\n\" +\n\t\t\"---\\n\" +\n\t\t\"apiVersion: kubeadm.k8s.io/v1beta2\\n\" +\n\t\t\"kind: ClusterConfiguration\\n\" +\n\t\t\"clusterName: \\\"{{ .KubeadmConfig.ClusterName }}\\\"\\n\" +\n\t\t\"imageRepository: {{ .KubeadmConfig.ImageRepository }}\\n\" +\n\t\t\"{{ if .KubeadmConfig.UseHyperKubeImage }}useHyperKubeImage: true{{end}}\\n\" +\n\t\t\"kubernetesVersion: \\\"{{ .KubeadmConfig.KubernetesVersion }}\\\"\\n\" +\n\t\t\"networking:\\n\" +\n\t\t\" serviceSubnet: \\\"{{ .KubeadmConfig.Networking.ServiceSubnet }}\\\"\\n\" +\n\t\t\" podSubnet: \\\"{{ .KubeadmConfig.Networking.PodSubnet }}\\\"\\n\" +\n\t\t\" dnsDomain: \\\"cluster.local\\\"\\n\" +\n\t\t\"{{ if .KubeadmConfig.ControlPlaneEndpoint }}controlPlaneEndpoint: \\\"{{ .KubeadmConfig.ControlPlaneEndpoint }}\\\"{{end}}\\n\" +\n\t\t\"certificatesDir: \\\"/etc/kubernetes/pki\\\"\\n\" +\n\t\t\"apiServer:\\n\" +\n\t\t\" {{ if .KubeadmConfig.APIServer.CertSANs }}\\n\" +\n\t\t\" certSANs:\\n\" +\n\t\t\" {{range $k, $san := .KubeadmConfig.APIServer.CertSANs}} - \\\"{{ $san }}\\\"\\n\" +\n\t\t\" {{end}}{{end}}\\n\" 
+\n\t\t\" extraArgs:\\n\" +\n\t\t\" profiling: \\\"false\\\"\\n\" +\n\t\t\" enable-admission-plugins: \\\"{{ .KubeadmConfig.APIServer.ExtraArgs.EnableAdmissionPlugins }}\\\"\\n\" +\n\t\t\" disable-admission-plugins: \\\"{{ .KubeadmConfig.APIServer.ExtraArgs.DisableAdmissionPlugins }}\\\"\\n\" +\n\t\t\" admission-control-config-file: \\\"{{ .KubeadmConfig.APIServer.ExtraArgs.AdmissionControlConfigFile }}\\\"\\n\" +\n\t\t\" audit-log-path: \\\"{{ .KubeadmConfig.APIServer.ExtraArgs.AuditLogPath }}\\\"\\n\" +\n\t\t\" audit-log-maxage: \\\"30\\\"\\n\" +\n\t\t\" audit-log-maxbackup: \\\"10\\\"\\n\" +\n\t\t\" audit-log-maxsize: \\\"100\\\"\\n\" +\n\t\t\" {{ if .KubeadmConfig.APIServer.ExtraArgs.AuditPolicyFile }}audit-policy-file: \\\"{{ .KubeadmConfig.APIServer.ExtraArgs.AuditPolicyFile }}\\\"{{ end }}\\n\" +\n\t\t\" {{ if .KubeadmConfig.APIServer.ExtraArgs.EtcdPrefix }}etcd-prefix: \\\"{{ .KubeadmConfig.APIServer.ExtraArgs.EtcdPrefix }}\\\"{{end}}\\n\" +\n\t\t\" service-account-lookup: \\\"true\\\"\\n\" +\n\t\t\" kubelet-certificate-authority: \\\"{{ .KubeadmConfig.APIServer.ExtraArgs.KubeletCertificateAuthority }}\\\"\\n\" +\n\t\t\" tls-cipher-suites: \\\"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_128_GCM_SHA256\\\"\\n\" +\n\t\t\" encryption-provider-config: \\\"/etc/kubernetes/admission-control/encryption-provider-config.yaml\\\"\\n\" +\n\t\t\" {{ if (and .KubeadmConfig.APIServer.ExtraArgs.OIDCIssuerURL .KubeadmConfig.APIServer.ExtraArgs.OIDCClientID) }}\\n\" +\n\t\t\" oidc-issuer-url: \\\"{{ .KubeadmConfig.APIServer.ExtraArgs.OIDCIssuerURL }}\\\"\\n\" +\n\t\t\" oidc-client-id: \\\"{{ .KubeadmConfig.APIServer.ExtraArgs.OIDCClientID }}\\\"\\n\" +\n\t\t\" oidc-username-claim: \\\"email\\\"\\n\" +\n\t\t\" oidc-username-prefix: \\\"oidc:\\\"\\n\" +\n\t\t\" oidc-groups-claim: \\\"groups\\\"{{end}}\\n\" +\n\t\t\" {{ if .KubeadmConfig.APIServer.ExtraArgs.CloudProvider }}cloud-provider: \\\"{{ .KubeadmConfig.APIServer.ExtraArgs.CloudProvider }}\\\"\\n\" +\n\t\t\" {{ if .KubeadmConfig.APIServer.ExtraArgs.CloudConfig }}cloud-config: {{ .KubeadmConfig.APIServer.ExtraArgs.CloudConfig }}{{end}}{{end}}\\n\" +\n\t\t\" extraVolumes:\\n\" +\n\t\t\" {{range $k, $volume := .KubeadmConfig.APIServer.ExtraVolumes }}\\n\" +\n\t\t\" - name: {{ $volume.Name }}\\n\" +\n\t\t\" hostPath: {{ $volume.HostPath }}\\n\" +\n\t\t\" mountPath: {{ $volume.MountPath }}\\n\" +\n\t\t\" pathType: {{ $volume.PathType }}\\n\" +\n\t\t\" readOnly: {{ $volume.ReadOnly }}{{end}}\\n\" +\n\t\t\"scheduler:\\n\" +\n\t\t\" extraArgs:\\n\" +\n\t\t\" profiling: \\\"false\\\"\\n\" +\n\t\t\"controllerManager:\\n\" +\n\t\t\" extraArgs:\\n\" +\n\t\t\" cluster-name: \\\"{{ .KubeadmConfig.ControllerManager.ExtraArgs.ClusterName }}\\\"\\n\" +\n\t\t\" profiling: \\\"false\\\"\\n\" +\n\t\t\" terminated-pod-gc-threshold: \\\"10\\\"\\n\" +\n\t\t\" feature-gates: \\\"RotateKubeletServerCertificate=true\\\"\\n\" +\n\t\t\" {{ if .KubeadmConfig.ControllerManager.ExtraArgs.ClusterSigningCertFile }}cluster-signing-cert-file: {{ .KubeadmConfig.ControllerManager.ExtraArgs.ClusterSigningCertFile }}{{end}}\\n\" +\n\t\t\" {{ if .KubeadmConfig.ControllerManager.ExtraArgs.CloudProvider }}cloud-provider: \\\"{{ .KubeadmConfig.ControllerManager.ExtraArgs.CloudProvider }}\\\"\\n\" +\n\t\t\" {{ if 
.KubeadmConfig.ControllerManager.ExtraArgs.CloudConfig }}cloud-config: \\\"{{ .KubeadmConfig.ControllerManager.ExtraArgs.CloudConfig }}\\\"\\n\" +\n\t\t\" extraVolumes:\\n\" +\n\t\t\" {{range $k, $volume := .KubeadmConfig.ControllerManager.ExtraVolumes }}\\n\" +\n\t\t\" - name: {{ $volume.Name }}\\n\" +\n\t\t\" hostPath: {{ $volume.HostPath }}\\n\" +\n\t\t\" mountPath: {{ $volume.MountPath }}\\n\" +\n\t\t\" pathType: {{ $volume.PathType }}\\n\" +\n\t\t\" readOnly: {{ $volume.ReadOnly }}{{end}}{{end}}{{end}}\\n\" +\n\t\t\"etcd:\\n\" +\n\t\t\" {{ if .KubeadmConfig.Etcd.External.Endpoints }}\\n\" +\n\t\t\" external:\\n\" +\n\t\t\" endpoints:\\n\" +\n\t\t\" {{range $k, $endpoint := .KubeadmConfig.Etcd.External.Endpoints }}\\n\" +\n\t\t\" - caFile: {{ $endpoint.CAFile }}\\n\" +\n\t\t\" certFile: {{ $endpoint.CertFile }}\\n\" +\n\t\t\" keyFile: {{ $endpoint.KeyFile }}{{end}}\\n\" +\n\t\t\" {{else}}\\n\" +\n\t\t\" local:\\n\" +\n\t\t\" extraArgs:\\n\" +\n\t\t\" peer-auto-tls: \\\"false\\\"\\n\" +\n\t\t\" {{end}}\\n\" +\n\t\t\"\"\n\treturn tmpl\n}", "title": "" }, { "docid": "a4b4abd13629bce298edab62bb8d994a", "score": "0.49623498", "text": "func generateConfigMapString(requestedStrategies []deschedulerv1alpha1.Strategy) string {\n\tstrategiesPolicyString := \"\"\n\t// There is no need to do validation here. By the time, we reach here, validation would have already happened.\n\tfor _, strategy := range requestedStrategies {\n\t\tswitch strings.ToLower(strategy.Name) {\n\t\tcase \"duplicates\":\n\t\t\tstrategiesPolicyString = strategiesPolicyString + \" \\\"RemoveDuplicates\\\":\\n enabled: true\\n\"\n\t\tcase \"interpodantiaffinity\":\n\t\t\tstrategiesPolicyString = strategiesPolicyString + \" \\\"RemovePodsViolatingInterPodAntiAffinity\\\":\\n enabled: true\\n\"\n\t\tcase \"lownodeutilization\":\n\t\t\tstrategiesPolicyString = strategiesPolicyString + \" \\\"LowNodeUtilization\\\":\\n enabled: true\\n params:\\n\" + \" nodeResourceUtilizationThresholds:\\n\"\n\t\t\tparamString := \"\"\n\t\t\tif len(strategy.Params) > 0 {\n\t\t\t\t// TODO: Make this more generic using methods and interfaces.\n\t\t\t\tparamString = addStrategyParamsForLowNodeUtilization(strategy.Params)\n\t\t\t\tstrategiesPolicyString = strategiesPolicyString + paramString\n\t\t\t}\n\t\tcase \"nodeaffinity\":\n\t\t\tstrategiesPolicyString = strategiesPolicyString + \" \\\"RemovePodsViolatingNodeAffinity\\\":\\n enabled: true\\n params:\\n nodeAffinityType:\\n - requiredDuringSchedulingIgnoredDuringExecution\\n\"\n\t\tdefault:\n\t\t\tstrategiesPolicyString = \"\" // Accept no other strategy except for the valid ones.\n\t\t}\n\t}\n\t// At last, we will have a \"\\n\", which we don't need.\n\treturn strings.TrimSuffix(strategiesPolicyString, \"\\n\")\n}", "title": "" }, { "docid": "229ccd202d1204bab1b45e450d447e91", "score": "0.49587983", "text": "func listPossibleResticDaemonsets(clientset *kubernetes.Clientset, namespace string) ([]v1.DaemonSet, error) {\n\tdaemonsets, err := clientset.AppsV1().DaemonSets(namespace).List(context.TODO(), metav1.ListOptions{\n\t\tLabelSelector: \"component=velero\",\n\t})\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to list daemonsets\")\n\t}\n\n\thelmDaemonsets, err := clientset.AppsV1().DaemonSets(namespace).List(context.TODO(), metav1.ListOptions{\n\t\tLabelSelector: \"app.kubernetes.io/name=velero\",\n\t})\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to list helm daemonsets\")\n\t}\n\n\treturn append(daemonsets.Items, helmDaemonsets.Items...), nil\n}", "title": "" 
}, { "docid": "c0e9aa4d8f936475c62caa844244d560", "score": "0.49371436", "text": "func TestApplyResetFields(t *testing.T) {\n\tserver, err := apiservertesting.StartTestServer(t, apiservertesting.NewDefaultTestServerOptions(), []string{\"--disable-admission-plugins\", \"ServiceAccount,TaintNodesByCondition\"}, framework.SharedEtcd())\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer server.TearDownFn()\n\n\tclient, err := kubernetes.NewForConfig(server.ClientConfig)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdynamicClient, err := dynamic.NewForConfig(server.ClientConfig)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// create CRDs so we can make sure that custom resources do not get lost\n\tetcd.CreateTestCRDs(t, apiextensionsclientset.NewForConfigOrDie(server.ClientConfig), false, etcd.GetCustomResourceDefinitionData()...)\n\n\tif _, err := client.CoreV1().Namespaces().Create(context.TODO(), &v1.Namespace{ObjectMeta: metav1.ObjectMeta{Name: resetFieldsNamespace}}, metav1.CreateOptions{}); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tcreateData := etcd.GetEtcdStorageDataForNamespace(resetFieldsNamespace)\n\t// gather resources to test\n\t_, resourceLists, err := client.Discovery().ServerGroupsAndResources()\n\tif err != nil {\n\t\tt.Fatalf(\"Failed to get ServerGroupsAndResources with error: %+v\", err)\n\t}\n\n\tfor _, resourceList := range resourceLists {\n\t\tfor _, resource := range resourceList.APIResources {\n\t\t\tif !strings.HasSuffix(resource.Name, \"/status\") {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tmapping, err := createMapping(resourceList.GroupVersion, resource)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\t\t\tt.Run(mapping.Resource.String(), func(t *testing.T) {\n\t\t\t\tif _, ok := resetFieldsSkippedResources[mapping.Resource.Resource]; ok {\n\t\t\t\t\tt.Skip()\n\t\t\t\t}\n\n\t\t\t\tnamespace := resetFieldsNamespace\n\t\t\t\tif mapping.Scope == meta.RESTScopeRoot {\n\t\t\t\t\tnamespace = \"\"\n\t\t\t\t}\n\n\t\t\t\t// assemble first object\n\t\t\t\tstatus, ok := statusData[mapping.Resource]\n\t\t\t\tif !ok {\n\t\t\t\t\tstatus = statusDefault\n\t\t\t\t}\n\n\t\t\t\tresource, ok := createData[mapping.Resource]\n\t\t\t\tif !ok {\n\t\t\t\t\tt.Fatalf(\"no test data for %s. 
Please add a test for your new type to etcd.GetEtcdStorageData() or getResetFieldsEtcdStorageData()\", mapping.Resource)\n\t\t\t\t}\n\n\t\t\t\tobj1 := unstructured.Unstructured{}\n\t\t\t\tif err := json.Unmarshal([]byte(resource.Stub), &obj1.Object); err != nil {\n\t\t\t\t\tt.Fatal(err)\n\t\t\t\t}\n\t\t\t\tif err := json.Unmarshal([]byte(status), &obj1.Object); err != nil {\n\t\t\t\t\tt.Fatal(err)\n\t\t\t\t}\n\n\t\t\t\tname := obj1.GetName()\n\t\t\t\tobj1.SetAPIVersion(mapping.GroupVersionKind.GroupVersion().String())\n\t\t\t\tobj1.SetKind(mapping.GroupVersionKind.Kind)\n\t\t\t\tobj1.SetName(name)\n\n\t\t\t\t// apply the spec of the first object\n\t\t\t\t_, err = dynamicClient.\n\t\t\t\t\tResource(mapping.Resource).\n\t\t\t\t\tNamespace(namespace).\n\t\t\t\t\tApply(context.TODO(), name, &obj1, metav1.ApplyOptions{FieldManager: \"fieldmanager1\"})\n\t\t\t\tif err != nil {\n\t\t\t\t\tt.Fatalf(\"Failed to apply obj1: %v\", err)\n\t\t\t\t}\n\n\t\t\t\t// create second object\n\t\t\t\tobj2 := &unstructured.Unstructured{}\n\t\t\t\tobj1.DeepCopyInto(obj2)\n\t\t\t\tif err := json.Unmarshal([]byte(resetFieldsSpecData[mapping.Resource]), &obj2.Object); err != nil {\n\t\t\t\t\tt.Fatal(err)\n\t\t\t\t}\n\t\t\t\tstatus2, ok := resetFieldsStatusData[mapping.Resource]\n\t\t\t\tif !ok {\n\t\t\t\t\tstatus2 = resetFieldsStatusDefault\n\t\t\t\t}\n\t\t\t\tif err := json.Unmarshal([]byte(status2), &obj2.Object); err != nil {\n\t\t\t\t\tt.Fatal(err)\n\t\t\t\t}\n\n\t\t\t\tif reflect.DeepEqual(obj1, obj2) {\n\t\t\t\t\tt.Fatalf(\"obj1 and obj2 should not be equal %v\", obj2)\n\t\t\t\t}\n\n\t\t\t\t// apply the status of the second object\n\t\t\t\t// this won't conflict if resetfields are set correctly\n\t\t\t\t// and will conflict if they are not\n\t\t\t\t_, err = dynamicClient.\n\t\t\t\t\tResource(mapping.Resource).\n\t\t\t\t\tNamespace(namespace).\n\t\t\t\t\tApplyStatus(context.TODO(), name, obj2, metav1.ApplyOptions{FieldManager: \"fieldmanager2\"})\n\t\t\t\tif err != nil {\n\t\t\t\t\tt.Fatalf(\"Failed to apply obj2: %v\", err)\n\t\t\t\t}\n\n\t\t\t\t// skip checking for conflicts on resources\n\t\t\t\t// that will never have conflicts\n\t\t\t\tif _, ok = noConflicts[mapping.Resource.Resource]; !ok {\n\t\t\t\t\tvar objRet *unstructured.Unstructured\n\n\t\t\t\t\t// reapply second object to the spec endpoint\n\t\t\t\t\t// that should fail with a conflict\n\t\t\t\t\tobjRet, err = dynamicClient.\n\t\t\t\t\t\tResource(mapping.Resource).\n\t\t\t\t\t\tNamespace(namespace).\n\t\t\t\t\t\tApply(context.TODO(), name, obj2, metav1.ApplyOptions{FieldManager: \"fieldmanager2\"})\n\t\t\t\t\terr = expectConflict(objRet, err, dynamicClient, mapping.Resource, namespace, name)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tt.Fatalf(\"Did not get expected conflict in spec of %s %s/%s: %v\", mapping.Resource, namespace, name, err)\n\t\t\t\t\t}\n\n\t\t\t\t\t// reapply first object to the status endpoint\n\t\t\t\t\t// that should fail with a conflict\n\t\t\t\t\tobjRet, err = dynamicClient.\n\t\t\t\t\t\tResource(mapping.Resource).\n\t\t\t\t\t\tNamespace(namespace).\n\t\t\t\t\t\tApplyStatus(context.TODO(), name, &obj1, metav1.ApplyOptions{FieldManager: \"fieldmanager1\"})\n\t\t\t\t\terr = expectConflict(objRet, err, dynamicClient, mapping.Resource, namespace, name)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tt.Fatalf(\"Did not get expected conflict in status of %s %s/%s: %v\", mapping.Resource, namespace, name, err)\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// cleanup\n\t\t\t\trsc := dynamicClient.Resource(mapping.Resource).Namespace(namespace)\n\t\t\t\tif 
err := rsc.Delete(context.TODO(), name, *metav1.NewDeleteOptions(0)); err != nil {\n\t\t\t\t\tt.Fatalf(\"deleting final object failed: %v\", err)\n\t\t\t\t}\n\t\t\t})\n\t\t}\n\t}\n}", "title": "" }, { "docid": "689e31b9f079150d85cc6c9ed6a6f2e6", "score": "0.49104628", "text": "func cleanupList(list []types.Pod) map[string]string {\n\tpods := make(map[string]string)\n\tfor _, pod := range list {\n\t\tapp := pod.ObjectMeta.Labels[\"app\"]\n\t\tif _, ok := pods[app]; ok {\n\t\t\tcontinue\n\t\t}\n\t\t// TODO - can refactor this\n\t\tif (strings.HasPrefix(app, \"api\") || strings.HasPrefix(app, \"svc\")) && !strings.HasSuffix(app, \"docs-site\") {\n\t\t\tpods[app] = pod.ObjectMeta.Labels[\"version\"]\n\t\t}\n\t}\n\n\treturn pods\n}", "title": "" }, { "docid": "d1a673fac16df21eca86f7c3dbda378b", "score": "0.49014518", "text": "func List(ctx context.Context, config *Config) (map[string]string, error) {\n\tclientConfig := kubernetes.NewClientConfig(config.KubeConfig, config.ContextName)\n\tcc, err := clientConfig.ClientConfig()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tclientset, err := corev1client.NewForConfig(cc)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar namespaces []string\n\t// A specific namespace is ignored if all-namespaces is provided.\n\tif config.AllNamespaces {\n\t\tnamespaces = []string{\"\"}\n\t} else {\n\t\tnamespaces = config.Namespaces\n\t\tif len(namespaces) == 0 {\n\t\t\tn, _, err := clientConfig.Namespace()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.Wrap(err, \"unable to get default namespace\")\n\t\t\t}\n\t\t\tnamespaces = []string{n}\n\t\t}\n\t}\n\n\tlabels := make(map[string]string)\n\toptions := metav1.ListOptions{}\n\n\twg := sync.WaitGroup{}\n\n\twg.Add(len(namespaces))\n\n\t// Concurrently iterate through provided namespaces.\n\tfor _, n := range namespaces {\n\t\tgo func(n string) {\n\t\t\tdefer wg.Done()\n\n\t\t\tpods, err := clientset.Pods(n).List(ctx, options)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tmatch := \"app.kubernetes.io/instance\"\n\t\t\t// Iterate through pods in namespace, looking for matching labels.\n\t\t\tfor _, pod := range pods.Items {\n\t\t\t\tkey := pod.Labels[match]\n\n\t\t\t\tif key == \"\" {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\tlabels[key] = match\n\t\t\t}\n\t\t}(n)\n\t}\n\n\twg.Wait()\n\n\treturn labels, nil\n}", "title": "" }, { "docid": "df380a69f54363e720e773978d9647cb", "score": "0.48771814", "text": "func generateKubeAPIServerConfig(nodeID string) error {\n\tconst (\n\t\t// Environment variables\n\t\tinsecurePortVar = \"INSECURE_PORT\"\n\t\tetcdServersVar = \"ETCD_SERVERS\"\n\t\tetcdCAFileVar = \"ETCD_CAFILE\"\n\t\tetcdCertFileVar = \"ETCD_CERTFILE\"\n\t\tetcdKeyFileVar = \"ETCD_KEYFILE\"\n\t\tkubeletCAFileVar = \"KUBELET_CAFILE\"\n\t\tkubeletCertFileVar = \"KUBELET_CERTFILE\"\n\t\tkubeletKeyFileVar = \"KUBELET_KEYFILE\"\n\t\tserviceClusterVar = \"SERVICE_CLUSTER\"\n\t\tserviceAccountKeyFileVar = \"SERVICE_ACCOUNT_KEYFILE\"\n\t\truntimeConfigVar = \"RUNTIME_CONFIG\"\n\t\tallowPrivilegedVar = \"ALLOW_PRIVILEGED\"\n\t\tadmissionControlVar = \"ENABLE_ADMISSION_CONTROLLERS\"\n\t\tadvertiseAddrVar = \"ADVERTISE_ADDR_VAR\"\n\n\t\t// Parameters\n\t\tinsecurePortParam = \"--insecure-port\"\n\t\tetcdServersParam = \"--etcd-servers\"\n\t\tetcdCAFileParam = \"--etcd-cafile\" // used to validate the cert presented by etcd\n\t\tetcdCertFileParam = \"--etcd-certfile\" // used to authenticate to etcd\n\t\tetcdKeyFileParam = \"--etcd-keyfile\" // used to authenticate to etcd\n\t\tkubeletCAFileParam = 
\"--kubelet-certificate-authority\" // used to validate cert presented by kubelet\n\t\tkubeletCertFileParam = \"--kubelet-client-certificate\" // used to authenticate to kubelet (as a client)\n\t\tkubeletKeyFileParam = \"--kubelet-client-key\" // used to authenticate to kubelet (as a client)\n\t\tserviceAccountKeyFileParam = \"--service-account-key-file\" // used to sign service account tokens\n\t\tserviceClusterParam = \"--service-cluster-ip-range\"\n\t\truntimeConfigParam = \"--runtime-config\"\n\t\tallowPrivilegedParam = \"--allow-privileged\"\n\t\tadmissionControlParam = \"--admission-control\"\n\t\tadvertiseAddrParam = \"--advertise-address\" // ApiServer address as advertised to other cluster members\n\t)\n\n\tadmissionControllers := []string{\n\t\t\"NamespaceLifecycle\",\n\t\t\"NodeRestriction\",\n\t\t\"LimitRanger\",\n\t\t\"ServiceAccount\",\n\t\t\"DefaultStorageClass\",\n\t\t\"ResourceQuota\",\n\t}\n\n\tetcdCertFile, etcdKeyFile, etcdCACertFile := etcd.GetEtcdClientCredentialsPaths()\n\t// ApiServer gets a single set of credentials that it uses to:\n\t// - authenticate itself as a client when opening a connection to Kubelets\n\t// - authenticate itself as a server when it receives incoming connections from controllers or kubelets\n\tapiServerCertFile, apiServerKeyFile, apiServerCACertFile := certs.GetTLSCredentialsPaths(globals.KubernetesAPIServerPKIDir)\n\t// Key to validate service accounts\n\t_, serviceAccountKeyFile, _ := certs.GetTLSCredentialsPaths(globals.KubernetesServiceAccountsPKIDir)\n\n\tcfgMap := make(map[string]string)\n\tcfgMap[anonymousAuthVar] = fmt.Sprintf(\"%s=%s\", anonymousAuthParam, \"false\")\n\tcfgMap[authzModeVar] = fmt.Sprintf(\"%s=%s\", authzModeParam, \"Node,RBAC\")\n\tcfgMap[insecurePortVar] = fmt.Sprintf(\"%s %s\", insecurePortParam, \"0\") // disable unauthenticated access\n\tcfgMap[etcdServersVar] = fmt.Sprintf(\"%s %s\", etcdServersParam, strings.Join(env.KVServers, \",\"))\n\tcfgMap[etcdCAFileVar] = fmt.Sprintf(\"%s %s\", etcdCAFileParam, etcdCACertFile)\n\tcfgMap[etcdKeyFileVar] = fmt.Sprintf(\"%s %s\", etcdKeyFileParam, etcdKeyFile)\n\tcfgMap[etcdCertFileVar] = fmt.Sprintf(\"%s %s\", etcdCertFileParam, etcdCertFile)\n\tcfgMap[kubeletCAFileVar] = fmt.Sprintf(\"%s %s\", kubeletCAFileParam, apiServerCACertFile)\n\tcfgMap[kubeletKeyFileVar] = fmt.Sprintf(\"%s %s\", kubeletKeyFileParam, apiServerKeyFile)\n\tcfgMap[kubeletCertFileVar] = fmt.Sprintf(\"%s %s\", kubeletCertFileParam, apiServerCertFile)\n\tcfgMap[tlsKeyFileVar] = fmt.Sprintf(\"%s %s\", tlsKeyFileParam, apiServerKeyFile)\n\tcfgMap[tlsCertFileVar] = fmt.Sprintf(\"%s %s\", tlsCertFileParam, apiServerCertFile)\n\tcfgMap[clientCAFileVar] = fmt.Sprintf(\"%s %s\", clientCAFileParam, apiServerCACertFile)\n\tcfgMap[minTLSVersionVar] = fmt.Sprintf(\"%s %s\", minTLSVersionParam, minTLSVersionVal)\n\tcfgMap[serviceAccountKeyFileVar] = fmt.Sprintf(\"%s %s\", serviceAccountKeyFileParam, serviceAccountKeyFile)\n\tcfgMap[serviceClusterVar] = fmt.Sprintf(\"%s %s\", serviceClusterParam, serviceClusterIPRange)\n\tcfgMap[runtimeConfigVar] = fmt.Sprintf(\"%s %s\", runtimeConfigParam, enableDaemonSet)\n\tcfgMap[allowPrivilegedVar] = allowPrivilegedParam\n\tcfgMap[admissionControlVar] = fmt.Sprintf(\"%s %s\", admissionControlParam, strings.Join(admissionControllers, \",\"))\n\n\t// nodeID should be an IP address or resolve to a specific IP address (validated when cluster is formed).\n\t// That is what we want K8s ApiServer to advertise to rest of the cluster.\n\t// If we cannot resolve, we just leave 
advertise-addr empty and let K8s ApiServer pick a default interface, if it exists.\n\tnodeIP := net.ParseIP(nodeID)\n\tif nodeIP != nil && nodeIP.IsGlobalUnicast() {\n\t\tcfgMap[advertiseAddrVar] = fmt.Sprintf(\"%s %s\", advertiseAddrParam, nodeID)\n\t} else { // try to resolve\n\t\taddrs, err := net.LookupHost(nodeID)\n\t\tif err == nil {\n\t\t\tcfgMap[advertiseAddrVar] = fmt.Sprintf(\"%s %s\", advertiseAddrParam, addrs[0])\n\t\t} else {\n\t\t\tlog.Errorf(\"Unable to get an IP for nodeID %s, not setting param %s for K8s ApiServer\", nodeID, advertiseAddrParam)\n\t\t}\n\t}\n\treturn systemd.WriteCfgMapToFile(cfgMap, path.Join(globals.KubernetesAPIServerConfigDir, kubeAPIServerCfgFile))\n}", "title": "" }, { "docid": "92fb1613ace074d17a8a93e073334ee9", "score": "0.48524097", "text": "func DeclarativeKubernetes(client operatorv1pb.OperatorClient, log logger.Logger) []Subscription {\n\tvar subs []Subscription\n\tresp, err := client.ListSubscriptions(context.TODO(), &emptypb.Empty{})\n\tif err != nil {\n\t\tlog.Errorf(\"failed to list subscriptions from operator: %s\", err)\n\t\treturn subs\n\t}\n\n\tfor _, s := range resp.Subscriptions {\n\t\tsubs, err = appendSubscription(subs, s)\n\t\tif err != nil {\n\t\t\tlog.Warnf(\"failed to add subscription from operator: %s\", err)\n\t\t\tcontinue\n\t\t}\n\t}\n\treturn subs\n}", "title": "" }, { "docid": "dcd027b1795b2f903a78bb9d51d1afba", "score": "0.48370957", "text": "func kubectlApplyString(group string) string {\n\treturn fmt.Sprintf(`cat <<EOF | kubectl apply -f -\n%s\n---\n%s\nEOF`, clusterRoleTemplate, strings.ReplaceAll(clusterRoleBindingTemplate, \"group_name\", group))\n}", "title": "" }, { "docid": "388e31e7edc8d5af62d4874f002397cb", "score": "0.4826932", "text": "func BuildNodeLabels(cluster *api.Cluster, instanceGroup *api.InstanceGroup) (map[string]string, error) {\n\tisControlPlane := false\n\tisAPIServer := false\n\tisNode := false\n\tswitch instanceGroup.Spec.Role {\n\tcase api.InstanceGroupRoleControlPlane:\n\t\tisControlPlane = true\n\tcase api.InstanceGroupRoleAPIServer:\n\t\tisAPIServer = true\n\tcase api.InstanceGroupRoleNode:\n\t\tisNode = true\n\tcase api.InstanceGroupRoleBastion:\n\t\t// no labels to add\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unhandled instanceGroup role %q\", instanceGroup.Spec.Role)\n\t}\n\n\t// Merge KubeletConfig for NodeLabels\n\tc := &api.KubeletConfigSpec{}\n\tif isControlPlane {\n\t\treflectutils.JSONMergeStruct(c, cluster.Spec.ControlPlaneKubelet)\n\t} else {\n\t\treflectutils.JSONMergeStruct(c, cluster.Spec.Kubelet)\n\t}\n\n\tif instanceGroup.Spec.Kubelet != nil {\n\t\treflectutils.JSONMergeStruct(c, instanceGroup.Spec.Kubelet)\n\t}\n\n\tnodeLabels := c.NodeLabels\n\n\tif isAPIServer || isControlPlane {\n\t\tif nodeLabels == nil {\n\t\t\tnodeLabels = make(map[string]string)\n\t\t}\n\t\t// Note: featureflag is not available here - we're in kops-controller.\n\t\t// We keep the featureflag as a placeholder to change the logic;\n\t\t// when we drop the featureflag we should just always include the label, even for\n\t\t// full control-plane nodes.\n\t\tif isAPIServer || featureflag.APIServerNodes.Enabled() {\n\t\t\tnodeLabels[RoleLabelAPIServer16] = \"\"\n\t\t}\n\t\tif cluster.IsKubernetesLT(\"1.24\") {\n\t\t\tnodeLabels[RoleLabelName15] = RoleAPIServerLabelValue15\n\t\t}\n\t}\n\n\tif isNode {\n\t\tif nodeLabels == nil {\n\t\t\tnodeLabels = make(map[string]string)\n\t\t}\n\t\tnodeLabels[RoleLabelNode16] = \"\"\n\t\tif cluster.IsKubernetesLT(\"1.24\") {\n\t\t\tnodeLabels[RoleLabelName15] = 
RoleNodeLabelValue15\n\t\t}\n\t}\n\n\tif isControlPlane {\n\t\tif nodeLabels == nil {\n\t\t\tnodeLabels = make(map[string]string)\n\t\t}\n\t\tfor label, value := range BuildMandatoryControlPlaneLabels() {\n\t\t\tnodeLabels[label] = value\n\t\t}\n\t\tif cluster.IsKubernetesLT(\"1.24\") {\n\t\t\tnodeLabels[RoleLabelMaster16] = \"\"\n\t\t\tnodeLabels[RoleLabelName15] = RoleMasterLabelValue15\n\t\t}\n\t}\n\n\tfor k, v := range instanceGroup.Spec.NodeLabels {\n\t\tif nodeLabels == nil {\n\t\t\tnodeLabels = make(map[string]string)\n\t\t}\n\t\tnodeLabels[k] = v\n\t}\n\n\tif instanceGroup.Spec.Manager == api.InstanceManagerKarpenter {\n\t\tnodeLabels[\"karpenter.sh/provisioner-name\"] = instanceGroup.ObjectMeta.Name\n\t}\n\n\treturn nodeLabels, nil\n}", "title": "" }, { "docid": "61a395e716e49d2d4dd9dc7f6572a1ea", "score": "0.48214072", "text": "func (this *KubernetesService) printResponse(resp string) {\n\t// todo\n\t// for i := 0; i < 60; i++ {\n\t// \tresp, err = client.Get(&param)\n\t// \tif err != nil {\n\t// \t\tbeego.Debug(err, resp)\n\t// \t}\n\n\t// \t//beego.Debug(label)\n\t// \terr = json.Unmarshal(([]byte)(resp), &response)\n\t// \tfor j := 0; j < len(response.Items); j++ {\n\t// \t\tpodName = response.Items[j].MetaData.Name\n\t// \t\tstatus = response.Items[j].Status.Phase\n\t// \t\t//beego.Debug(podName)\n\t// \t\t//beego.Debug(status)\n\n\t// \t\tif strings.Contains(podName, label) {\n\t// \t\t\tbeego.Debug(\"=============================================\")\n\t// \t\t\tbeego.Debug(\"podName:\", podName)\n\t// \t\t\tbeego.Debug(\"label:\", label)\n\t// \t\t\t// phase\n\t// \t\t\tbeego.Debug(podName, \"phase\", status)\n\t// \t\t\tbeego.Debug(podName, \"hostIP\", response.Items[j].Status.HostIP)\n\t// \t\t\tbeego.Debug(podName, \"podIP\", response.Items[j].Status.PodIP)\n\t// \t\t\tbeego.Debug(podName, \"startTime\", response.Items[j].Status.StartTime)\n\t// \t\t\t// conditions\n\t// \t\t\tconditionCnt := len(response.Items[j].Status.Conditions)\n\t// \t\t\tfor k := 0; k < conditionCnt; k++ {\n\t// \t\t\t\t//response.Items[j].Status.Conditions[j]\n\t// \t\t\t\tbeego.Debug(podName, \"condition:Type\", response.Items[j].Status.Conditions[k].Type)\n\t// \t\t\t\tbeego.Debug(podName, \"condition:Status\", response.Items[j].Status.Conditions[k].Status)\n\t// \t\t\t\tif response.Items[j].Status.Conditions[k].LastProbeTime != nil {\n\t// \t\t\t\t\tbeego.Debug(podName, \"condition:LastProbeTime\", *response.Items[j].Status.Conditions[k].LastProbeTime)\n\t// \t\t\t\t}\n\t// \t\t\t\tbeego.Debug(podName, \"condition:LastTransitionTime\", response.Items[j].Status.Conditions[k].LastTransitionTime)\n\t// \t\t\t\tbeego.Debug(podName, \"condition:Reason\", response.Items[j].Status.Conditions[k].Reason)\n\t// \t\t\t\tbeego.Debug(podName, \"condition:Message\", response.Items[j].Status.Conditions[k].Message)\n\t// \t\t\t}\n\t// \t\t\t// container\n\t// \t\t\tcontainerCnt := len(response.Items[j].Status.ContainerStatuses)\n\t// \t\t\tfor k := 0; k < containerCnt; k++ {\n\t// \t\t\t\tbeego.Debug(podName, \"container:name\", response.Items[j].Status.ContainerStatuses[k].Name)\n\t// \t\t\t\tif response.Items[j].Status.ContainerStatuses[k].State.Running != nil {\n\t// \t\t\t\t\tbeego.Debug(podName, \"container:Running:StartedAt\", response.Items[j].Status.ContainerStatuses[k].State.Running.StartedAt)\n\t// \t\t\t\t}\n\t// \t\t\t\tif response.Items[j].Status.ContainerStatuses[k].State.Waiting != nil {\n\t// \t\t\t\t\tbeego.Debug(podName, \"container:Waiting:Reason\", 
response.Items[j].Status.ContainerStatuses[k].State.Waiting.Reason)\n\t// \t\t\t\t\tbeego.Debug(podName, \"container:Waiting:Message\", response.Items[j].Status.ContainerStatuses[k].State.Waiting.Message)\n\t// \t\t\t\t}\n\t// \t\t\t\tbeego.Debug(podName, \"container:ready\", response.Items[j].Status.ContainerStatuses[k].Ready)\n\t// \t\t\t\tbeego.Debug(podName, \"container:RestartCount\", response.Items[j].Status.ContainerStatuses[k].RestartCount)\n\t// \t\t\t\tbeego.Debug(podName, \"container:Image\", response.Items[j].Status.ContainerStatuses[k].Image)\n\t// \t\t\t\tbeego.Debug(podName, \"container:ImageID\", response.Items[j].Status.ContainerStatuses[k].ImageID)\n\t// \t\t\t\tbeego.Debug(podName, \"container:ContainerID\", response.Items[j].Status.ContainerStatuses[k].ContainerID)\n\n\t// \t\t\t}\n\n\t// \t\t\tif utility.StringEqual(status, \"Running\") {\n\t// \t\t\t\tbeego.Debug(podName, \"is\", status)\n\t// \t\t\t\treturn nil\n\t// \t\t\t}\n\t// \t\t}\n\t// \t}\n\n\t// \ttime.Sleep(time.Second * 2)\n\t// }\n}", "title": "" }, { "docid": "44be5273810b746205aa514d95e357b4", "score": "0.48056844", "text": "func addKubeCommands(topLevel *cobra.Command) {\n\ttopLevel.AddCommand(&cobra.Command{\n\t\tUse: \"delete\",\n\t\tShort: `See \"kubectl help delete\" for detailed usage.`,\n\t\tRun: passthru(\"kubectl\"),\n\t\t// We ignore unknown flags to avoid importing everything Go exposes\n\t\t// from our commands.\n\t\tFParseErrWhitelist: cobra.FParseErrWhitelist{\n\t\t\tUnknownFlags: true,\n\t\t},\n\t})\n\n\tkoApplyFlags := []string{}\n\tlo := &LocalOptions{}\n\tbo := &BinaryOptions{}\n\tno := &NameOptions{}\n\tfo := &FilenameOptions{}\n\tta := &TagsOptions{}\n\tapply := &cobra.Command{\n\t\tUse: \"apply -f FILENAME\",\n\t\tShort: \"Apply the input files with image references resolved to built/pushed image digests.\",\n\t\tLong: `This sub-command finds import path references within the provided files, builds them into Go binaries, containerizes them, publishes them, and then feeds the resulting yaml into \"kubectl apply\".`,\n\t\tExample: `\n # Build and publish import path references to a Docker\n # Registry as:\n # ${KO_DOCKER_REPO}/<package name>-<hash of import path>\n # Then, feed the resulting yaml into \"kubectl apply\".\n # When KO_DOCKER_REPO is ko.local, it is the same as if\n # --local was passed.\n ko apply -f config/\n\n # Build and publish import path references to a Docker\n # Registry preserving import path names as:\n # ${KO_DOCKER_REPO}/<import path>\n # Then, feed the resulting yaml into \"kubectl apply\".\n ko apply --preserve-import-paths -f config/\n\n # Build and publish import path references to a Docker\n # daemon as:\n # ko.local/<import path>\n # Then, feed the resulting yaml into \"kubectl apply\".\n ko apply --local -f config/\n\n # Apply from stdin:\n cat config.yaml | ko apply -f -`,\n\t\tArgs: cobra.NoArgs,\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\t// Create a set of ko-specific flags to ignore when passing through\n\t\t\t// kubectl global flags.\n\t\t\tignoreSet := make(map[string]struct{})\n\t\t\tfor _, s := range koApplyFlags {\n\t\t\t\tignoreSet[s] = struct{}{}\n\t\t\t}\n\n\t\t\t// Filter out ko flags from what we will pass through to kubectl.\n\t\t\tkubectlFlags := []string{}\n\t\t\tcmd.Flags().Visit(func(flag *pflag.Flag) {\n\t\t\t\tif _, ok := ignoreSet[flag.Name]; !ok {\n\t\t\t\t\tkubectlFlags = append(kubectlFlags, \"--\"+flag.Name, flag.Value.String())\n\t\t\t\t}\n\t\t\t})\n\n\t\t\t// Issue a \"kubectl apply\" command reading from 
stdin,\n\t\t\t// to which we will pipe the resolved files.\n\t\t\targv := []string{\"apply\", \"-f\", \"-\"}\n\t\t\targv = append(argv, kubectlFlags...)\n\t\t\tkubectlCmd := exec.Command(\"kubectl\", argv...)\n\n\t\t\t// Pass through our environment\n\t\t\tkubectlCmd.Env = os.Environ()\n\t\t\t// Pass through our std{out,err} and make our resolved buffer stdin.\n\t\t\tkubectlCmd.Stderr = os.Stderr\n\t\t\tkubectlCmd.Stdout = os.Stdout\n\n\t\t\t// Wire up kubectl stdin to resolveFilesToWriter.\n\t\t\tstdin, err := kubectlCmd.StdinPipe()\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatalf(\"error piping to 'kubectl apply': %v\", err)\n\t\t\t}\n\n\t\t\tgo func() {\n\t\t\t\t// kubectl buffers data before starting to apply it, which\n\t\t\t\t// can lead to resources being created more slowly than desired.\n\t\t\t\t// In the case of --watch, it can lead to resources not being\n\t\t\t\t// applied at all until enough iteration has occurred. To work\n\t\t\t\t// around this, we prime the stream with a bunch of empty objects\n\t\t\t\t// which kubectl will discard.\n\t\t\t\t// See https://github.com/google/go-containerregistry/pull/348\n\t\t\t\tfor i := 0; i < 1000; i++ {\n\t\t\t\t\tstdin.Write([]byte(\"---\\n\"))\n\t\t\t\t}\n\t\t\t\t// Once primed kick things off.\n\t\t\t\tresolveFilesToWriter(fo, no, lo, ta, stdin)\n\t\t\t}()\n\n\t\t\t// Run it.\n\t\t\tif err := kubectlCmd.Run(); err != nil {\n\t\t\t\tlog.Fatalf(\"error executing 'kubectl apply': %v\", err)\n\t\t\t}\n\t\t},\n\t}\n\taddLocalArg(apply, lo)\n\taddNamingArgs(apply, no)\n\taddFileArg(apply, fo)\n\taddTagsArg(apply, ta)\n\n\t// Collect the ko-specific apply flags before registering the kubectl global\n\t// flags so that we can ignore them when passing kubectl global flags through\n\t// to kubectl.\n\tapply.Flags().VisitAll(func(flag *pflag.Flag) {\n\t\tkoApplyFlags = append(koApplyFlags, flag.Name)\n\t})\n\n\t// Register the kubectl global flags.\n\tkubeConfigFlags := genericclioptions.NewConfigFlags()\n\tkubeConfigFlags.AddFlags(apply.Flags())\n\n\ttopLevel.AddCommand(apply)\n\n\tresolve := &cobra.Command{\n\t\tUse: \"resolve -f FILENAME\",\n\t\tShort: \"Print the input files with image references resolved to built/pushed image digests.\",\n\t\tLong: `This sub-command finds import path references within the provided files, builds them into Go binaries, containerizes them, publishes them, and prints the resulting yaml.`,\n\t\tExample: `\n # Build and publish import path references to a Docker\n # Registry as:\n # ${KO_DOCKER_REPO}/<package name>-<hash of import path>\n # When KO_DOCKER_REPO is ko.local, it is the same as if\n # --local and --preserve-import-paths were passed.\n ko resolve -f config/\n\n # Build and publish import path references to a Docker\n # Registry preserving import path names as:\n # ${KO_DOCKER_REPO}/<import path>\n # When KO_DOCKER_REPO is ko.local, it is the same as if\n # --local was passed.\n ko resolve --preserve-import-paths -f config/\n\n # Build and publish import path references to a Docker\n # daemon as:\n # ko.local/<import path>\n # This always preserves import paths.\n ko resolve --local -f config/`,\n\t\tArgs: cobra.NoArgs,\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tresolveFilesToWriter(fo, no, lo, ta, os.Stdout)\n\t\t},\n\t}\n\taddLocalArg(resolve, lo)\n\taddNamingArgs(resolve, no)\n\taddFileArg(resolve, fo)\n\taddTagsArg(resolve, ta)\n\ttopLevel.AddCommand(resolve)\n\n\tpublish := &cobra.Command{\n\t\tUse: \"publish IMPORTPATH...\",\n\t\tShort: \"Build and publish container images from the 
given importpaths.\",\n\t\tLong: `This sub-command builds the provided import paths into Go binaries, containerizes them, and publishes them.`,\n\t\tExample: `\n # Build and publish import path references to a Docker\n # Registry as:\n # ${KO_DOCKER_REPO}/<package name>-<hash of import path>\n # When KO_DOCKER_REPO is ko.local, it is the same as if\n # --local and --preserve-import-paths were passed.\n ko publish github.com/foo/bar/cmd/baz github.com/foo/bar/cmd/blah\n\n # Build and publish a relative import path as:\n # ${KO_DOCKER_REPO}/<package name>-<hash of import path>\n # When KO_DOCKER_REPO is ko.local, it is the same as if\n # --local and --preserve-import-paths were passed.\n ko publish ./cmd/blah\n\n # Build and publish a relative import path as:\n # ${KO_DOCKER_REPO}/<import path>\n # When KO_DOCKER_REPO is ko.local, it is the same as if\n # --local was passed.\n ko publish --preserve-import-paths ./cmd/blah\n\n # Build and publish import path references to a Docker\n # daemon as:\n # ko.local/<import path>\n # This always preserves import paths.\n ko publish --local github.com/foo/bar/cmd/baz github.com/foo/bar/cmd/blah`,\n\t\tArgs: cobra.MinimumNArgs(1),\n\t\tRun: func(_ *cobra.Command, args []string) {\n\t\t\tpublishImages(args, no, lo, ta)\n\t\t},\n\t}\n\taddLocalArg(publish, lo)\n\taddNamingArgs(publish, no)\n\taddTagsArg(publish, ta)\n\ttopLevel.AddCommand(publish)\n\n\trun := &cobra.Command{\n\t\tUse: \"run NAME --image=IMPORTPATH\",\n\t\tShort: \"A variant of `kubectl run` that containerizes IMPORTPATH first.\",\n\t\tLong: `This sub-command combines \"ko publish\" and \"kubectl run\" to support containerizing and running Go binaries on Kubernetes in a single command.`,\n\t\tExample: `\n # Publish the --image and run it on Kubernetes as:\n # ${KO_DOCKER_REPO}/<package name>-<hash of import path>\n # When KO_DOCKER_REPO is ko.local, it is the same as if\n # --local and --preserve-import-paths were passed.\n ko run foo --image=github.com/foo/bar/cmd/baz\n\n # This supports relative import paths as well.\n ko run foo --image=./cmd/baz`,\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\timgs := publishImages([]string{bo.Path}, no, lo, ta)\n\n\t\t\t// There's only one, but this is the simple way to access the\n\t\t\t// reference since the import path may have been qualified.\n\t\t\tfor k, v := range imgs {\n\t\t\t\tlog.Printf(\"Running %q\", k)\n\t\t\t\t// Issue a \"kubectl run\" command with our same arguments,\n\t\t\t\t// but supply a second --image to override the one we intercepted.\n\t\t\t\targv := append(os.Args[1:], \"--image\", v.String())\n\t\t\t\tkubectlCmd := exec.Command(\"kubectl\", argv...)\n\n\t\t\t\t// Pass through our environment\n\t\t\t\tkubectlCmd.Env = os.Environ()\n\t\t\t\t// Pass through our std*\n\t\t\t\tkubectlCmd.Stderr = os.Stderr\n\t\t\t\tkubectlCmd.Stdout = os.Stdout\n\t\t\t\tkubectlCmd.Stdin = os.Stdin\n\n\t\t\t\t// Run it.\n\t\t\t\tif err := kubectlCmd.Run(); err != nil {\n\t\t\t\t\tlog.Fatalf(\"error executing \\\"kubectl run\\\": %v\", err)\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t// We ignore unknown flags to avoid importing everything Go exposes\n\t\t// from our commands.\n\t\tFParseErrWhitelist: cobra.FParseErrWhitelist{\n\t\t\tUnknownFlags: true,\n\t\t},\n\t}\n\taddLocalArg(run, lo)\n\taddNamingArgs(run, no)\n\taddImageArg(run, bo)\n\taddTagsArg(run, ta)\n\n\ttopLevel.AddCommand(run)\n}", "title": "" }, { "docid": "9daba2738fa4a6f52ee6ac827136273e", "score": "0.48037782", "text": "func (l kubernetesClusterLabelMatcher) getKubeLabels(role 
types.Role, typ types.RoleConditionType) types.Labels {\n\tlabels := role.GetKubernetesLabels(typ)\n\n\t// After the introduction of https://github.com/gravitational/teleport/pull/9759 the\n\t// kubernetes_labels started to be respected. Former role behavior evaluated deny rules\n\t// even if the kubernetes_labels was empty. To preserve this behavior after respecting kubernetes label the label\n\t// logic needs to be aligned.\n\t// Default wildcard rules should be added to deny.kubernetes_labels if\n\t// deny.kubernetes_labels is empty to ensure that deny rule will be evaluated\n\t// even if kubernetes_labels are empty.\n\tif len(labels) == 0 && typ == types.Deny {\n\t\treturn map[string]apiutils.Strings{types.Wildcard: []string{types.Wildcard}}\n\t}\n\treturn labels\n}", "title": "" }, { "docid": "6a754bf042e038b2c02f539445c7db84", "score": "0.47946325", "text": "func kubeletHasRBAC(major, minor uint) func(c *status.Cluster, n *status.Node) bool {\n\treturn func(c *status.Cluster, n *status.Node) bool {\n\t\tfor i := 0; i < 5; i++ {\n\t\t\t// Try the new kubelet config naming scheme and fallback to the old one.\n\t\t\t//\n\t\t\t// TODO: remove handling of the old CM once kinder no longer\n\t\t\t// manages clusters with the legacy kubelet ConfigMap -\n\t\t\t// i.e. when 1.24 is out of support.\n\t\t\t// https://github.com/kubernetes/kubeadm/issues/1582\n\t\t\toutput1 := kubectlOutput(n,\n\t\t\t\t\"auth\",\n\t\t\t\t\"can-i\",\n\t\t\t\t\"get\",\n\t\t\t\t\"--kubeconfig=/etc/kubernetes/kubelet.conf\",\n\t\t\t\t\"--namespace=kube-system\",\n\t\t\t\t\"configmaps/kubelet-config\",\n\t\t\t)\n\t\t\tif output1 != \"yes\" {\n\t\t\t\toutput1 = kubectlOutput(n,\n\t\t\t\t\t\"auth\",\n\t\t\t\t\t\"can-i\",\n\t\t\t\t\t\"get\",\n\t\t\t\t\t\"--kubeconfig=/etc/kubernetes/kubelet.conf\",\n\t\t\t\t\t\"--namespace=kube-system\",\n\t\t\t\t\tfmt.Sprintf(\"configmaps/kubelet-config-%d.%d\", major, minor),\n\t\t\t\t)\n\t\t\t}\n\t\t\toutput2 := kubectlOutput(n,\n\t\t\t\t\"auth\",\n\t\t\t\t\"can-i\",\n\t\t\t\t\"get\",\n\t\t\t\t\"--kubeconfig=/etc/kubernetes/kubelet.conf\",\n\t\t\t\t\"--namespace=kube-system\",\n\t\t\t\t\"configmaps/kube-proxy\",\n\t\t\t)\n\t\t\tif output1 == \"yes\" && output2 == \"yes\" {\n\t\t\t\ttime.Sleep(1 * time.Second)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\treturn false\n\t\t}\n\n\t\tfmt.Println(\"kubelet has access to expected config maps\")\n\t\treturn true\n\t}\n}", "title": "" }, { "docid": "b8d29f0959d74f34f34a018a32da6204", "score": "0.47825298", "text": "func restart(deploymentsInterface appsv1.DeploymentInterface, statefulsetsInterface appsv1.StatefulSetInterface, daemonsetsInterface appsv1.DaemonSetInterface, secret string, client kubernetes.Interface) {\n\tlistOptions := metav1.ListOptions{}\n\tdeployments, _ := deploymentsInterface.List(context.TODO(), listOptions)\n\tstatefulsets, _ := statefulsetsInterface.List(context.TODO(), listOptions)\n\tdaemonsets, _ := daemonsetsInterface.List(context.TODO(), listOptions)\n\n\tupdate := time.Now().Format(\"2006-1-2.150417.000\")\n\tupdateOptions := metav1.UpdateOptions{}\nNEXT_DEPLOYMENT:\n\tfor _, adeployment := range deployments.Items {\n\t\tdeployment := adeployment\n\t\tfor _, volume := range deployment.Spec.Template.Spec.Volumes {\n\t\t\tif volume.Secret != nil && volume.Secret.SecretName != \"\" && volume.Secret.SecretName == secret && deployment.ObjectMeta.Annotations[noRestartAnnotation] != \"true\" {\n\t\t\t\tif deployment.ObjectMeta.Labels == nil {\n\t\t\t\t\tdeployment.ObjectMeta.Labels = 
make(map[string]string)\n\t\t\t\t}\n\t\t\t\tdeployment.ObjectMeta.Labels[restartLabel] = update\n\t\t\t\tif deployment.Spec.Template.ObjectMeta.Labels == nil {\n\t\t\t\t\tdeployment.Spec.Template.ObjectMeta.Labels = make(map[string]string)\n\t\t\t\t}\n\t\t\t\tdeployment.Spec.Template.ObjectMeta.Labels[restartLabel] = update\n\t\t\t\t_, err := deploymentsInterface.Update(context.TODO(), &deployment, updateOptions)\n\t\t\t\tif err != nil {\n\t\t\t\t\tklog.Errorf(\"Error updating deployment: %v\", err)\n\t\t\t\t}\n\t\t\t\tklog.Infof(\"%s Cert-Rotator Restarting Resource: Secret=%s, Deployment=%s\", update, secret, deployment.ObjectMeta.Name)\n\t\t\t\tcontinue NEXT_DEPLOYMENT\n\t\t\t}\n\t\t}\n\t}\nNEXT_STATEFULSET:\n\tfor _, astatefulset := range statefulsets.Items {\n\t\tstatefulset := astatefulset\n\t\tfor _, volume := range statefulset.Spec.Template.Spec.Volumes {\n\t\t\tif volume.Secret != nil && volume.Secret.SecretName != \"\" && volume.Secret.SecretName == secret && statefulset.ObjectMeta.Annotations[noRestartAnnotation] != \"true\" {\n\t\t\t\tif statefulset.ObjectMeta.Labels == nil {\n\t\t\t\t\tstatefulset.ObjectMeta.Labels = make(map[string]string)\n\t\t\t\t}\n\t\t\t\tstatefulset.ObjectMeta.Labels[restartLabel] = update\n\t\t\t\tif statefulset.Spec.Template.ObjectMeta.Labels == nil {\n\t\t\t\t\tstatefulset.Spec.Template.ObjectMeta.Labels = make(map[string]string)\n\t\t\t\t}\n\t\t\t\tstatefulset.Spec.Template.ObjectMeta.Labels[restartLabel] = update\n\t\t\t\t_, err := statefulsetsInterface.Update(context.TODO(), &statefulset, updateOptions)\n\t\t\t\tif err != nil {\n\t\t\t\t\tklog.Errorf(\"Error updating statefulset: %v\", err)\n\t\t\t\t}\n\t\t\t\tklog.Infof(\"%s Cert-Rotator Restarting Resource: Secret=%s, StatefulSet=%s\", update, secret, statefulset.ObjectMeta.Name)\n\t\t\t\tcontinue NEXT_STATEFULSET\n\t\t\t}\n\t\t}\n\t}\nNEXT_DAEMONSET:\n\tfor _, adaemonset := range daemonsets.Items {\n\t\tdaemonset := adaemonset\n\t\tfor _, volume := range daemonset.Spec.Template.Spec.Volumes {\n\t\t\tif volume.Secret != nil && volume.Secret.SecretName != \"\" && volume.Secret.SecretName == secret && daemonset.ObjectMeta.Annotations[noRestartAnnotation] != \"true\" {\n\t\t\t\tif daemonset.ObjectMeta.Labels == nil {\n\t\t\t\t\tdaemonset.ObjectMeta.Labels = make(map[string]string)\n\t\t\t\t}\n\t\t\t\tdaemonset.ObjectMeta.Labels[restartLabel] = update\n\t\t\t\tif daemonset.Spec.Template.ObjectMeta.Labels == nil {\n\t\t\t\t\tdaemonset.Spec.Template.ObjectMeta.Labels = make(map[string]string)\n\t\t\t\t}\n\t\t\t\tdaemonset.Spec.Template.ObjectMeta.Labels[restartLabel] = update\n\t\t\t\t_, err := daemonsetsInterface.Update(context.TODO(), &daemonset, updateOptions)\n\t\t\t\tif err != nil {\n\t\t\t\t\tklog.Errorf(\"Error updating daemonset: %v\", err)\n\t\t\t\t}\n\t\t\t\tklog.Infof(\"%s Cert-Rotator Restarting Resource: Secret=%s, DaemonSet=%s\", update, secret, daemonset.ObjectMeta.Name)\n\t\t\t\tcontinue NEXT_DAEMONSET\n\t\t\t}\n\t\t}\n\t}\n}", "title": "" }, { "docid": "0c1150c4d15f35d91c5e3b34fbee8329", "score": "0.47739503", "text": "func Kubernetes() *Command {\n\tcmd := &Command{\n\t\tCommand: &cobra.Command{\n\t\t\tUse: \"kubernetes\",\n\t\t\tAliases: []string{\"kube\", \"k8s\", \"k\"},\n\t\t\tShort: \"[beta] kubernetes commands\",\n\t\t\tLong: \"[beta] kubernetes is used to access Kubernetes commands\",\n\t\t\tHidden: !isBeta(),\n\t\t},\n\t}\n\n\tCmdBuilder(cmd, RunKubernetesGet, \"get <id|name>\", \"get a cluster\", Writer, aliasOpt(\"g\"))\n\n\tCmdBuilder(cmd, RunKubernetesGetKubeconfig, 
\"kubeconfig <id|name>\", \"get a cluster's kubeconfig file\", Writer, aliasOpt(\"cfg\"))\n\n\tCmdBuilder(cmd, RunKubernetesList, \"list\", \"get a list of your clusters\", Writer, aliasOpt(\"ls\"))\n\n\tcmdKubeClusterCreate := CmdBuilder(cmd, RunKubernetesCreate, \"create\", \"create a cluster\", Writer, aliasOpt(\"c\"))\n\tAddStringFlag(cmdKubeClusterCreate, doctl.ArgClusterName, \"\", \"\", \"cluster name\", requiredOpt())\n\tAddStringFlag(cmdKubeClusterCreate, doctl.ArgRegionSlug, \"\", \"\", \"cluster region location, example value: nyc1\", requiredOpt())\n\tAddStringFlag(cmdKubeClusterCreate, doctl.ArgClusterVersionSlug, \"\", \"\", \"cluster version\", requiredOpt())\n\tAddStringSliceFlag(cmdKubeClusterCreate, doctl.ArgTagNames, \"\", nil, \"cluster tags\")\n\tAddStringSliceFlag(cmdKubeClusterCreate, doctl.ArgClusterNodePools, \"\", nil, `cluster node pools in the form \"name=your-name;size=droplet_size;count=5;tag=tag1;tag=tag2\"`, requiredOpt())\n\n\tcmdKubeClusterUpdate := CmdBuilder(cmd, RunKubernetesUpdate, \"update <id|name>\", \"update a cluster's properties\", Writer, aliasOpt(\"u\"))\n\tAddStringFlag(cmdKubeClusterUpdate, doctl.ArgClusterName, \"\", \"\", \"cluster name\")\n\tAddStringSliceFlag(cmdKubeClusterUpdate, doctl.ArgTagNames, \"\", nil, \"cluster tags\")\n\n\tcmdKubeClusterDelete := CmdBuilder(cmd, RunKubernetesDelete, \"delete <id|name>\", \"delete a cluster\", Writer, aliasOpt(\"d\", \"rm\"))\n\tAddBoolFlag(cmdKubeClusterDelete, doctl.ArgForce, doctl.ArgShortForce, false, \"Force cluster delete\")\n\n\tcmd.AddCommand(kubernetesNodePools())\n\n\tcmd.AddCommand(kubernetesOptions())\n\n\treturn cmd\n}", "title": "" }, { "docid": "8909061d5c1014671381495484d9e527", "score": "0.47463498", "text": "func createRBAC(f *of.Framework) error {\n\t// Create ServiceAccount.\n\tsa, err := of.ConvertServiceAccount(\"resources/operator/service-account.yml\")\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"convert ServiceAccount from yml file failed\")\n\t}\n\t_, err = f.KubeClient.CoreV1().ServiceAccounts(TestNSNamespaced).Create(sa)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"create ServiceAccount failed\")\n\t}\n\n\t// Create Role\n\trole, err := of.ConvertRole(\"resources/operator/role.yml\")\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"convert Role from yml file failed\")\n\t}\n\tif _, err = f.KubeClient.RbacV1().Roles(TestNSNamespaced).Create(role); err != nil {\n\t\treturn errors.Wrap(err, \"create Role failed\")\n\t}\n\n\t// Create RoleBinding\n\trb, err := of.ConvertRoleBinding(\"resources/operator/role-binding.yml\")\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"convert RoleBinding from yml file failed\")\n\t}\n\tif _, err = f.KubeClient.RbacV1().RoleBindings(TestNSNamespaced).Create(rb); err != nil {\n\t\treturn errors.Wrap(err, \"create RoleBinding failed\")\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "1a5f68feabd3a7653450faaffc3ac58f", "score": "0.47372648", "text": "func (a *NamespaceClient) ListNamespaces(ctx context.Context, pred *store.SelectionPredicate) ([]*corev3.Namespace, error) {\n\tvar resources, namespaces []*corev3.Namespace\n\n\tvisitor, ok := a.auth.(ruleVisitor)\n\tif !ok {\n\t\tif err := a.client.List(ctx, &namespaces, pred); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn namespaces, nil\n\t}\n\tgstore := storev2.Of[*corev3.Namespace](a.client.Store)\n\tresources, err := gstore.List(ctx, storev2.ID{}, pred)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tnamespaceMap := make(map[string]*corev3.Namespace, 
len(resources))\n\tfor _, namespace := range resources {\n\t\tnamespaceMap[namespace.Metadata.Name] = namespace\n\t}\n\n\tattrs := &authorization.Attributes{\n\t\tAPIGroup: a.client.APIGroup,\n\t\tAPIVersion: a.client.APIVersion,\n\t\tResource: a.client.Kind.RBACName(),\n\t\tNamespace: corev2.ContextNamespace(ctx),\n\t\tVerb: \"list\",\n\t}\n\tif err := addAuthUser(ctx, attrs); err != nil {\n\t\treturn nil, err\n\t}\n\tlogger = logger.WithFields(logrus.Fields{\n\t\t\"zz_request\": map[string]string{\n\t\t\t\"apiGroup\": attrs.APIGroup,\n\t\t\t\"apiVersion\": attrs.APIVersion,\n\t\t\t\"namespace\": attrs.Namespace,\n\t\t\t\"resource\": attrs.Resource,\n\t\t\t\"resourceName\": attrs.ResourceName,\n\t\t\t\"username\": attrs.User.Username,\n\t\t\t\"verb\": attrs.Verb,\n\t\t},\n\t})\n\n\tvar funcErr error\n\n\tvisitor.VisitRulesFor(ctx, attrs, func(binding rbac.RoleBinding, rule corev2.Rule, err error) (cont bool) {\n\t\tif err != nil {\n\t\t\tfuncErr = err\n\t\t\treturn false\n\t\t}\n\t\tif len(namespaceMap) == 0 {\n\t\t\treturn false\n\t\t}\n\t\tif !rule.VerbMatches(\"get\") {\n\t\t\treturn true\n\t\t}\n\n\t\t// Explicit access to namespaces can only be granted via a\n\t\t// ClusterRoleBinding\n\t\tif rule.ResourceMatches(corev2.NamespacesResource) && binding.GetObjectMeta().Namespace == \"\" {\n\t\t\t// If this rule applies to namespaces, determine if all resources of type \"namespace\" are allowed\n\t\t\tif len(rule.ResourceNames) == 0 {\n\t\t\t\t// All resources of type \"namespace\" are allowed\n\t\t\t\tlogger.Debugf(\"all namespaces explicitly authorized by the binding %s\", binding.GetObjectMeta().Name)\n\t\t\t\tnamespaces = resources\n\t\t\t\treturn false\n\t\t\t}\n\n\t\t\t// If this rule applies to namespaces, and only certain namespaces are\n\t\t\t// specified, determine if it matches this current namespace\n\t\t\tfor name, namespace := range namespaceMap {\n\t\t\t\tif rule.ResourceNameMatches(name) {\n\t\t\t\t\tlogger.Debugf(\"namespace %s explicitly authorized by the binding %s\", namespace.Metadata.Name, binding.GetObjectMeta().Name)\n\t\t\t\t\tnamespaces = append(namespaces, namespace)\n\t\t\t\t\tdelete(namespaceMap, name)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn true\n\t\t}\n\n\t\t// Determine if this ClusterRoleBinding provides implicit access to\n\t\t// namespaced resources\n\t\tif binding.GetObjectMeta().Namespace == \"\" {\n\t\t\tfor _, resource := range rule.Resources {\n\t\t\t\tif stringsutil.InArray(resource, corev2.CommonCoreResources) {\n\t\t\t\t\t// All resources of type \"namespace\" are allowed\n\t\t\t\t\tlogger.Debugf(\"all namespaces implicitly authorized by the binding %s\", binding.GetObjectMeta().Name)\n\t\t\t\t\tnamespaces = resources\n\t\t\t\t\treturn false\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Determine if this RoleBinding matches the namespace\n\t\tbindingNamespace := binding.GetObjectMeta().Namespace\n\t\tif namespace, ok := namespaceMap[bindingNamespace]; ok {\n\t\t\tlogger.Debugf(\"namespace %s implicitly authorized by the binding %s\", namespace.Metadata.Name, binding.GetObjectMeta().Name)\n\t\t\tnamespaces = append(namespaces, namespace)\n\t\t\tdelete(namespaceMap, bindingNamespace)\n\t\t}\n\n\t\treturn true\n\t})\n\n\tif funcErr != nil {\n\t\treturn nil, fmt.Errorf(\"error listing namespaces: %s\", funcErr)\n\t}\n\n\tif len(namespaces) == 0 {\n\t\tlogger.Debug(\"unauthorized request\")\n\t\treturn nil, authorization.ErrUnauthorized\n\t}\n\n\treturn namespaces, nil\n}", "title": "" }, { "docid": "473800c2096e60e343816abc6fafc9f4", "score": "0.4735619", 
"text": "func validateRoles(\n\tappCR *kdv1.KubeDirectorApp,\n\tpatches []appPatchSpec,\n\tvalErrors []string,\n) ([]appPatchSpec, []string) {\n\n\t// Any global defaults will be removed from the CR. Remember their values\n\t// though for use in populating the role definitions.\n\tvar globalImageRepoTag *string\n\tvar globalSetupPackageInfo *kdv1.SetupPackageInfo\n\tvar globalPersistDirs *[]string\n\tvar globalEventList *[]string\n\tvar globalMaxLogSizeDump *int32\n\n\tvar globalMaxLogSizeDumpDefault = shared.DefaultMaxLogSizeDump\n\tglobalMaxLogSizeDump = &globalMaxLogSizeDumpDefault\n\n\tif appCR.Spec.DefaultImageRepoTag == nil {\n\t\tglobalImageRepoTag = nil\n\t} else {\n\t\ttagCopy := *appCR.Spec.DefaultImageRepoTag\n\t\tglobalImageRepoTag = &tagCopy\n\t\tappCR.Spec.DefaultImageRepoTag = nil\n\t\tpatches = append(\n\t\t\tpatches,\n\t\t\tappPatchSpec{\n\t\t\t\tOp: \"remove\",\n\t\t\t\tPath: \"/spec/defaultImageRepoTag\",\n\t\t\t},\n\t\t)\n\t}\n\tif !appCR.Spec.DefaultSetupPackage.IsSet {\n\t\tglobalSetupPackageInfo = nil\n\t} else {\n\t\tif appCR.Spec.DefaultSetupPackage.IsNull {\n\t\t\tglobalSetupPackageInfo = nil\n\t\t} else {\n\t\t\tpackageInfoCopy := appCR.Spec.DefaultSetupPackage.Info\n\t\t\tglobalSetupPackageInfo = &packageInfoCopy\n\t\t}\n\t\tappCR.Spec.DefaultSetupPackage = kdv1.SetupPackage{}\n\t\tpatches = append(\n\t\t\tpatches,\n\t\t\tappPatchSpec{\n\t\t\t\tOp: \"remove\",\n\t\t\t\tPath: \"/spec/defaultConfigPackage\",\n\t\t\t},\n\t\t)\n\t}\n\tif appCR.Spec.DefaultPersistDirs == nil {\n\t\tglobalPersistDirs = nil\n\t} else {\n\t\tdirsCopy := make([]string, len(*appCR.Spec.DefaultPersistDirs))\n\t\tcopy(dirsCopy, *appCR.Spec.DefaultPersistDirs)\n\t\tglobalPersistDirs = &dirsCopy\n\t\tappCR.Spec.DefaultPersistDirs = nil\n\t\tpatches = append(\n\t\t\tpatches,\n\t\t\tappPatchSpec{\n\t\t\t\tOp: \"remove\",\n\t\t\t\tPath: \"/spec/defaultPersistDirs\",\n\t\t\t},\n\t\t)\n\t}\n\tif appCR.Spec.DefaultEventList == nil {\n\t\tglobalEventList = nil\n\t} else {\n\t\teventsCopy := make([]string, len(*appCR.Spec.DefaultEventList))\n\t\tcopy(eventsCopy, *appCR.Spec.DefaultEventList)\n\t\tglobalEventList = &eventsCopy\n\t\tappCR.Spec.DefaultEventList = nil\n\t\tpatches = append(\n\t\t\tpatches,\n\t\t\tappPatchSpec{\n\t\t\t\tOp: \"remove\",\n\t\t\t\tPath: \"/spec/defaultEventList\",\n\t\t\t},\n\t\t)\n\t}\n\tif appCR.Spec.DefaultMaxLogSizeDump != nil {\n\t\tglobalMaxLogSizeDump = appCR.Spec.DefaultMaxLogSizeDump\n\t\tpatches = append(\n\t\t\tpatches,\n\t\t\tappPatchSpec{\n\t\t\t\tOp: \"remove\",\n\t\t\t\tPath: \"/spec/defaultMaxLogSizeDump\",\n\t\t\t},\n\t\t)\n\t}\n\n\t// OK let's do the roles.\n\tnumRoles := len(appCR.Spec.NodeRoles)\n\tfor index := 0; index < numRoles; index++ {\n\t\trole := &(appCR.Spec.NodeRoles[index])\n\t\tif role.SetupPackage.IsSet == false {\n\t\t\t// Nothing specified so, inherit the global specification\n\t\t\tif globalSetupPackageInfo == nil {\n\t\t\t\trole.SetupPackage.IsSet = true\n\t\t\t\trole.SetupPackage.IsNull = true\n\t\t\t\tpatches = append(\n\t\t\t\t\tpatches,\n\t\t\t\t\tappPatchSpec{\n\t\t\t\t\t\tOp: \"add\",\n\t\t\t\t\t\tPath: \"/spec/roles/\" + strconv.Itoa(index) + \"/configPackage\",\n\t\t\t\t\t\tValue: appPatchValue{\n\t\t\t\t\t\t\tstringValue: nil,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t)\n\t\t\t} else {\n\t\t\t\trole.SetupPackage.IsSet = true\n\t\t\t\trole.SetupPackage.IsNull = false\n\t\t\t\trole.SetupPackage.Info = *globalSetupPackageInfo\n\t\t\t\tpatches = append(\n\t\t\t\t\tpatches,\n\t\t\t\t\tappPatchSpec{\n\t\t\t\t\t\tOp: 
\"add\",\n\t\t\t\t\t\tPath: \"/spec/roles/\" + strconv.Itoa(index) + \"/configPackage\",\n\t\t\t\t\t\tValue: appPatchValue{\n\t\t\t\t\t\t\tpackageInfoValue: globalSetupPackageInfo,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\t\tif role.MinStorage != nil {\n\t\t\t_, minErr := resource.ParseQuantity(role.MinStorage.Size)\n\t\t\tif minErr != nil {\n\t\t\t\tvalErrors = append(\n\t\t\t\t\tvalErrors,\n\t\t\t\t\tfmt.Sprintf(\n\t\t\t\t\t\tinvalidMinStorageDef,\n\t\t\t\t\t\trole.ID,\n\t\t\t\t\t),\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\t\tif role.ContainerSpec != nil {\n\t\t\tif role.ContainerSpec.Tty {\n\t\t\t\tif !role.ContainerSpec.Stdin {\n\t\t\t\t\tvalErrors = append(\n\t\t\t\t\t\tvalErrors,\n\t\t\t\t\t\tfmt.Sprintf(\n\t\t\t\t\t\t\tttyWithoutStdin,\n\t\t\t\t\t\t\trole.ID,\n\t\t\t\t\t\t),\n\t\t\t\t\t)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif role.ImageRepoTag == nil {\n\t\t\t// We allow roles to have different container images but unlike the\n\t\t\t// setup package there cannot be a role with no image.\n\t\t\tif globalImageRepoTag == nil {\n\t\t\t\tvalErrors = append(\n\t\t\t\t\tvalErrors,\n\t\t\t\t\tfmt.Sprintf(\n\t\t\t\t\t\tnoDefaultImage,\n\t\t\t\t\t\trole.ID,\n\t\t\t\t\t),\n\t\t\t\t)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t// No special image specified so inherit from global.\n\t\t\trole.ImageRepoTag = globalImageRepoTag\n\t\t\tpatches = append(\n\t\t\t\tpatches,\n\t\t\t\tappPatchSpec{\n\t\t\t\t\tOp: \"add\",\n\t\t\t\t\tPath: \"/spec/roles/\" + strconv.Itoa(index) + \"/imageRepoTag\",\n\t\t\t\t\tValue: appPatchValue{\n\t\t\t\t\t\tstringValue: globalImageRepoTag,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t)\n\t\t}\n\t\tif role.PersistDirs == nil {\n\t\t\tif globalPersistDirs != nil {\n\t\t\t\trole.PersistDirs = globalPersistDirs\n\t\t\t\tpatches = append(\n\t\t\t\t\tpatches,\n\t\t\t\t\tappPatchSpec{\n\t\t\t\t\t\tOp: \"add\",\n\t\t\t\t\t\tPath: \"/spec/roles/\" + strconv.Itoa(index) + \"/persistDirs\",\n\t\t\t\t\t\tValue: appPatchValue{\n\t\t\t\t\t\t\tstringSliceValue: globalPersistDirs,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\t\tif role.EventList == nil {\n\t\t\tif globalEventList != nil {\n\t\t\t\trole.EventList = globalEventList\n\t\t\t\tpatches = append(\n\t\t\t\t\tpatches,\n\t\t\t\t\tappPatchSpec{\n\t\t\t\t\t\tOp: \"add\",\n\t\t\t\t\t\tPath: \"/spec/roles/\" + strconv.Itoa(index) + \"/eventList\",\n\t\t\t\t\t\tValue: appPatchValue{\n\t\t\t\t\t\t\tstringSliceValue: globalEventList,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\t\tif role.MaxLogSizeDump == nil {\n\t\t\trole.MaxLogSizeDump = globalMaxLogSizeDump\n\t\t\tpatches = append(\n\t\t\t\tpatches,\n\t\t\t\tappPatchSpec{\n\t\t\t\t\tOp: \"add\",\n\t\t\t\t\tPath: \"/spec/roles/\" + strconv.Itoa(index) + \"/maxLogSizeDump\",\n\t\t\t\t\tValue: appPatchValue{\n\t\t\t\t\t\tintValue: globalMaxLogSizeDump,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t)\n\t\t}\n\t}\n\n\treturn patches, valErrors\n}", "title": "" }, { "docid": "ee3a8d9a03c29e4119f14ac9a9b39277", "score": "0.4728909", "text": "func (t *ProtokubeBuilder) ProtokubeFlags(k8sVersion semver.Version) *ProtokubeFlags {\n\tf := &ProtokubeFlags{}\n\n\tmaster := t.IsMaster\n\n\tf.Master = fi.Bool(master)\n\tif master {\n\t\tf.Channels = t.NodeupConfig.Channels\n\t}\n\n\tif k8sVersion.Major == 1 && k8sVersion.Minor >= 6 {\n\t\tif master {\n\t\t\tf.InitializeRBAC = fi.Bool(true)\n\t\t}\n\t}\n\n\tf.LogLevel = fi.Int32(4)\n\tf.Containerized = fi.Bool(true)\n\n\tzone := t.Cluster.Spec.DNSZone\n\tif zone != \"\" {\n\t\tif strings.Contains(zone, \".\") {\n\t\t\t// match by name\n\t\t\tf.Zone = 
append(f.Zone, zone)\n\t\t} else {\n\t\t\t// match by id\n\t\t\tf.Zone = append(f.Zone, \"*/\"+zone)\n\t\t}\n\t} else {\n\t\tglog.Warningf(\"DNSZone not specified; protokube won't be able to update DNS\")\n\t\t// TODO: Should we permit wildcard updates if zone is not specified?\n\t\t//argv = append(argv, \"--zone=*/*\")\n\t}\n\n\tif dns.IsGossipHostname(t.Cluster.Spec.MasterInternalName) {\n\t\tglog.Warningf(\"MasterInternalName %q implies gossip DNS\", t.Cluster.Spec.MasterInternalName)\n\t\tf.DNSProvider = fi.String(\"gossip\")\n\n\t\t/// TODO: This is hacky, but we want it so that we can have a different internal & external name\n\t\tinternalSuffix := t.Cluster.Spec.MasterInternalName\n\t\tinternalSuffix = strings.TrimPrefix(internalSuffix, \"api.\")\n\t\tf.DNSInternalSuffix = fi.String(internalSuffix)\n\t}\n\n\tif t.Cluster.Spec.CloudProvider != \"\" {\n\t\tf.Cloud = fi.String(t.Cluster.Spec.CloudProvider)\n\n\t\tif f.DNSProvider == nil {\n\t\t\tswitch kops.CloudProviderID(t.Cluster.Spec.CloudProvider) {\n\t\t\tcase kops.CloudProviderAWS:\n\t\t\t\tf.DNSProvider = fi.String(\"aws-route53\")\n\t\t\tcase kops.CloudProviderGCE:\n\t\t\t\tf.DNSProvider = fi.String(\"google-clouddns\")\n\t\t\tcase kops.CloudProviderVSphere:\n\t\t\t\tf.DNSProvider = fi.String(\"coredns\")\n\t\t\t\tf.ClusterId = fi.String(t.Cluster.ObjectMeta.Name)\n\t\t\t\tf.DNSServer = fi.String(*t.Cluster.Spec.CloudConfig.VSphereCoreDNSServer)\n\t\t\tdefault:\n\t\t\t\tglog.Warningf(\"Unknown cloudprovider %q; won't set DNS provider\", t.Cluster.Spec.CloudProvider)\n\t\t\t}\n\t\t}\n\t}\n\n\tif f.DNSInternalSuffix == nil {\n\t\tf.DNSInternalSuffix = fi.String(\".internal.\" + t.Cluster.ObjectMeta.Name)\n\t}\n\n\tif k8sVersion.Major == 1 && k8sVersion.Minor <= 5 {\n\t\tf.ApplyTaints = fi.Bool(true)\n\t}\n\n\treturn f\n}", "title": "" }, { "docid": "e99c47d35fbfcfe361251a8548a4bfdc", "score": "0.4712326", "text": "func (*ApplicationControlGroups) GetPath() string { return \"/api/objects/application_control/group/\" }", "title": "" }, { "docid": "383fc817f827e7f41f4fbb0e4c65788c", "score": "0.47108442", "text": "func (c *Controller) newSubmarineServerRBAC(serviceaccount_namespace string) error {\n\tklog.Info(\"[newSubmarineServerRBAC]\")\n\tserverName := \"submarine-server\"\n\t// Step1: Create ClusterRole\n\tclusterrole, clusterrole_err := c.clusterroleLister.Get(serverName)\n\t// If the resource doesn't exist, we'll create it\n\tif errors.IsNotFound(clusterrole_err) {\n\t\tclusterrole, clusterrole_err = c.kubeclientset.RbacV1().ClusterRoles().Create(context.TODO(),\n\t\t\t&rbacv1.ClusterRole{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: serverName,\n\t\t\t\t},\n\t\t\t\tRules: []rbacv1.PolicyRule{\n\t\t\t\t\t{\n\t\t\t\t\t\tVerbs: []string{\"get\", \"list\", \"watch\", \"create\", \"delete\", \"deletecollection\", \"patch\", \"update\"},\n\t\t\t\t\t\tAPIGroups: []string{\"kubeflow.org\"},\n\t\t\t\t\t\tResources: []string{\"tfjobs\", \"tfjobs/status\", \"pytorchjobs\", \"pytorchjobs/status\", \"notebooks\", \"notebooks/status\"},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tVerbs: []string{\"get\", \"list\", \"watch\", \"create\", \"delete\", \"deletecollection\", \"patch\", \"update\"},\n\t\t\t\t\t\tAPIGroups: []string{\"traefik.containo.us\"},\n\t\t\t\t\t\tResources: []string{\"ingressroutes\"},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tVerbs: []string{\"*\"},\n\t\t\t\t\t\tAPIGroups: []string{\"\"},\n\t\t\t\t\t\tResources: []string{\"pods\", \"pods/log\", \"services\", \"persistentvolumes\", 
\"persistentvolumeclaims\"},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tVerbs: []string{\"*\"},\n\t\t\t\t\t\tAPIGroups: []string{\"apps\"},\n\t\t\t\t\t\tResources: []string{\"deployments\", \"deployments/status\"},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmetav1.CreateOptions{})\n\t\tklog.Info(\"\tCreate ClusterRole: \", clusterrole.Name)\n\t}\n\n\t// If an error occurs during Get/Create, we'll requeue the item so we can\n\t// attempt processing again later. This could have been caused by a\n\t// temporary network failure, or any other transient reason.\n\tif clusterrole_err != nil {\n\t\treturn clusterrole_err\n\t}\n\n\t// TODO: (sample-controller) controller.go:287 ~ 293\n\n\tclusterrolebinding, clusterrolebinding_err := c.clusterrolebindingLister.Get(serverName)\n\t// If the resource doesn't exist, we'll create it\n\tif errors.IsNotFound(clusterrolebinding_err) {\n\t\tclusterrolebinding, clusterrolebinding_err = c.kubeclientset.RbacV1().ClusterRoleBindings().Create(context.TODO(),\n\t\t\t&rbacv1.ClusterRoleBinding{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: serverName,\n\t\t\t\t},\n\t\t\t\tSubjects: []rbacv1.Subject{\n\t\t\t\t\trbacv1.Subject{\n\t\t\t\t\t\tKind: \"ServiceAccount\",\n\t\t\t\t\t\tNamespace: serviceaccount_namespace,\n\t\t\t\t\t\tName: serverName,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tRoleRef: rbacv1.RoleRef{\n\t\t\t\t\tKind: \"ClusterRole\",\n\t\t\t\t\tName: serverName,\n\t\t\t\t\tAPIGroup: \"rbac.authorization.k8s.io\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tmetav1.CreateOptions{})\n\t\tklog.Info(\"\tCreate ClusterRoleBinding: \", clusterrolebinding.Name)\n\t}\n\n\t// If an error occurs during Get/Create, we'll requeue the item so we can\n\t// attempt processing again later. This could have been caused by a\n\t// temporary network failure, or any other transient reason.\n\tif clusterrolebinding_err != nil {\n\t\treturn clusterrolebinding_err\n\t}\n\n\t// TODO: (sample-controller) controller.go:287 ~ 293\n\n\treturn nil\n}", "title": "" }, { "docid": "2b27d35577e27ed7f7a0ba651047608a", "score": "0.4693597", "text": "func supportedResources(discoveryClient *discovery.DiscoveryClient) (map[schema.GroupVersionResource]struct{}, error) {\n\t// Next step is to discover all the gettable resource types that the kuberenetes api server knows about.\n\tsupportedResources := []*machineryV1.APIResourceList{}\n\n\t// List out all the preferred api-resources of this server.\n\tapiResources, err := discoveryClient.ServerPreferredResources()\n\tif err != nil && apiResources == nil { // only return if the list is empty\n\t\treturn nil, err\n\t} else if err != nil {\n\t\tglog.Warning(\"ServerPreferredResources could not list all available resources: \", err)\n\t}\n\ttr.NonNSResourceMap = make(map[string]struct{}) //map to store non-namespaced resources\n\t// Filter down to only resources which support WATCH operations.\n\tfor _, apiList := range apiResources { // This comes out in a nested list, so loop through a couple things\n\t\t// This is a copy of apiList but we only insert resources for which GET is supported.\n\t\twatchList := machineryV1.APIResourceList{}\n\t\twatchList.GroupVersion = apiList.GroupVersion\n\t\twatchResources := []machineryV1.APIResource{} // All the resources for which GET works.\n\t\tfor _, apiResource := range apiList.APIResources { // Loop across inner list\n\t\t\t// TODO: Use env variable for ignored resource kinds.\n\t\t\t// Ignore clusters and clusterstatus resources because these are handled by the aggregator.\n\t\t\t// Ignore oauthaccesstoken resources 
because those cause too much noise on OpenShift clusters.\n\t\t\t// Ignore projects as namespaces are overwritten to be projects on Openshift clusters - they tend to share\n\t\t\t// the same uid.\n\t\t\tif apiResource.Name == \"clusters\" ||\n\t\t\t\tapiResource.Name == \"clusterstatuses\" ||\n\t\t\t\tapiResource.Name == \"oauthaccesstokens\" ||\n\t\t\t\tapiResource.Name == \"events\" ||\n\t\t\t\tapiResource.Name == \"projects\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t// add non-namespaced resource to NonNSResourceMap\n\t\t\tif !apiResource.Namespaced {\n\t\t\t\ttr.NonNSResMapMutex.Lock()\n\t\t\t\tif _, ok := tr.NonNSResourceMap[apiResource.Kind]; !ok {\n\t\t\t\t\ttr.NonNSResourceMap[apiResource.Kind] = struct{}{}\n\t\t\t\t}\n\t\t\t\ttr.NonNSResMapMutex.Unlock()\n\n\t\t\t}\n\t\t\tfor _, verb := range apiResource.Verbs {\n\t\t\t\tif verb == \"watch\" {\n\t\t\t\t\twatchResources = append(watchResources, apiResource)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\twatchList.APIResources = watchResources\n\t\t// Add the list to our list of lists that holds GET enabled resources.\n\t\tsupportedResources = append(supportedResources, &watchList)\n\t}\n\n\t// Use handy converter function to convert into GroupVersionResource objects, which we need in order to make informers\n\tgvrList, err := discovery.GroupVersionResources(supportedResources)\n\n\treturn gvrList, err\n}", "title": "" }, { "docid": "4d0ac4435ed4c66d5ba818847b77db64", "score": "0.4690091", "text": "func getK8sNamespaceResource(req *restful.Request, resp *restful.Response) error {\n\t// init Form\n\treq.Request.FormValue(\"\")\n\treq.Request.Form[fieldTag] = []string{namespaceTag}\n\tvar err error\n\tresult := make([]string, 0)\n\n\tfor i, name := range k8sNamespaceGrepNames {\n\t\t// grep replicaSet\n\t\tif result, err = grepNamespace(req, k8sNamespaceGrepQFilters[i], name, result); err != nil {\n\t\t\tblog.Errorf(\"%s | err: %v\", common.BcsErrStorageListResourceFailStr, err)\n\t\t\tlib.ReturnRest(&lib.RestResponse{\n\t\t\t\tResp: resp, Data: []string{},\n\t\t\t\tErrCode: common.BcsErrStorageListResourceFail, Message: common.BcsErrStorageListResourceFailStr})\n\t\t\treturn err\n\t\t}\n\t}\n\n\tlib.ReturnRest(&lib.RestResponse{Resp: resp, Data: result})\n\treturn nil\n}", "title": "" }, { "docid": "c89e3ff078ef3cf38e75424b3fc12370", "score": "0.46819052", "text": "func TestRolloutRestartSelectorMany(t *testing.T) {\n\tfirstDeployment := appsv1.Deployment{}\n\tfirstDeployment.Name = \"nginx-deployment-1\"\n\tsecondDeployment := appsv1.Deployment{}\n\tsecondDeployment.Name = \"nginx-deployment-2\"\n\tlabelSelector := \"app=test\"\n\n\tns := scheme.Codecs.WithoutConversion()\n\ttf := cmdtesting.NewTestFactory().WithNamespace(\"test\")\n\n\tinfo, _ := runtime.SerializerInfoForMediaType(ns.SupportedMediaTypes(), runtime.ContentTypeJSON)\n\tencoder := ns.EncoderForVersion(info.Serializer, rolloutRestartGroupVersionEncoder)\n\ttf.Client = &RolloutRestartRESTClient{\n\t\tRESTClient: &fake.RESTClient{\n\t\t\tGroupVersion: rolloutRestartGroupVersionEncoder,\n\t\t\tNegotiatedSerializer: ns,\n\t\t\tClient: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {\n\t\t\t\tswitch p, m, q := req.URL.Path, req.Method, req.URL.Query(); {\n\t\t\t\tcase p == \"/namespaces/test/deployments\" && m == \"GET\" && q.Get(\"labelSelector\") == labelSelector:\n\t\t\t\t\t// Return the list of 2 deployments\n\t\t\t\t\tresponseDeployments := &appsv1.DeploymentList{}\n\t\t\t\t\tresponseDeployments.Items = []appsv1.Deployment{firstDeployment, 
secondDeployment}\n\t\t\t\t\tbody := io.NopCloser(bytes.NewReader([]byte(runtime.EncodeOrDie(encoder, responseDeployments))))\n\t\t\t\t\treturn &http.Response{StatusCode: http.StatusOK, Header: cmdtesting.DefaultHeader(), Body: body}, nil\n\t\t\t\tcase (p == \"/namespaces/test/deployments/nginx-deployment-1\" || p == \"/namespaces/test/deployments/nginx-deployment-2\") && m == \"PATCH\":\n\t\t\t\t\t// Pick deployment based on path\n\t\t\t\t\tresponseDeployment := firstDeployment\n\t\t\t\t\tif strings.HasSuffix(p, \"nginx-deployment-2\") {\n\t\t\t\t\t\tresponseDeployment = secondDeployment\n\t\t\t\t\t}\n\t\t\t\t\tbody := io.NopCloser(bytes.NewReader([]byte(runtime.EncodeOrDie(encoder, &responseDeployment))))\n\t\t\t\t\treturn &http.Response{StatusCode: http.StatusOK, Header: cmdtesting.DefaultHeader(), Body: body}, nil\n\t\t\t\tdefault:\n\t\t\t\t\tt.Fatalf(\"unexpected request: %#v\\n%#v\", req.URL, req)\n\t\t\t\t\treturn nil, nil\n\t\t\t\t}\n\t\t\t}),\n\t\t},\n\t}\n\n\tstreams, _, buf, _ := genericiooptions.NewTestIOStreams()\n\tcmd := NewCmdRolloutRestart(tf, streams)\n\tcmd.Flags().Set(\"selector\", labelSelector)\n\n\tcmd.Run(cmd, []string{\"deployment\"})\n\texpectedOutput := \"deployment.apps/\" + firstDeployment.Name + \" restarted\\ndeployment.apps/\" + secondDeployment.Name + \" restarted\\n\"\n\tif buf.String() != expectedOutput {\n\t\tt.Errorf(\"expected output: %s, but got: %s\", expectedOutput, buf.String())\n\t}\n}", "title": "" }, { "docid": "e0b5b5e641ac5f2dfc0a555393ec9a38", "score": "0.46800134", "text": "func generateKubeletArgs(argsFromIgnition map[string]string, debug bool) ([]string, error) {\n\tcertDirectory := \"c:\\\\var\\\\lib\\\\kubelet\\\\pki\\\\\"\n\twindowsTaints := \"os=Windows:NoSchedule\"\n\twindowsPriorityClass := \"ABOVE_NORMAL_PRIORITY_CLASS\"\n\t// TODO: Removal of deprecated flags to be done in https://issues.redhat.com/browse/WINC-924\n\tkubeletArgs := []string{\n\t\t\"--config=\" + windows.KubeletConfigPath,\n\t\t\"--bootstrap-kubeconfig=\" + windows.BootstrapKubeconfigPath,\n\t\t\"--kubeconfig=\" + windows.KubeconfigPath,\n\t\t\"--cert-dir=\" + certDirectory,\n\t\t\"--windows-service\",\n\t\t// Registers the Kubelet with Windows specific taints so that linux pods won't get scheduled onto\n\t\t// Windows nodes.\n\t\t\"--register-with-taints=\" + windowsTaints,\n\t\t\"--node-labels=\" + nodeconfig.WindowsOSLabel,\n\t\t\"--resolv-conf=\",\n\t\t// Allows the kubelet process to get more CPU time slices when compared to other processes running on the\n\t\t// Windows host.\n\t\t// See: https://kubernetes.io/docs/concepts/configuration/windows-resource-management/#resource-management-cpu\n\t\t\"--windows-priorityclass=\" + windowsPriorityClass,\n\t}\n\n\tkubeletArgs = append(kubeletArgs, klogVerbosityArg(debug))\n\tif cloudProvider, ok := argsFromIgnition[ignition.CloudProviderOption]; ok {\n\t\tkubeletArgs = append(kubeletArgs, fmt.Sprintf(\"--%s=%s\", ignition.CloudProviderOption, cloudProvider))\n\t}\n\tif cloudConfigValue, ok := argsFromIgnition[ignition.CloudConfigOption]; ok {\n\t\t// cloud config is placed by WMCO in the c:\\k directory with the same file name\n\t\tcloudConfigPath := windows.K8sDir + \"\\\\\" + filepath.Base(cloudConfigValue)\n\t\tkubeletArgs = append(kubeletArgs, fmt.Sprintf(\"--%s=%s\", ignition.CloudConfigOption, cloudConfigPath))\n\t}\n\n\treturn kubeletArgs, nil\n}", "title": "" }, { "docid": "e125594fb7801a58ce2dcf80728c12db", "score": "0.46680397", "text": "func (c *apiServerComponent) authClusterRole() (client.Object, 
client.Object) {\n\tvar name, nameToDelete string\n\tenterpriseName := \"tigera-extension-apiserver-auth-access\"\n\tossName := \"calico-extension-apiserver-auth-access\"\n\tswitch c.cfg.Installation.Variant {\n\tcase operatorv1.TigeraSecureEnterprise:\n\t\tname = enterpriseName\n\t\tnameToDelete = ossName\n\tcase operatorv1.Calico:\n\t\tname = ossName\n\t\tnameToDelete = enterpriseName\n\t}\n\n\trules := []rbacv1.PolicyRule{\n\t\t{\n\t\t\tAPIGroups: []string{\n\t\t\t\t\"\",\n\t\t\t},\n\t\t\tResources: []string{\n\t\t\t\t\"configmaps\",\n\t\t\t},\n\t\t\tVerbs: []string{\n\t\t\t\t\"list\",\n\t\t\t\t\"watch\",\n\t\t\t},\n\t\t\tResourceNames: []string{\n\t\t\t\t\"extension-apiserver-authentication\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tAPIGroups: []string{\n\t\t\t\t\"rbac.authorization.k8s.io\",\n\t\t\t},\n\t\t\tResources: []string{\n\t\t\t\t\"clusterroles\",\n\t\t\t\t\"clusterrolebindings\",\n\t\t\t\t\"roles\",\n\t\t\t\t\"rolebindings\",\n\t\t\t},\n\t\t\tVerbs: []string{\n\t\t\t\t\"get\",\n\t\t\t\t\"list\",\n\t\t\t\t\"watch\",\n\t\t\t},\n\t\t},\n\t}\n\n\tif c.cfg.Installation.KubernetesProvider == operatorv1.ProviderOpenShift {\n\t\trules = append(rules, rbacv1.PolicyRule{\n\t\t\tAPIGroups: []string{\"security.openshift.io\"},\n\t\t\tResources: []string{\"securitycontextconstraints\"},\n\t\t\tVerbs: []string{\"use\"},\n\t\t\tResourceNames: []string{PSSPrivileged},\n\t\t})\n\t}\n\n\treturn &rbacv1.ClusterRole{\n\t\t\tTypeMeta: metav1.TypeMeta{Kind: \"ClusterRole\", APIVersion: \"rbac.authorization.k8s.io/v1\"},\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: name,\n\t\t\t},\n\t\t\tRules: rules,\n\t\t}, &rbacv1.ClusterRole{\n\t\t\tTypeMeta: metav1.TypeMeta{Kind: \"ClusterRole\", APIVersion: \"rbac.authorization.k8s.io/v1\"},\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: nameToDelete,\n\t\t\t},\n\t\t}\n}", "title": "" }, { "docid": "e3ac46c55c6517136e7a011d96b446cf", "score": "0.4665379", "text": "func validateGCPAuthNamespaces(ctx context.Context, t *testing.T, profile string) {\n\trr, err := Run(t, exec.CommandContext(ctx, \"kubectl\", \"--context\", profile, \"create\", \"ns\", \"new-namespace\"))\n\tif err != nil {\n\t\tt.Fatalf(\"%s failed: %v\", rr.Command(), err)\n\t}\n\n\tlogsAsError := func() error {\n\t\trr, err = Run(t, exec.CommandContext(ctx, \"kubectl\", \"--context\", profile, \"logs\", \"-l\", \"app=gcp-auth\", \"-n\", \"gcp-auth\"))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn errors.New(rr.Output())\n\t}\n\n\tgetSecret := func() error {\n\t\t_, err = Run(t, exec.CommandContext(ctx, \"kubectl\", \"--context\", profile, \"get\", \"secret\", \"gcp-auth\", \"-n\", \"new-namespace\"))\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"%w: gcp-auth container logs: %v\", err, logsAsError())\n\t\t}\n\t\treturn err\n\t}\n\n\tif err := retry.Expo(getSecret, Seconds(2), Minutes(1)); err != nil {\n\t\tt.Errorf(\"failed to get secret: %v\", err)\n\t}\n}", "title": "" }, { "docid": "6069c9156114b1d1ccd77b4c0d1db74c", "score": "0.46602347", "text": "func PreInstall(helmInstall *helm.Install, kubeConfig []byte) error {\n\tlog.Infof(\"Start helm pre-install\")\n\n\tcli, err := GetK8sConnection(kubeConfig)\n\tif err != nil {\n\t\tlog.Errorf(\"Can't get kubernetes client: %v\", err)\n\t\treturn err\n\t}\n\n\tv1MetaData := metav1.ObjectMeta{\n\t\tName: helmInstall.ServiceAccount, // \"tiller\"\n\t\tNamespace: helmInstall.Namespace,\n\t}\n\n\tserviceAccount := &apiv1.ServiceAccount{\n\t\tObjectMeta: v1MetaData,\n\t}\n\tlog.Infof(\"Create service account: %q, namespace: %q\", 
v1MetaData.Name, v1MetaData.Namespace)\n\tfor i := 0; i <= 5; i++ {\n\t\t_, err = cli.CoreV1().ServiceAccounts(helmInstall.Namespace).Create(serviceAccount)\n\t\tif err != nil {\n\t\t\tlog.Warningf(\"create service account failed: %v\", err)\n\t\t\tif strings.Contains(err.Error(), \"etcdserver: request timed out\") {\n\t\t\t\ttime.Sleep(time.Duration(40) * time.Second)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif !strings.Contains(err.Error(), \"already exists\") {\n\t\t\t\treturn errors.Wrap(err, \"create service account failed\")\n\t\t\t}\n\t\t}\n\t\tbreak\n\t}\n\n\tclusterRole := &v1.ClusterRole{\n\t\tObjectMeta: v1MetaData,\n\t\tRules: []v1.PolicyRule{{\n\t\t\tAPIGroups: []string{\n\t\t\t\t\"*\",\n\t\t\t},\n\t\t\tResources: []string{\n\t\t\t\t\"*\",\n\t\t\t},\n\t\t\tVerbs: []string{\n\t\t\t\t\"*\",\n\t\t\t},\n\t\t},\n\t\t\t{\n\t\t\t\tNonResourceURLs: []string{\n\t\t\t\t\t\"*\",\n\t\t\t\t},\n\t\t\t\tVerbs: []string{\n\t\t\t\t\t\"*\",\n\t\t\t\t},\n\t\t\t}},\n\t}\n\tlog.Infof(\"Create cluster roles: %q, namespace: %q\", v1MetaData.Name, v1MetaData.Namespace)\n\tclusterRoleName := helmInstall.ServiceAccount\n\tfor i := 0; i <= 5; i++ {\n\t\t_, err = cli.RbacV1().ClusterRoles().Create(clusterRole)\n\t\tif err != nil {\n\t\t\tif strings.Contains(err.Error(), \"etcdserver: request timed out\") {\n\t\t\t\ttime.Sleep(time.Duration(10) * time.Second)\n\t\t\t\tcontinue\n\t\t\t} else if strings.Contains(err.Error(), \"is forbidden\") {\n\t\t\t\t_, errGet := cli.RbacV1().ClusterRoles().Get(\"cluster-admin\", metav1.GetOptions{})\n\t\t\t\tif errGet != nil {\n\t\t\t\t\treturn fmt.Errorf(\"clusterrole create error: %v cluster-admin not found: %v\", err, errGet)\n\t\t\t\t}\n\t\t\t\tclusterRoleName = \"cluster-admin\"\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tlog.Warningf(\"create roles failed: %v\", err)\n\t\t\tif !strings.Contains(err.Error(), \"already exists\") {\n\t\t\t\treturn fmt.Errorf(\"crate roles failed: %s\", err)\n\t\t\t}\n\t\t}\n\t\tbreak\n\t}\n\n\tlog.Debugf(\"ClusterRole name: %s\", clusterRoleName)\n\tlog.Debugf(\"ServiceAccount name: %s\", helmInstall.ServiceAccount)\n\tclusterRoleBinding := &v1.ClusterRoleBinding{\n\t\tObjectMeta: v1MetaData,\n\t\tRoleRef: v1.RoleRef{\n\t\t\tAPIGroup: v1.GroupName,\n\t\t\tKind: \"ClusterRole\",\n\t\t\tName: clusterRoleName,\n\t\t},\n\t\tSubjects: []v1.Subject{\n\t\t\t{\n\t\t\t\tKind: \"ServiceAccount\",\n\t\t\t\tName: helmInstall.ServiceAccount,\n\t\t\t\tNamespace: helmInstall.Namespace,\n\t\t\t},\n\t\t},\n\t}\n\tlog.Infof(\"Crate cluster role bindings: %q, namespace: %q, roleRef: %q\", v1MetaData.Name, v1MetaData.Namespace, clusterRoleName)\n\tfor i := 0; i <= 5; i++ {\n\t\t_, err = cli.RbacV1().ClusterRoleBindings().Create(clusterRoleBinding)\n\t\tif err != nil {\n\t\t\tlog.Warningf(\"create role bindings failed: %v\", err)\n\t\t\tif strings.Contains(err.Error(), \"etcdserver: request timed out\") {\n\t\t\t\ttime.Sleep(time.Duration(10) * time.Second)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif !strings.Contains(err.Error(), \"already exists\") {\n\t\t\t\treturn fmt.Errorf(\"Create role bindings failed: %v\", err)\n\t\t\t}\n\t\t}\n\t\tbreak\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "026d2a9fb0fd09b76b4ad3cbfc37dafe", "score": "0.4657729", "text": "func admitPods(ar v1beta1.AdmissionReview) *v1beta1.AdmissionResponse {\n\tglog.V(4).Infof(\"admitting pods\")\n\n\tpodResource := metav1.GroupVersionResource{Group: \"\", Version: \"v1\", Resource: \"pods\"}\n\tif ar.Request.Resource != podResource {\n\t\terr := fmt.Errorf(\"expect resource to be %s\", 
podResource)\n\t\tglog.Errorf(\"%v\", err)\n\t\treturn toAdmissionResponse(err)\n\t}\n\n\tversionCli, kubeCli := client.NewCliOrDie()\n\n\tname := ar.Request.Name\n\tnamespace := ar.Request.Namespace\n\n\treviewResponse := v1beta1.AdmissionResponse{}\n\treviewResponse.Allowed = false\n\n\tpod, err := kubeCli.CoreV1().Pods(namespace).Get(name, metav1.GetOptions{})\n\tif err != nil {\n\t\tglog.Infof(\"api server send wrong pod info namespace %s name %s err %v\", namespace, name, err)\n\t\treturn &reviewResponse\n\t}\n\n\tglog.V(4).Infof(\"delete %s pod [%s]\", pod.Labels[label.ComponentLabelKey], pod.GetName())\n\n\ttc, err := versionCli.PingcapV1alpha1().TidbClusters(namespace).Get(pod.Labels[label.InstanceLabelKey], metav1.GetOptions{})\n\tif err != nil {\n\t\tglog.Infof(\"fail to fetch tidbcluster info namespace %s clustername(instance) %s err %v\", namespace, pod.Labels[label.InstanceLabelKey], err)\n\t\treturn &reviewResponse\n\t}\n\n\tpdClient := controller.NewDefaultPDControl().GetPDClient(tc)\n\ttidbController := controller.NewDefaultTiDBControl()\n\n\t// if pod is already deleting, return Allowed\n\tif pod.DeletionTimestamp != nil {\n\t\tglog.V(4).Infof(\"pod:[%s/%s] status is timestamp %s\", namespace, name, pod.DeletionTimestamp)\n\t\treviewResponse.Allowed = true\n\t\treturn &reviewResponse\n\t}\n\n\tif pod.Labels[label.ComponentLabelKey] == \"tidb\" {\n\t\tordinal, err := strconv.ParseInt(strings.Split(name, \"-\")[len(strings.Split(name, \"-\"))-1], 10, 32)\n\t\tif err != nil {\n\t\t\tglog.Errorf(\"fail to convert string to int while deleting TiDB err %v\", err)\n\t\t\treturn &reviewResponse\n\t\t}\n\n\t\tinfo, err := tidbController.GetInfo(tc, int32(ordinal))\n\t\tif err != nil {\n\t\t\tglog.Errorf(\"fail to get tidb info error:%v\", err)\n\t\t\treturn &reviewResponse\n\t\t}\n\n\t\tif info.IsOwner && tc.Status.TiDB.StatefulSet.Replicas > 1 {\n\t\t\ttime.Sleep(10 * time.Second)\n\t\t\terr := fmt.Errorf(\"tidb is ddl owner, can't be deleted namespace %s name %s\", namespace, name)\n\t\t\tglog.Error(err)\n\t\t\tsendErr := slack.SendErrMsg(err.Error())\n\t\t\tif sendErr != nil {\n\t\t\t\tglog.Error(sendErr)\n\t\t\t}\n\t\t\t// TODO use context instead\n\t\t\tos.Exit(3)\n\t\t}\n\t\tglog.Infof(\"savely delete pod namespace %s name %s isowner %t\", namespace, name, info.IsOwner)\n\n\t} else if pod.Labels[label.ComponentLabelKey] == \"pd\" {\n\n\t\tleader, err := pdClient.GetPDLeader()\n\t\tif err != nil {\n\t\t\tglog.Errorf(\"fail to get pd leader %v\", err)\n\t\t\treturn &reviewResponse\n\t\t}\n\n\t\tif leader.Name == name && tc.Status.PD.StatefulSet.Replicas > 1 {\n\t\t\ttime.Sleep(10 * time.Second)\n\t\t\terr := fmt.Errorf(\"pd is leader, can't be deleted namespace %s name %s\", namespace, name)\n\t\t\tglog.Error(err)\n\t\t\tsendErr := slack.SendErrMsg(err.Error())\n\t\t\tif sendErr != nil {\n\t\t\t\tglog.Error(sendErr)\n\t\t\t}\n\t\t\t// TODO use context instead\n\t\t\tos.Exit(3)\n\t\t}\n\t\tglog.Infof(\"savely delete pod namespace %s name %s leader name %s\", namespace, name, leader.Name)\n\n\t\t// } else if pod.Labels[label.ComponentLabelKey] == \"tikv\" {\n\t} else if false {\n\n\t\tvar storeID uint64\n\t\tstoreID = 0\n\t\tfor _, store := range tc.Status.TiKV.Stores {\n\t\t\tif store.PodName == name {\n\t\t\t\tstoreID, err = strconv.ParseUint(store.ID, 10, 64)\n\t\t\t\tif err != nil {\n\t\t\t\t\tglog.Errorf(\"fail to convert string to int while deleting PD err %v\", err)\n\t\t\t\t\treturn &reviewResponse\n\t\t\t\t}\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\t// Fail to get store in 
stores\n\t\tif storeID == 0 {\n\t\t\tglog.Errorf(\"fail to find store in TIKV.Stores podname %s\", name)\n\t\t\treturn &reviewResponse\n\t\t}\n\n\t\tstoreInfo, err := pdClient.GetStore(storeID)\n\t\tif err != nil {\n\t\t\tglog.Errorf(\"fail to read storeID %d response %v\", storeID, err)\n\t\t\treturn &reviewResponse\n\t\t}\n\n\t\tbeforeCount := kvLeaderMap[namespace][name]\n\t\tafterCount := storeInfo.Status.LeaderCount\n\n\t\tif beforeCount != 0 && !(afterCount < beforeCount) && tc.Status.TiKV.StatefulSet.Replicas > 1 {\n\t\t\ttime.Sleep(10 * time.Second)\n\t\t\terr := fmt.Errorf(\"failed to evict leader from %s/%s, before: %d, now: %d\",\n\t\t\t\tnamespace, name, beforeCount, afterCount)\n\t\t\tglog.Error(err)\n\t\t\tsendErr := slack.SendErrMsg(err.Error())\n\t\t\tif sendErr != nil {\n\t\t\t\tglog.Error(sendErr)\n\t\t\t}\n\t\t\t// TODO use context instead\n\t\t\tos.Exit(3)\n\t\t}\n\t\tglog.Infof(\"savely delete pod namespace %s name %s before count %d after count %d\", namespace, name, beforeCount, afterCount)\n\t}\n\treviewResponse.Allowed = true\n\treturn &reviewResponse\n}", "title": "" }, { "docid": "f946790a497f5a30dc669b5fa7d41ed5", "score": "0.465603", "text": "func ParseMetaToAPIList(metas []dao.Meta) (res []runtime.Object, err error) {\n\tvar (\n\t\tpodList api.PodList\n\t\tserviceList api.ServiceList\n\t\tsecretList api.SecretList\n\t\tconfigMapList api.ConfigMapList\n\t\tendPointsList api.EndpointsList\n\t\tnodeList api.NodeList\n\t)\n\tfor _, v := range metas {\n\t\tswitch v.Type {\n\t\tcase model.ResourceTypePod:\n\t\t\tvar pod v1.Pod\n\t\t\tvar apiPod api.Pod\n\t\t\tif err = json.Unmarshal([]byte(v.Value), &pod); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif err := k8s_v1_api.Convert_v1_Pod_To_core_Pod(&pod, &apiPod, nil); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tpodList.Items = append(podList.Items, apiPod)\n\t\tcase constants.ResourceTypeService:\n\t\t\tvar svc v1.Service\n\t\t\tvar apiSvc api.Service\n\t\t\tif err = json.Unmarshal([]byte(v.Value), &svc); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif err := k8s_v1_api.Convert_v1_Service_To_core_Service(&svc, &apiSvc, nil); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tserviceList.Items = append(serviceList.Items, apiSvc)\n\t\tcase model.ResourceTypeSecret:\n\t\t\tvar secret v1.Secret\n\t\t\tvar apiSecret api.Secret\n\t\t\tif err = json.Unmarshal([]byte(v.Value), &secret); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif err := k8s_v1_api.Convert_v1_Secret_To_core_Secret(&secret, &apiSecret, nil); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tsecretList.Items = append(secretList.Items, apiSecret)\n\t\tcase model.ResourceTypeConfigmap:\n\t\t\tvar cm v1.ConfigMap\n\t\t\tvar apiCm api.ConfigMap\n\t\t\tif err = json.Unmarshal([]byte(v.Value), &cm); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif err := k8s_v1_api.Convert_v1_ConfigMap_To_core_ConfigMap(&cm, &apiCm, nil); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tconfigMapList.Items = append(configMapList.Items, apiCm)\n\t\tcase constants.ResourceTypeEndpoints:\n\t\t\tvar ep v1.Endpoints\n\t\t\tvar apiEp api.Endpoints\n\t\t\tif err = json.Unmarshal([]byte(v.Value), &ep); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif err := k8s_v1_api.Convert_v1_Endpoints_To_core_Endpoints(&ep, &apiEp, nil); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tendPointsList.Items = append(endPointsList.Items, apiEp)\n\t\tcase model.ResourceTypeNode:\n\t\t\tvar no v1.Node\n\t\t\tvar apiNo 
api.Node\n\t\t\tif err = json.Unmarshal([]byte(v.Value), &no); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif err := k8s_v1_api.Convert_v1_Node_To_core_Node(&no, &apiNo, nil); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tnodeList.Items = append(nodeList.Items, apiNo)\n\t\t}\n\t}\n\tres = append(res, &podList, &serviceList, &secretList, &configMapList, &endPointsList, &nodeList)\n\treturn\n}", "title": "" }, { "docid": "cd3b31ebb0df8f250af0cc3f6ea81806", "score": "0.46500498", "text": "func getActiveK8SPods(orchestrator string) ([]string, error) {\n\n\tlog.Infof(\"Obtaining currently active K8S pods on agent node\")\n\tvar podsList []string\n\tvar config *krestclient.Config\n\tvar kubeconfFile string\n\tvar dir string\n\tif isAtomic == true {\n\t\tdir = \"/var/usr/share\"\n\t} else {\n\t\tdir = \"/usr/share\"\n\t}\n\tif orchestrator == \"k8s\" {\n\t\tkubeconfFile = dir + \"/vsp-k8s/nuage.kubeconfig\"\n\t} else {\n\t\tkubeconfFile = dir + \"/vsp-openshift/nuage.kubeconfig\"\n\t}\n\n\tloadingRules := &clientcmd.ClientConfigLoadingRules{}\n\tloadingRules.ExplicitPath = kubeconfFile\n\tloader := clientcmd.NewNonInteractiveDeferredLoadingClientConfig(loadingRules, &clientcmd.ConfigOverrides{})\n\tkubeConfig, err := loader.ClientConfig()\n\tif err != nil {\n\t\tlog.Errorf(\"Error loading kubeconfig file\")\n\t\treturn podsList, err\n\t}\n\n\tconfig = kubeConfig\n\tkubeClient, err := kclient.New(config)\n\tif err != nil {\n\t\tlog.Errorf(\"Error trying to create kubeclient\")\n\t\treturn podsList, err\n\t}\n\n\tvar listOpts = &kapi.ListOptions{LabelSelector: labels.Everything(), FieldSelector: fields.Everything()}\n\tpods, err := kubeClient.Pods(kapi.NamespaceAll).List(*listOpts)\n\tif err != nil {\n\t\tlog.Errorf(\"Error occured while fetching pods from k8s api server\")\n\t\treturn podsList, err\n\t}\n\n\tvar idList []string\n\tfor _, entry := range pods.Items {\n\t\tfor _, element := range entry.Status.ContainerStatuses {\n\t\t\tstrSlice := strings.Split(element.ContainerID, \"//\")\n\t\t\tif len(strSlice) > 1 {\n\t\t\t\tidList = append(idList, strSlice[1])\n\t\t\t}\n\t\t}\n\t}\n\n\tvar infraIDList []string\n\tfor _, id := range idList {\n\t\tcontUUID, err := getPodContainerUUID(id)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Failed to obtain container UUID for the pod ID %s\", id)\n\t\t}\n\t\tinfraIDList = append(infraIDList, contUUID)\n\t}\n\n\treturn infraIDList, err\n}", "title": "" }, { "docid": "27e44d304dfd05d7d46402a4f4e6e9b6", "score": "0.464976", "text": "func getResourceNamesForGroup(apiPrefix string, apiGroupInfo *APIGroupInfo, pathsToIgnore openapiutil.Trie) ([]string, error) {\n\t// Get the canonical names of every resource we need to build in this api group\n\tresourceNames := make([]string, 0)\n\tfor _, groupVersion := range apiGroupInfo.PrioritizedVersions {\n\t\tfor resource, storage := range apiGroupInfo.VersionedResourcesStorageMap[groupVersion.Version] {\n\t\t\tpath := gpath.Join(apiPrefix, groupVersion.Group, groupVersion.Version, resource)\n\t\t\tif !pathsToIgnore.HasPrefix(path) {\n\t\t\t\tkind, err := genericapi.GetResourceKind(groupVersion, storage, apiGroupInfo.Scheme)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tsampleObject, err := apiGroupInfo.Scheme.New(kind)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tname := openapiutil.GetCanonicalTypeName(sampleObject)\n\t\t\t\tresourceNames = append(resourceNames, name)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn resourceNames, nil\n}", "title": "" }, { "docid": 
"2db7e5cbe60f2a6e660bf778e660c00c", "score": "0.46471128", "text": "func main() {\n\tclusterArg := flag.String(\"cluster\", \"\", \"Cluster name used in the tag: kubernetes.io/cluster/xxx\")\n\troleArg := flag.String(\"role\", \"\", \"master / etcd / node\")\n\troleTagArg := flag.String(\"role-tag\", \"Role\", \"Tag name for specifying node types\")\n\tflag.Parse()\n\n\tnodes, err := node.List(*clusterArg, *roleArg, *roleTagArg)\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tfmt.Print(strings.Join(nodes, \",\"))\n}", "title": "" }, { "docid": "4f63fc6d19e8a43c102f0499c6fbb986", "score": "0.46459085", "text": "func (r *ReconcileArgoCD) reconcileClusterPermissionsSecret(cr *argoprojv1a1.ArgoCD) error {\n\tvar clusterConfigInstance bool\n\tsecret := argoutil.NewSecretWithSuffix(cr, \"default-cluster-config\")\n\tsecret.Labels[common.ArgoCDSecretTypeLabel] = \"cluster\"\n\tdataBytes, _ := json.Marshal(map[string]interface{}{\n\t\t\"tlsClientConfig\": map[string]interface{}{\n\t\t\t\"insecure\": false,\n\t\t},\n\t})\n\n\tnamespaceList := corev1.NamespaceList{}\n\tlistOption := client.MatchingLabels{\n\t\tcommon.ArgoCDManagedByLabel: cr.Namespace,\n\t}\n\tif err := r.Client.List(context.TODO(), &namespaceList, listOption); err != nil {\n\t\treturn err\n\t}\n\n\tvar namespaces []string\n\tfor _, namespace := range namespaceList.Items {\n\t\tnamespaces = append(namespaces, namespace.Name)\n\t}\n\n\tif !containsString(namespaces, cr.Namespace) {\n\t\tnamespaces = append(namespaces, cr.Namespace)\n\t}\n\tsort.Strings(namespaces)\n\n\tsecret.Data = map[string][]byte{\n\t\t\"config\": dataBytes,\n\t\t\"name\": []byte(\"in-cluster\"),\n\t\t\"server\": []byte(common.ArgoCDDefaultServer),\n\t\t\"namespaces\": []byte(strings.Join(namespaces, \",\")),\n\t}\n\n\tif allowedNamespace(cr.Namespace, os.Getenv(\"ARGOCD_CLUSTER_CONFIG_NAMESPACES\")) {\n\t\tclusterConfigInstance = true\n\t}\n\n\tclusterSecrets, err := r.getClusterSecrets(cr)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor _, s := range clusterSecrets.Items {\n\t\t// check if cluster secret with default server address exists\n\t\tif string(s.Data[\"server\"]) == common.ArgoCDDefaultServer {\n\t\t\t// if the cluster belongs to cluster config namespace,\n\t\t\t// remove all namespaces from cluster secret,\n\t\t\t// else update the list of namespaces if value differs.\n\t\t\tif clusterConfigInstance {\n\t\t\t\tdelete(s.Data, \"namespaces\")\n\t\t\t} else {\n\t\t\t\tns := strings.Split(string(s.Data[\"namespaces\"]), \",\")\n\t\t\t\tfor _, n := range namespaces {\n\t\t\t\t\tif !containsString(ns, strings.TrimSpace(n)) {\n\t\t\t\t\t\tns = append(ns, strings.TrimSpace(n))\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tsort.Strings(ns)\n\t\t\t\ts.Data[\"namespaces\"] = []byte(strings.Join(ns, \",\"))\n\t\t\t}\n\t\t\treturn r.Client.Update(context.TODO(), &s)\n\t\t}\n\t}\n\n\tif clusterConfigInstance {\n\t\t// do nothing\n\t\treturn nil\n\t}\n\n\tif err := controllerutil.SetControllerReference(cr, secret, r.Scheme); err != nil {\n\t\treturn err\n\t}\n\treturn r.Client.Create(context.TODO(), secret)\n}", "title": "" }, { "docid": "392b39f5272271ed31622c37103ef27e", "score": "0.46362382", "text": "func validateRestartMultiNodeCluster(ctx context.Context, t *testing.T, profile string) {\n\tif DockerDriver() {\n\t\trr, err := Run(t, exec.Command(\"docker\", \"version\", \"-f\", \"{{.Server.Version}}\"))\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"docker is broken: %v\", err)\n\t\t}\n\t\tif strings.Contains(rr.Stdout.String(), \"azure\") {\n\t\t\tt.Skip(\"kic containers are 
not supported on docker's azure\")\n\t\t}\n\t}\n\t// Restart a full cluster with minikube start\n\tstartArgs := append([]string{\"start\", \"-p\", profile, \"--wait=true\", \"-v=8\", \"--alsologtostderr\"}, StartArgs()...)\n\trr, err := Run(t, exec.CommandContext(ctx, Target(), startArgs...))\n\tif err != nil {\n\t\tt.Fatalf(\"failed to start cluster. args %q : %v\", rr.Command(), err)\n\t}\n\n\t// Make sure minikube status shows 2 running nodes\n\trr, err = Run(t, exec.CommandContext(ctx, Target(), \"-p\", profile, \"status\", \"--alsologtostderr\"))\n\tif err != nil {\n\t\tt.Fatalf(\"failed to run minikube status. args %q : %v\", rr.Command(), err)\n\t}\n\n\tif strings.Count(rr.Stdout.String(), \"host: Running\") != 2 {\n\t\tt.Errorf(\"status says both hosts are not running: args %q: %v\", rr.Command(), rr.Output())\n\t}\n\n\tif strings.Count(rr.Stdout.String(), \"kubelet: Running\") != 2 {\n\t\tt.Errorf(\"status says both kubelets are not running: args %q: %v\", rr.Command(), rr.Output())\n\t}\n\n\t// Make sure kubectl reports that all nodes are ready\n\trr, err = Run(t, exec.CommandContext(ctx, \"kubectl\", \"get\", \"nodes\"))\n\tif err != nil {\n\t\tt.Fatalf(\"failed to run kubectl get nodes. args %q : %v\", rr.Command(), err)\n\t}\n\tif strings.Count(rr.Stdout.String(), \"NotReady\") > 0 {\n\t\tt.Errorf(\"expected 2 nodes to be Ready, got %v\", rr.Output())\n\t}\n\n\trr, err = Run(t, exec.CommandContext(ctx, \"kubectl\", \"get\", \"nodes\", \"-o\", `go-template='{{range .items}}{{range .status.conditions}}{{if eq .type \"Ready\"}} {{.status}}{{\"\\n\"}}{{end}}{{end}}{{end}}'`))\n\tif err != nil {\n\t\tt.Fatalf(\"failed to run kubectl get nodes. args %q : %v\", rr.Command(), err)\n\t}\n\tif strings.Count(rr.Stdout.String(), \"True\") != 2 {\n\t\tt.Errorf(\"expected 2 nodes Ready status to be True, got %v\", rr.Output())\n\t}\n}", "title": "" }, { "docid": "b6333e45ffd4be95538dd4e07a3a28c6", "score": "0.46322852", "text": "func lookupResources(namespace, role string, p Permissions) (resources string, err error) {\n\tif namespace != \"\" { // look up in roles\n\t\tfor _, roles := range p.Roles[namespace] {\t\n\t\t\tvar d map[string]interface{}\n\t\t\tb := []byte(roles)\n\t\t\terr = json.Unmarshal(b, &d)\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", err\n\t\t\t}\n\t\t\tmetadata := d[\"metadata\"].(map[string]interface{})\n\t\t\trname := metadata[\"name\"]\n\t\t\tif rname == role {\n\t\t\t\trules := d[\"rules\"].([]interface{})\n\t\t\t\tfor _, rule := range rules {\n\t\t\t\t\tr := rule.(map[string]interface{})\n\t\t\t\t\trj, _ := struct2json(r)\n\t\t\t\t\tresources += fmt.Sprintf(\"%v\\n\", rj)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t// ... 
otherwise, look up in cluster roles:\n\tfor _, cr := range p.ClusterRoles {\t\n\t\tvar d map[string]interface{}\n\t\tb := []byte(cr)\n\t\terr = json.Unmarshal(b, &d)\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\tmetadata := d[\"metadata\"].(map[string]interface{})\n\t\tcrname := metadata[\"name\"]\n\t\tif crname == role {\n\t\t\trules := d[\"rules\"].([]interface{})\n\t\t\tfor _, rule := range rules {\n\t\t\t\tr := rule.(map[string]interface{})\n\t\t\t\trj, _ := struct2json(r)\n\t\t\t\tresources += fmt.Sprintf(\"%v\\n\", rj)\n\t\t\t}\n\t\t}\n\t}\n\treturn resources, nil\n}", "title": "" }, { "docid": "14f78b8480ac8a3c4e7e8d8251035750", "score": "0.4630157", "text": "func validateGCPAuthAddon(ctx context.Context, t *testing.T, profile string) {\n\tdefer PostMortemLogs(t, profile)\n\n\tt.Run(\"Namespaces\", func(t *testing.T) {\n\t\tvalidateGCPAuthNamespaces(ctx, t, profile)\n\t})\n\n\t// schedule a pod to check environment variables\n\trr, err := Run(t, exec.CommandContext(ctx, \"kubectl\", \"--context\", profile, \"create\", \"-f\", filepath.Join(*testdataDir, \"busybox.yaml\")))\n\tif err != nil {\n\t\tt.Fatalf(\"%s failed: %v\", rr.Command(), err)\n\t}\n\n\tserviceAccountName := \"gcp-auth-test\"\n\t// create a dummy service account so we know the pull secret got added\n\trr, err = Run(t, exec.CommandContext(ctx, \"kubectl\", \"--context\", profile, \"create\", \"sa\", serviceAccountName))\n\tif err != nil {\n\t\tt.Fatalf(\"%s failed: %v\", rr.Command(), err)\n\t}\n\n\t// 8 minutes, because 4 is not enough for images to pull in all cases.\n\tnames, err := PodWait(ctx, t, profile, \"default\", \"integration-test=busybox\", Minutes(8))\n\tif err != nil {\n\t\tt.Fatalf(\"wait: %v\", err)\n\t}\n\n\t// Use this pod to confirm that the env vars are set correctly\n\trr, err = Run(t, exec.CommandContext(ctx, \"kubectl\", \"--context\", profile, \"exec\", names[0], \"--\", \"/bin/sh\", \"-c\", \"printenv GOOGLE_APPLICATION_CREDENTIALS\"))\n\tif err != nil {\n\t\tt.Fatalf(\"printenv creds: %v\", err)\n\t}\n\n\tgot := strings.TrimSpace(rr.Stdout.String())\n\texpected := \"/google-app-creds.json\"\n\tif got != expected {\n\t\tt.Errorf(\"'printenv GOOGLE_APPLICATION_CREDENTIALS' returned %s, expected %s\", got, expected)\n\t}\n\n\t// Now check the service account and make sure the \"gcp-auth\" image pull secret is present\n\trr, err = Run(t, exec.CommandContext(ctx, \"kubectl\", \"--context\", profile, \"describe\", \"sa\", serviceAccountName))\n\tif err != nil {\n\t\tt.Fatalf(\"%s failed: %v\", rr.Command(), err)\n\t}\n\n\texpectedPullSecret := \"gcp-auth\"\n\tre := regexp.MustCompile(`.*Image pull secrets:.*`)\n\tsecrets := re.FindString(rr.Stdout.String())\n\tif !strings.Contains(secrets, expectedPullSecret) {\n\t\tt.Errorf(\"Unexpected image pull secrets. 
expected %s, got %s\", expectedPullSecret, secrets)\n\t}\n\n\tif !detect.IsOnGCE() || detect.IsCloudShell() {\n\t\t// Make sure the file contents are correct\n\t\trr, err = Run(t, exec.CommandContext(ctx, \"kubectl\", \"--context\", profile, \"exec\", names[0], \"--\", \"/bin/sh\", \"-c\", \"cat /google-app-creds.json\"))\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"cat creds: %v\", err)\n\t\t}\n\n\t\tvar gotJSON map[string]string\n\t\terr = json.Unmarshal(bytes.TrimSpace(rr.Stdout.Bytes()), &gotJSON)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"unmarshal json: %v\", err)\n\t\t}\n\t\texpectedJSON := map[string]string{\n\t\t\t\"client_id\": \"haha\",\n\t\t\t\"client_secret\": \"nice_try\",\n\t\t\t\"quota_project_id\": \"this_is_fake\",\n\t\t\t\"refresh_token\": \"maybe_next_time\",\n\t\t\t\"type\": \"authorized_user\",\n\t\t}\n\n\t\tif !reflect.DeepEqual(gotJSON, expectedJSON) {\n\t\t\tt.Fatalf(\"unexpected creds file: got %v, expected %v\", gotJSON, expectedJSON)\n\t\t}\n\t}\n\n\t// Check the GOOGLE_CLOUD_PROJECT env var as well\n\trr, err = Run(t, exec.CommandContext(ctx, \"kubectl\", \"--context\", profile, \"exec\", names[0], \"--\", \"/bin/sh\", \"-c\", \"printenv GOOGLE_CLOUD_PROJECT\"))\n\tif err != nil {\n\t\tt.Fatalf(\"print env project: %v\", err)\n\t}\n\n\tgot = strings.TrimSpace(rr.Stdout.String())\n\texpected = \"this_is_fake\"\n\n\tif got != expected {\n\t\tt.Errorf(\"'printenv GOOGLE_CLOUD_PROJECT' returned %s, expected %s\", got, expected)\n\t}\n\n\tdisableGCPAuth := func() error {\n\t\t_, err = Run(t, exec.CommandContext(ctx, Target(), \"-p\", profile, \"addons\", \"disable\", \"gcp-auth\", \"--alsologtostderr\", \"-v=1\"))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t}\n\n\tif err := retry.Expo(disableGCPAuth, Minutes(2), Minutes(10), 5); err != nil {\n\t\tt.Errorf(\"failed to disable GCP auth addon: %v\", err)\n\t}\n\n\t// If we're on GCE, we have proper credentials and can test the registry secrets with an artifact registry image\n\tif detect.IsOnGCE() && !detect.IsCloudShell() && !VMDriver() {\n\t\tt.Skip(\"skipping GCPAuth addon test until 'Permission \\\"artifactregistry.repositories.downloadArtifacts\\\" denied on resource \\\"projects/k8s-minikube/locations/us/repositories/test-artifacts\\\" (or it may not exist)' issue is resolved\")\n\t\t// \"Setting the environment variable MOCK_GOOGLE_TOKEN to true will prevent using the google application credentials to fetch the token used for the image pull secret. 
Instead the token will be mocked.\"\n\t\t// ref: https://github.com/GoogleContainerTools/gcp-auth-webhook#gcp-auth-webhook\n\t\tos.Unsetenv(\"MOCK_GOOGLE_TOKEN\")\n\t\t// re-set MOCK_GOOGLE_TOKEN once we're done\n\t\tdefer os.Setenv(\"MOCK_GOOGLE_TOKEN\", \"true\")\n\n\t\tos.Unsetenv(\"GOOGLE_APPLICATION_CREDENTIALS\")\n\t\tos.Unsetenv(\"GOOGLE_CLOUD_PROJECT\")\n\t\targs := []string{\"-p\", profile, \"addons\", \"enable\", \"gcp-auth\"}\n\t\trr, err := Run(t, exec.CommandContext(ctx, Target(), args...))\n\t\tif err != nil {\n\t\t\tt.Errorf(\"%s failed: %v\", rr.Command(), err)\n\t\t} else if !strings.Contains(rr.Output(), \"It seems that you are running in GCE\") {\n\t\t\tt.Errorf(\"Unexpected error message: %v\", rr.Output())\n\t\t}\n\t\t_, err = Run(t, exec.CommandContext(ctx, \"kubectl\", \"--context\", profile, \"apply\", \"-f\", filepath.Join(*testdataDir, \"private-image.yaml\")))\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"print env project: %v\", err)\n\t\t}\n\n\t\t// Make sure the pod is up and running, which means we successfully pulled the private image down\n\t\t// 8 minutes, because 4 is not enough for images to pull in all cases.\n\t\t_, err = PodWait(ctx, t, profile, \"default\", \"integration-test=private-image\", Minutes(8))\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"wait for private image: %v\", err)\n\t\t}\n\n\t\t// Try it with a European mirror as well\n\t\t_, err = Run(t, exec.CommandContext(ctx, \"kubectl\", \"--context\", profile, \"apply\", \"-f\", filepath.Join(*testdataDir, \"private-image-eu.yaml\")))\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"print env project: %v\", err)\n\t\t}\n\n\t\t_, err = PodWait(ctx, t, profile, \"default\", \"integration-test=private-image-eu\", Minutes(8))\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"wait for private image: %v\", err)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "2db290b457e617baf9c25d6d0e63f478", "score": "0.46259934", "text": "func TestOrgLabelFilter4(t *testing.T) {\n\tvar jsonBlob = []byte(`{\n \"clusterName\": \"terminus-dev\",\n \"kind\": \"MARATHON\",\n \"name\": \"MARATHONFORTERMINUSDEV\",\n \"options\": {\n \"ADDR\": \"http://master.mesos/service/marathon\",\n \"CPU_SUBSCRIBE_RATIO\": \"10\",\n \"ENABLETAG\": \"true\"\n },\n \"optionsPlus\": {\n \"orgs\": [\n {\n \"name\": \"1\",\n \"options\": {\n \"ENABLE_ORG\": \"true\"\n },\n \"workspaces\": [\n {\n \"name\": \"test\",\n \"options\": {\n \"CPU_SUBSCRIBE_RATIO\": \"2\"\n }\n }\n ]\n }\n ]\n }\n}`)\n\n\tvar eConfig executorconfig.ExecutorConfig\n\terr := json.Unmarshal(jsonBlob, &eConfig)\n\tassert.Nil(t, err)\n\n\tvar result labelconfig.RawLabelRuleResult\n\tvar result2 labelconfig.RawLabelRuleResult2\n\n\tli2 := &labelconfig.LabelInfo{\n\t\tLabel: map[string]string{},\n\t\tExecutorName: eConfig.Name,\n\t\tExecutorKind: eConfig.Kind,\n\t\tExecutorConfig: &executorconfig.ExecutorWholeConfigs{BasicConfig: eConfig.Options, PlusConfigs: eConfig.OptionsPlus},\n\t\tOptionsPlus: eConfig.OptionsPlus,\n\t\tObjName: \"test-1111\",\n\t}\n\n\tOrgLabelFilter(&result, &result2, li2)\n\tassert.Zero(t, len(result.UnLikes))\n\tassert.Zero(t, len(result.LikePrefixs))\n\tassert.Zero(t, len(result.Likes))\n\tassert.Zero(t, len(result.ExclusiveLikes))\n\t//assert.Equal(t, []string{\"org-\"}, result.UnLikePrefixs)\n}", "title": "" }, { "docid": "ced4add8465bb35b33efc5b6be82d30d", "score": "0.4624224", "text": "func cleanCluster(logger log.Logger, k8sClient *kubernetes.Clientset) error {\n\tlevel.Info(logger).Log(\"msg\", \"Cleaning cluster\")\n\tlistOpts := metav1.ListOptions{\n\t\tLabelSelector: 
\"group=loadtest\",\n\t}\n\n\t// Delete namespaces (and everything in them).\n\tnss, err := k8sClient.CoreV1().Namespaces().List(listOpts)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to list namespaces\")\n\t}\n\n\tfor _, ns := range nss.Items {\n\t\tlevel.Info(logger).Log(\"msg\", \"Deleting namespace\", \"name\", ns.Name)\n\t\tif err := k8sClient.CoreV1().Namespaces().Delete(ns.Name, nil); err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to delete namespace (%s)\", ns.Name)\n\t\t}\n\t}\n\n\t// Delete clusterrolebindings as they don't live in a namespace.\n\tcrbs, err := k8sClient.RbacV1().ClusterRoleBindings().List(listOpts)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to list clusterrolebindings\")\n\t}\n\tfor _, crb := range crbs.Items {\n\t\tlevel.Info(logger).Log(\"msg\", \"Deleting CRB\", \"name\", crb.Name)\n\t\tif err := k8sClient.RbacV1().ClusterRoleBindings().Delete(crb.Name, nil); err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to delete crb (%s)\", crb.Name)\n\t\t}\n\t}\n\n\t// Block until namespaces have finished deleting.\n\tlevel.Info(logger).Log(\"msg\", \"Waiting for namespaces to delete\")\n\tvar totalTime time.Duration\n\tfor {\n\t\tnss, err := k8sClient.CoreV1().Namespaces().List(listOpts)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to list namespaces\")\n\t\t}\n\n\t\t// Return if all namespaces are active.\n\t\tallActive := true\n\t\tfor _, ns := range nss.Items {\n\t\t\tallActive = allActive && ns.Status.Phase == v1.NamespaceActive\n\t\t}\n\n\t\tif allActive {\n\t\t\treturn nil\n\t\t}\n\n\t\ttime.Sleep(cleanupPollInterval)\n\t\ttotalTime += cleanupPollInterval\n\t\tlevel.Info(logger).Log(\"msg\", \"Still deleting\", \"time\", totalTime)\n\t}\n}", "title": "" }, { "docid": "6e2b17b6fed5f4ba9d76de20d01476cb", "score": "0.4618215", "text": "func createManifestsStructure(t *testing.T) (func(), string) {\n\t/*\n\t\tmanifestsRoot\n\t\t|--.operator\n\t\t |--configs\n\t\t\t\t\t|--kustomization.yaml\n\t\t\t |--secrets\n\t\t\t\t\t|--kustomization.yaml\n\t\t\t |--transformers\n\t\t\t\t\t|--kustomization.yaml\n\t\t\t\t\t|--storage-class.yaml\n\t\t\t\t\t|--release-name-template.yaml\n\t*/\n\tdir, _ := ioutil.TempDir(\"\", \"test_manifests\")\n\toprCnfDir := filepath.Join(dir, \".operator\", \"configs\")\n\toprSecDir := filepath.Join(dir, \".operator\", \"secrets\")\n\toprTansDir := filepath.Join(dir, \".operator\", \"transformers\")\n\tos.MkdirAll(oprCnfDir, tempPermissionCode)\n\tos.MkdirAll(oprSecDir, tempPermissionCode)\n\tos.MkdirAll(oprTansDir, tempPermissionCode)\n\n\tk := `\napiVersion: kustomize.config.k8s.io/v1beta1\nkind: Kustomization\nresources:`\n\n\terr := ioutil.WriteFile(filepath.Join(oprCnfDir, \"kustomization.yaml\"), []byte(k), tempPermissionCode)\n\tif err != nil {\n\t\tt.Log(err)\n\t\tos.Exit(1)\n\t}\n\tioutil.WriteFile(filepath.Join(oprSecDir, \"kustomization.yaml\"), []byte(k), tempPermissionCode)\n\tstk := `\napiVersion: kustomize.config.k8s.io/v1beta1\nkind: Kustomization\nresources:\n- storage-class.yaml\n`\n\tscf := `\napiVersion: qlik.com/v1\nkind: SelectivePatch\nenabled: true\npatches:\n- target:\n\t\tname: storageClassName\n\t\tlabelSelector: app=engine\n\tpatch: |-\n\t\t- op: replace\n\t\t\tpath: /enabled\n\t\t\tvalue: false\n- target:\n\t\tname: storageClassName\n\t\tlabelSelector: app=qix-datafiles\n\tpatch: |-\n\t\t- op: replace\n\t\t\tpath: /enabled\n\t\t\tvalue: false`\n\terr = ioutil.WriteFile(filepath.Join(oprTansDir, \"kustomization.yaml\"), []byte(stk), tempPermissionCode)\n\tif err != 
nil {\n\t\tt.Log(err)\n\t\tos.Exit(1)\n\t}\n\terr = ioutil.WriteFile(filepath.Join(oprTansDir, \"storage-class.yaml\"), []byte(scf), tempPermissionCode)\n\tif err != nil {\n\t\tt.Log(err)\n\t\tos.Exit(1)\n\t}\n\trn := `\napiVersion: qlik.com/v1\nkind: SelectivePatch\nmetadata:\n name: release\nenabled: true\npatches:\n- target:\n name: release\n kind: LabelTransformer\n patch: |-\n apiVersion: builtin\n kind: LabelTransformer\n metadata:\n name: release\n labels:\n release: qliksense`\n\terr = ioutil.WriteFile(filepath.Join(oprTansDir, releaseTemplateFileName), []byte(rn), tempPermissionCode)\n\tif err != nil {\n\t\tt.Log(err)\n\t\tos.Exit(1)\n\t}\n\ttearDown := func() {\n\t\tos.RemoveAll(dir)\n\t}\n\treturn tearDown, dir\n}", "title": "" }, { "docid": "482ef8ac5a4a09cad0ebf394ff0be1b0", "score": "0.46172535", "text": "func (a Autoscaling_v2beta1Api) ListNamespacedHorizontalPodAutoscaler(namespace string, pretty string, continue_ string, fieldSelector string, includeUninitialized bool, labelSelector string, limit int32, resourceVersion string, timeoutSeconds int32, watch bool) (*V2beta1HorizontalPodAutoscalerList, *APIResponse, error) {\n\n\tvar localVarHttpMethod = strings.ToUpper(\"Get\")\n\t// create path and map variables\n\tlocalVarPath := a.Configuration.BasePath + \"/apis/autoscaling/v2beta1/namespaces/{namespace}/horizontalpodautoscalers\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"namespace\"+\"}\", fmt.Sprintf(\"%v\", namespace), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := make(map[string]string)\n\tvar localVarPostBody interface{}\n\tvar localVarFileName string\n\tvar localVarFileBytes []byte\n\t// authentication '(BearerToken)' required\n\t// set key with prefix in header\n\tlocalVarHeaderParams[\"authorization\"] = a.Configuration.GetAPIKeyWithPrefix(\"authorization\")\n\t// add default headers if any\n\tfor key := range a.Configuration.DefaultHeader {\n\t\tlocalVarHeaderParams[key] = a.Configuration.DefaultHeader[key]\n\t}\n\tlocalVarQueryParams.Add(\"pretty\", a.Configuration.APIClient.ParameterToString(pretty, \"\"))\n\tlocalVarQueryParams.Add(\"continue\", a.Configuration.APIClient.ParameterToString(continue_, \"\"))\n\tlocalVarQueryParams.Add(\"fieldSelector\", a.Configuration.APIClient.ParameterToString(fieldSelector, \"\"))\n\tlocalVarQueryParams.Add(\"includeUninitialized\", a.Configuration.APIClient.ParameterToString(includeUninitialized, \"\"))\n\tlocalVarQueryParams.Add(\"labelSelector\", a.Configuration.APIClient.ParameterToString(labelSelector, \"\"))\n\tlocalVarQueryParams.Add(\"limit\", a.Configuration.APIClient.ParameterToString(limit, \"\"))\n\tlocalVarQueryParams.Add(\"resourceVersion\", a.Configuration.APIClient.ParameterToString(resourceVersion, \"\"))\n\tlocalVarQueryParams.Add(\"timeoutSeconds\", a.Configuration.APIClient.ParameterToString(timeoutSeconds, \"\"))\n\tlocalVarQueryParams.Add(\"watch\", a.Configuration.APIClient.ParameterToString(watch, \"\"))\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{ \"*/*\", }\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := 
[]string{\n\t\t\"application/json\",\n\t\t\"application/yaml\",\n\t\t\"application/vnd.kubernetes.protobuf\",\n\t\t\"application/json;stream=watch\",\n\t\t\"application/vnd.kubernetes.protobuf;stream=watch\",\n\t\t}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tvar successPayload = new(V2beta1HorizontalPodAutoscalerList)\n\tlocalVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\n\tvar localVarURL, _ = url.Parse(localVarPath)\n\tlocalVarURL.RawQuery = localVarQueryParams.Encode()\n\tvar localVarAPIResponse = &APIResponse{Operation: \"ListNamespacedHorizontalPodAutoscaler\", Method: localVarHttpMethod, RequestURL: localVarURL.String()}\n\tif localVarHttpResponse != nil {\n\t\tlocalVarAPIResponse.Response = localVarHttpResponse.RawResponse\n\t\tlocalVarAPIResponse.Payload = localVarHttpResponse.Body()\n\t}\n\n\tif err != nil {\n\t\treturn successPayload, localVarAPIResponse, err\n\t}\n\terr = json.Unmarshal(localVarHttpResponse.Body(), &successPayload)\n\treturn successPayload, localVarAPIResponse, err\n}", "title": "" }, { "docid": "8f1e151733c92bb7cf80b674ad00d070", "score": "0.46151823", "text": "func getKubePods(ctx context.Context, pods []*libpod.Pod, options entities.GenerateKubeOptions) ([][]byte, [][]byte, error) {\n\tout := [][]byte{}\n\tsvcs := [][]byte{}\n\n\tfor _, p := range pods {\n\t\tpo, sp, err := p.GenerateForKube(ctx, options.Service, options.UseLongAnnotations, options.PodmanOnly)\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\n\t\tswitch options.Type {\n\t\tcase define.K8sKindDeployment:\n\t\t\tdep, err := libpod.GenerateForKubeDeployment(ctx, libpod.ConvertV1PodToYAMLPod(po), options)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tb, err := generateKubeYAML(dep)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tout = append(out, b)\n\t\tcase define.K8sKindPod:\n\t\t\tb, err := generateKubeYAML(libpod.ConvertV1PodToYAMLPod(po))\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tout = append(out, b)\n\t\tdefault:\n\t\t\treturn nil, nil, fmt.Errorf(\"invalid generation type - only pods and deployments are currently supported\")\n\t\t}\n\n\t\tif options.Service {\n\t\t\tsvc, err := libpod.GenerateKubeServiceFromV1Pod(po, sp)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tb, err := generateKubeYAML(svc)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tsvcs = append(svcs, b)\n\t\t}\n\t}\n\n\treturn out, svcs, nil\n}", "title": "" }, { "docid": "2a8bfde61ddedb3ae6dc8dc13aed5b56", "score": "0.46143776", "text": "func storeCompositions_etcd(resourceKind string, resourceName string, compositionTree *[]CompositionTreeNode) {\n\t//fmt.Println(\"Entering storeCompositions_etcd\")\n\tjsonCompositionTree, err := json.Marshal(compositionTree)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tresourceComps := string(jsonCompositionTree)\n\tcfg := client.Config{\n\t\t//Endpoints: []string{\"http://192.168.99.100:32379\"},\n\t\tEndpoints: []string{etcdServiceURL},\n\t\tTransport: client.DefaultTransport,\n\t\t// set timeout per request to fail fast when the target endpoint is unavailable\n\t\t//HeaderTimeoutPerRequest: 
time.Second,\n\t}\n\t//fmt.Printf(\"%v\\n\", cfg)\n\tc, err := client.New(cfg)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tkapi := client.NewKeysAPI(c)\n\t// set \"/foo\" key with \"bar\" value\n\t//resourceKey := \"/compositions/Deployment/pod42test-deployment\"\n\t//resourceProv := \"{1 ReplicaSet; 2 Pod -1}\"\n\tresourceKey := string(\"/compositions/\" + resourceKind + \"/\" + resourceName)\n\tfmt.Printf(\"Setting %s->%s\\n\", resourceKey, resourceComps)\n\tresp, err := kapi.Set(context.Background(), resourceKey, resourceComps, nil)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t} else {\n\t\t// print common key info\n\t\tlog.Printf(\"Set is done. Metadata is %q\\n\", resp)\n\t}\n\t//fmt.Printf(\"Getting value for %s\\n\", resourceKey)\n\tresp, err = kapi.Get(context.Background(), resourceKey, nil)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t} else {\n\t\t// print common key info\n\t\t//log.Printf(\"Get is done. Metadata is %q\\n\", resp)\n\t\t// print value\n\t\tlog.Printf(\"%q key has %q value\\n\", resp.Node.Key, resp.Node.Value)\n\t}\n\t//fmt.Println(\"Exiting storeCompositions_etcd\")\n}", "title": "" }, { "docid": "c7fc45415ab40099bacffde1efd24c4e", "score": "0.4611115", "text": "func (c *client) ListKubernetesProvidersAndPermissions() ([]kubernetes.Provider, error) {\n\tps := []kubernetes.Provider{}\n\n\trows, err := c.db.Table(\"kubernetes_providers a\").\n\t\tSelect(\"a.name, \" +\n\t\t\t\"a.host, \" +\n\t\t\t\"a.ca_data, \" +\n\t\t\t\"b.read_group, \" +\n\t\t\t\"c.write_group\").\n\t\tJoins(\"LEFT JOIN provider_read_permissions b ON a.name = b.account_name\").\n\t\tJoins(\"LEFT JOIN provider_write_permissions c ON a.name = c.account_name\").\n\t\tRows()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer rows.Close()\n\n\tproviders := map[string]kubernetes.Provider{}\n\treadGroups := map[string][]string{}\n\twriteGroups := map[string][]string{}\n\n\tfor rows.Next() {\n\t\tvar r struct {\n\t\t\tCAData string\n\t\t\tHost string\n\t\t\tName string\n\t\t\tReadGroup *string\n\t\t\tWriteGroup *string\n\t\t}\n\n\t\terr = rows.Scan(&r.Name, &r.Host, &r.CAData, &r.ReadGroup, &r.WriteGroup)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif _, ok := providers[r.Name]; !ok {\n\t\t\tp := kubernetes.Provider{\n\t\t\t\tName: r.Name,\n\t\t\t\tHost: r.Host,\n\t\t\t\tCAData: r.CAData,\n\t\t\t}\n\t\t\tproviders[r.Name] = p\n\t\t}\n\n\t\tif r.ReadGroup != nil {\n\t\t\tif _, ok := readGroups[r.Name]; !ok {\n\t\t\t\treadGroups[r.Name] = []string{}\n\t\t\t}\n\n\t\t\tif !contains(readGroups[r.Name], *r.ReadGroup) {\n\t\t\t\treadGroups[r.Name] = append(readGroups[r.Name], *r.ReadGroup)\n\t\t\t}\n\t\t}\n\n\t\tif r.WriteGroup != nil {\n\t\t\tif _, ok := writeGroups[r.Name]; !ok {\n\t\t\t\twriteGroups[r.Name] = []string{}\n\t\t\t}\n\n\t\t\tif !contains(writeGroups[r.Name], *r.WriteGroup) {\n\t\t\t\twriteGroups[r.Name] = append(writeGroups[r.Name], *r.WriteGroup)\n\t\t\t}\n\t\t}\n\t}\n\n\tfor name, provider := range providers {\n\t\tprovider.Permissions.Read = readGroups[name]\n\t\tprovider.Permissions.Write = writeGroups[name]\n\t\tps = append(ps, provider)\n\t}\n\n\t// Sort ascending by name.\n\tsort.Slice(ps, func(i, j int) bool {\n\t\treturn ps[i].Name < ps[j].Name\n\t})\n\n\treturn ps, nil\n}", "title": "" }, { "docid": "9acc416a3a666ba0ed3eb23f220877ee", "score": "0.46103537", "text": "func parseGroupSubdirs(apisDir string, strictVersionMatch bool) (map[string][]string, error) {\n\tgvs := make(map[string][]string)\n\tgroups, err := ioutil.ReadDir(apisDir)\n\tif err != nil {\n\t\treturn nil, 
fmt.Errorf(\"error reading directory %q to find API groups: %v\", apisDir, err)\n\t}\n\n\tfor _, g := range groups {\n\t\tif g.IsDir() {\n\t\t\tgroupDir := filepath.Join(apisDir, g.Name())\n\t\t\tversions, err := ioutil.ReadDir(groupDir)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"error reading directory %q to find API versions: %v\", groupDir, err)\n\t\t\t}\n\n\t\t\tgvs[g.Name()] = make([]string, 0)\n\t\t\tfor _, v := range versions {\n\t\t\t\tif v.IsDir() {\n\t\t\t\t\t// Ignore directories that do not contain any files, so generators\n\t\t\t\t\t// do not get empty directories as arguments.\n\t\t\t\t\tverDir := filepath.Join(groupDir, v.Name())\n\t\t\t\t\tfiles, err := ioutil.ReadDir(verDir)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\"error reading directory %q to find API source files: %v\", verDir, err)\n\t\t\t\t\t}\n\t\t\t\t\tfor _, f := range files {\n\t\t\t\t\t\tif !f.IsDir() && filepath.Ext(f.Name()) == \".go\" {\n\t\t\t\t\t\t\t// If strictVersionMatch is true, strictly check if v.Name()\n\t\t\t\t\t\t\t// is a Kubernetes API version.\n\t\t\t\t\t\t\tif !strictVersionMatch || versionRegexp.MatchString(v.Name()) {\n\t\t\t\t\t\t\t\tgvs[g.Name()] = append(gvs[g.Name()], v.Name())\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(gvs) == 0 {\n\t\treturn nil, fmt.Errorf(\"no groups or versions found in %s\", apisDir)\n\t}\n\treturn gvs, nil\n}", "title": "" }, { "docid": "e7073816b63417b3ccd1a42b6ab91f7f", "score": "0.4608321", "text": "func createKubeconfig(clusterName string, username string, clusterControlPlaceAddress string, caBasebase64 string, crtBase64 string, privateKeyBase64 string) (kubeconfigYAML string) {\n\tkubeconfigYAML = fmt.Sprintf(`apiVersion: v1\nkind: Config\ncurrent-context: %s\nclusters:\n - name: %s\n cluster:\n server: %s\n certificate-authority-data: %s\ncontexts:\n - context:\n cluster: %s\n user: %s\n name: %s\nusers:\n - name: %s\n user:\n client-certificate-data: %s\n client-key-data: %s`,\n\t\tclusterName, clusterName, clusterControlPlaceAddress, caBasebase64, clusterName, username, clusterName, username, crtBase64, privateKeyBase64)\n\n\treturn kubeconfigYAML\n}", "title": "" }, { "docid": "6ac9caeb9c7754f890472789c59b694f", "score": "0.46079522", "text": "func ResourceGroup() *schema.Resource {\n\treadContext := func(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {\n\t\tgroup, err := NewGroupsAPI(ctx, m).Read(d.Id())\n\t\tif err != nil {\n\t\t\tif e, ok := err.(common.APIError); ok && e.IsMissing() {\n\t\t\t\tlog.Printf(\"missing resource due to error: %v\\n\", e)\n\t\t\t\td.SetId(\"\")\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\treturn diag.FromErr(err)\n\t\t}\n\t\tif err = d.Set(\"display_name\", group.DisplayName); err != nil {\n\t\t\treturn diag.FromErr(err)\n\t\t}\n\t\tif err = d.Set(\"allow_cluster_create\", isGroupClusterCreateEntitled(&group)); err != nil {\n\t\t\treturn diag.FromErr(err)\n\t\t}\n\t\tif err = d.Set(\"allow_sql_analytics_access\", isGroupSQLAnalyticsAccessEntitled(&group)); err != nil {\n\t\t\treturn diag.FromErr(err)\n\t\t}\n\t\tif err = d.Set(\"allow_instance_pool_create\", isGroupInstancePoolCreateEntitled(&group)); err != nil {\n\t\t\treturn diag.FromErr(err)\n\t\t}\n\t\td.Set(\"url\", m.(*common.DatabricksClient).FormatURL(\"#setting/accounts/groups/\", d.Id()))\n\t\treturn nil\n\t}\n\treturn &schema.Resource{\n\t\tCreateContext: func(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics 
{\n\t\t\tgroupName := d.Get(\"display_name\").(string)\n\t\t\tallowClusterCreate := d.Get(\"allow_cluster_create\").(bool)\n\t\t\tallowInstancePoolCreate := d.Get(\"allow_instance_pool_create\").(bool)\n\t\t\tallowSQLAnalyticsAccess := d.Get(\"allow_sql_analytics_access\").(bool)\n\n\t\t\t// If entitlement flags are set to be true\n\t\t\tvar entitlementsList []string\n\t\t\tif allowClusterCreate {\n\t\t\t\tentitlementsList = append(entitlementsList, string(AllowClusterCreateEntitlement))\n\t\t\t}\n\t\t\tif allowSQLAnalyticsAccess {\n\t\t\t\tentitlementsList = append(entitlementsList, string(AllowSQLAnalyticsAccessEntitlement))\n\t\t\t}\n\t\t\tif allowInstancePoolCreate {\n\t\t\t\tentitlementsList = append(entitlementsList, string(AllowInstancePoolCreateEntitlement))\n\t\t\t}\n\t\t\tgroup, err := NewGroupsAPI(ctx, m).Create(groupName, nil, nil, entitlementsList)\n\t\t\tif err != nil {\n\t\t\t\treturn diag.FromErr(err)\n\t\t\t}\n\t\t\td.SetId(group.ID)\n\t\t\treturn readContext(ctx, d, m)\n\t\t},\n\t\tUpdateContext: func(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {\n\t\t\t// Handle entitlements update\n\t\t\tvar entitlementsAddList []string\n\t\t\tvar entitlementsRemoveList []string\n\t\t\t// If allow_cluster_create has changed\n\t\t\tif d.HasChange(\"allow_cluster_create\") {\n\t\t\t\tallowClusterCreate := d.Get(\"allow_cluster_create\").(bool)\n\t\t\t\t// Changed to true\n\t\t\t\tif allowClusterCreate {\n\t\t\t\t\tentitlementsAddList = append(entitlementsAddList, string(AllowClusterCreateEntitlement))\n\t\t\t\t} else {\n\t\t\t\t\t// Changed to false\n\t\t\t\t\tentitlementsRemoveList = append(entitlementsRemoveList, string(AllowClusterCreateEntitlement))\n\t\t\t\t}\n\t\t\t}\n\t\t\t// If allow_sql_analytics_access has changed\n\t\t\tif d.HasChange(\"allow_sql_analytics_access\") {\n\t\t\t\tallowSQLAnalyticsAccess := d.Get(\"allow_sql_analytics_access\").(bool)\n\t\t\t\t// Changed to true\n\t\t\t\tif allowSQLAnalyticsAccess {\n\t\t\t\t\tentitlementsAddList = append(entitlementsAddList, string(AllowSQLAnalyticsAccessEntitlement))\n\t\t\t\t} else {\n\t\t\t\t\t// Changed to false\n\t\t\t\t\tentitlementsRemoveList = append(entitlementsRemoveList, string(AllowSQLAnalyticsAccessEntitlement))\n\t\t\t\t}\n\t\t\t}\n\t\t\t// If allow_instance_pool_create has changed\n\t\t\tif d.HasChange(\"allow_instance_pool_create\") {\n\t\t\t\tallowClusterCreate := d.Get(\"allow_instance_pool_create\").(bool)\n\t\t\t\t// Changed to true\n\t\t\t\tif allowClusterCreate {\n\t\t\t\t\tentitlementsAddList = append(entitlementsAddList, string(AllowInstancePoolCreateEntitlement))\n\t\t\t\t} else {\n\t\t\t\t\t// Changed to false\n\t\t\t\t\tentitlementsRemoveList = append(entitlementsRemoveList, string(AllowInstancePoolCreateEntitlement))\n\t\t\t\t}\n\t\t\t}\n\t\t\t// TODO: not currently possible to update group display name\n\t\t\tif entitlementsAddList != nil || entitlementsRemoveList != nil {\n\t\t\t\tif err := NewGroupsAPI(ctx, m).Patch(d.Id(),\n\t\t\t\t\tentitlementsAddList, entitlementsRemoveList,\n\t\t\t\t\tGroupEntitlementsPath); err != nil {\n\t\t\t\t\treturn diag.FromErr(err)\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn nil\n\t\t},\n\t\tReadContext: readContext,\n\t\tDeleteContext: func(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {\n\t\t\tif err := NewGroupsAPI(ctx, m).Delete(d.Id()); err != nil {\n\t\t\t\treturn diag.FromErr(err)\n\t\t\t}\n\t\t\treturn nil\n\t\t},\n\t\tImporter: &schema.ResourceImporter{\n\t\t\tStateContext: 
schema.ImportStatePassthroughContext,\n\t\t},\n\t\tSchema: map[string]*schema.Schema{\n\t\t\t\"display_name\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tForceNew: true,\n\t\t\t\tRequired: true,\n\t\t\t},\n\t\t\t\"allow_cluster_create\": {\n\t\t\t\tType: schema.TypeBool,\n\t\t\t\tOptional: true,\n\t\t\t},\n\t\t\t\"allow_sql_analytics_access\": {\n\t\t\t\tType: schema.TypeBool,\n\t\t\t\tOptional: true,\n\t\t\t},\n\t\t\t\"allow_instance_pool_create\": {\n\t\t\t\tType: schema.TypeBool,\n\t\t\t\tOptional: true,\n\t\t\t},\n\t\t\t\"url\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tComputed: true,\n\t\t\t},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "717cfddf67db35ccb231d2c9aac3dbf3", "score": "0.4606197", "text": "func Name() string { return \"kubernetai\" }", "title": "" }, { "docid": "425256b7856d58bd62393e828993eed5", "score": "0.45837146", "text": "func GenerateRBACClusterRole(resourceName string) (*rbacv1.ClusterRole, error) {\n\n\tgroupName, err := getGroupName(resourceName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tverbs, err := generateVerbsForGroup(groupName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tclusterRole := &rbacv1.ClusterRole{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: resourceName,\n\t\t},\n\t\tRules: []rbacv1.PolicyRule{\n\t\t\t{\n\t\t\t\tAPIGroups: []string{clusterPolicyAPIGroup},\n\t\t\t\tResources: []string{machinedeployments, machinesets, machines},\n\t\t\t\tVerbs: verbs,\n\t\t\t},\n\t\t\t{\n\t\t\t\tAPIGroups: []string{\"\"},\n\t\t\t\tResources: []string{\"configmaps\",\n\t\t\t\t\t\"endpoints\",\n\t\t\t\t\t\"persistentvolumeclaims\",\n\t\t\t\t\t\"pods\",\n\t\t\t\t\t\"replicationcontrollers\",\n\t\t\t\t\t\"replicationcontrollers/scale\",\n\t\t\t\t\t\"serviceaccounts\",\n\t\t\t\t\t\"services\",\n\t\t\t\t\t\"nodes\",\n\t\t\t\t\t\"namespaces\",\n\t\t\t\t},\n\t\t\t\tVerbs: verbs,\n\t\t\t},\n\t\t\t{\n\t\t\t\tAPIGroups: []string{\"\"},\n\t\t\t\tResources: []string{\"bindings\",\n\t\t\t\t\t\"events\",\n\t\t\t\t\t\"limitranges\",\n\t\t\t\t\t\"namespaces/status\",\n\t\t\t\t\t\"pods/log\",\n\t\t\t\t\t\"pods/status\",\n\t\t\t\t\t\"replicationcontrollers/status\",\n\t\t\t\t\t\"resourcequotas\",\n\t\t\t\t\t\"resourcequotas/status\",\n\t\t\t\t},\n\t\t\t\tVerbs: verbs,\n\t\t\t},\n\t\t\t{\n\t\t\t\tAPIGroups: []string{apps.GroupName},\n\t\t\t\tResources: []string{\"controllerrevisions\",\n\t\t\t\t\t\"daemonsets\",\n\t\t\t\t\t\"deployments\",\n\t\t\t\t\t\"deployments/scale\",\n\t\t\t\t\t\"replicasets\",\n\t\t\t\t\t\"replicasets/scale\",\n\t\t\t\t\t\"statefulsets\",\n\t\t\t\t\t\"statefulsets/scale\",\n\t\t\t\t},\n\t\t\t\tVerbs: verbs,\n\t\t\t},\n\t\t\t{\n\t\t\t\tAPIGroups: []string{autoscaling.GroupName},\n\t\t\t\tResources: []string{\"horizontalpodautoscalers\"},\n\t\t\t\tVerbs: verbs,\n\t\t\t},\n\t\t\t{\n\t\t\t\tAPIGroups: []string{batch.GroupName},\n\t\t\t\tResources: []string{\"cronjobs\", \"jobs\"},\n\t\t\t\tVerbs: verbs,\n\t\t\t},\n\t\t\t{\n\t\t\t\tAPIGroups: []string{extensions.GroupName},\n\t\t\t\tResources: []string{\"daemonsets\",\n\t\t\t\t\t\"deployments\",\n\t\t\t\t\t\"deployments/scale\",\n\t\t\t\t\t\"ingresses\",\n\t\t\t\t\t\"networkpolicies\",\n\t\t\t\t\t\"replicasets\",\n\t\t\t\t\t\"replicasets/scale\",\n\t\t\t\t\t\"replicationcontrollers/scale\",\n\t\t\t\t},\n\t\t\t\tVerbs: verbs,\n\t\t\t},\n\t\t\t{\n\t\t\t\tAPIGroups: []string{\"networking.k8s.io\"},\n\t\t\t\tResources: []string{\"ingresses\", \"networkpolicies\"},\n\t\t\t\tVerbs: verbs,\n\t\t\t},\n\t\t},\n\t}\n\tif groupName == rbac.OwnerGroupNamePrefix || groupName == rbac.EditorGroupNamePrefix 
{\n\t\tclusterRole.Rules = []rbacv1.PolicyRule{\n\t\t\t{\n\t\t\t\tAPIGroups: []string{\"*\"},\n\t\t\t\tResources: []string{\"*\"},\n\t\t\t\tVerbs: verbs,\n\t\t\t}}\n\t}\n\treturn clusterRole, nil\n}", "title": "" }, { "docid": "70aa3734606f60f47f3b75088577dcdc", "score": "0.45766038", "text": "func (c *operatorLogLevelNormalizer) sync(ctx context.Context, syncCtx factory.SyncContext) error {\n\tgvrs := []schema.GroupVersionResource{\n\t\t{\n\t\t\tGroup: \"imageregistry.operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"configs\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"configs\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"etcds\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"kubeapiservers\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"kubecontrollermanagers\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"kubeschedulers\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"openshiftapiservers\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"cloudcredentials\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"kubestorageversionmigrators\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"authentications\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"openshiftcontrollermanagers\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"storages\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"networks\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"consoles\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"csisnapshotcontrollers\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"clustercsidrivers\",\n\t\t},\n\t\t{\n\t\t\tGroup: \"operator.openshift.io\",\n\t\t\tVersion: \"v1\",\n\t\t\tResource: \"servicecas\",\n\t\t},\n\t}\n\n\tfor _, gvr := range gvrs {\n\t\tcustomResources, err := c.dynamicClient.Resource(gvr).List(ctx, metav1.ListOptions{})\n\t\tif err != nil {\n\t\t\tklog.V(4).Infof(\"error trying to list custom resources for %s: %v\", gvr.Resource, err)\n\t\t\tcontinue\n\t\t}\n\n\t\tfor _, cr := range customResources.Items {\n\t\t\tcrCopy := cr.DeepCopy()\n\t\t\teventMsgs, needsUpdate := normalizeLogLevelField(crCopy, gvr.Resource)\n\t\t\tif needsUpdate {\n\t\t\t\tif _, err := c.dynamicClient.Resource(gvr).Update(ctx, crCopy, metav1.UpdateOptions{}); err != nil {\n\t\t\t\t\tklog.Warningf(\"failed to normalize log level to %v for operator %s: %v\", operatorv1.Normal, gvr.Resource, err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tfor _, event := range eventMsgs {\n\t\t\t\t\tsyncCtx.Recorder().Event(\"OperatorLogLevelChange\", event)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "b5caaef09651450de2b3b803806ecf3e", "score": "0.45695", "text": "func (mh *Memcached) uncommentManifestsKustomizationv4() {\n\tvar err error\n\tkustomization := filepath.Join(mh.ctx.Dir, 
\"config\", \"manifests\", \"kustomization.yaml\")\n\tlog.Info(\"uncommenting config/manifests/kustomization.yaml to enable webhooks in OLM\")\n\n\terr = kbutil.UncommentCode(kustomization,\n\t\t`#patchesJson6902:\n#- target:\n# group: apps\n# version: v1\n# kind: Deployment\n# name: controller-manager\n# namespace: system\n# patch: |-\n# # Remove the manager container's \"cert\" volumeMount, since OLM will create and mount a set of certs.\n# # Update the indices in this path if adding or removing containers/volumeMounts in the manager's Deployment.\n# - op: remove\n\n# path: /spec/template/spec/containers/0/volumeMounts/0\n# # Remove the \"cert\" volume, since OLM will create and mount a set of certs.\n# # Update the indices in this path if adding or removing volumes in the manager's Deployment.\n# - op: remove\n# path: /spec/template/spec/volumes/0`, \"#\")\n\tpkg.CheckError(\"uncommented webhook volume removal patch\", err)\n}", "title": "" }, { "docid": "b48ae92b403f26cb217d37857f2fc780", "score": "0.45693237", "text": "func main() {\n\tdriverName := flag.String(\"driver-name\", \"\", \"driver name\")\n\tdriverSecret := flag.String(\"driver-secret\", \"\", \"driver secret yaml file\")\n\tdriverConfig := flag.String(\"driver-config\", \"\", \"driver config yaml file\")\n\tenableLeaderElection := flag.Bool(\"leader-election\", false, \"boolean to enable leader election\")\n\tleaderElectionNamespace := flag.String(\"leader-election-namespace\", \"\", \"namespace where leader election lease will be created\")\n\n\tflag.Parse()\n\n\tif *driverName == \"\" {\n\t\tfmt.Fprintf(os.Stderr, \"driver-name argument is mandatory\")\n\t\tos.Exit(1)\n\t}\n\tservice.Name = *driverName\n\n\tif *driverSecret == \"\" {\n\t\tfmt.Fprintf(os.Stderr, \"driver-secret argument is mandatory\")\n\t\tos.Exit(1)\n\t}\n\tservice.DriverSecret = *driverSecret\n\n\tif *driverConfig == \"\" {\n\t\tfmt.Fprintf(os.Stderr, \"driver-config argument is mandatory\")\n\t\tos.Exit(1)\n\t}\n\tservice.DriverConfig = *driverConfig\n\n\t// Always set X_CSI_DEBUG to false irrespective of what user has specified\n\t_ = os.Setenv(gocsi.EnvVarDebug, \"false\")\n\t// We always want to enable Request and Response logging (no reason for users to control this)\n\t_ = os.Setenv(gocsi.EnvVarReqLogging, \"true\")\n\t_ = os.Setenv(gocsi.EnvVarRepLogging, \"true\")\n\n\tkubeconfig := flag.String(\"kubeconfig\", \"\", \"absolute path to the kubeconfig file\")\n\tflag.Parse()\n\trun := func(ctx context.Context) {\n\t\tgocsi.Run(ctx, service.Name, \"A Unity Container Storage Interface (CSI) Plugin\",\n\t\t\tusage, provider.New())\n\t}\n\tif !*enableLeaderElection {\n\t\trun(context.TODO())\n\t} else {\n\t\tdriverName := strings.Replace(service.Name, \".\", \"-\", -1)\n\t\tlockName := fmt.Sprintf(\"driver-%s\", driverName)\n\t\tk8sclientset, err := k8sutils.CreateKubeClientSet(*kubeconfig)\n\t\tif err != nil {\n\t\t\t_, _ = fmt.Fprintf(os.Stderr, \"failed to initialize leader election: %v\", err)\n\t\t\tos.Exit(1)\n\t\t}\n\t\t// Attempt to become leader and start the driver\n\t\tk8sutils.LeaderElection(k8sclientset, lockName, *leaderElectionNamespace, run)\n\t}\n}", "title": "" }, { "docid": "f87c024aa5d595dff3e7d5874bf3e3c3", "score": "0.45664474", "text": "func resourceVolterraContainerRegistry() *schema.Resource {\n\treturn &schema.Resource{\n\t\tCreate: resourceVolterraContainerRegistryCreate,\n\t\tRead: resourceVolterraContainerRegistryRead,\n\t\tUpdate: resourceVolterraContainerRegistryUpdate,\n\t\tDelete: 
resourceVolterraContainerRegistryDelete,\n\n\t\tSchema: map[string]*schema.Schema{\n\n\t\t\t\"annotations\": {\n\t\t\t\tType: schema.TypeMap,\n\t\t\t\tOptional: true,\n\t\t\t},\n\n\t\t\t\"description\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tOptional: true,\n\t\t\t},\n\n\t\t\t\"disable\": {\n\t\t\t\tType: schema.TypeBool,\n\t\t\t\tOptional: true,\n\t\t\t},\n\n\t\t\t\"labels\": {\n\t\t\t\tType: schema.TypeMap,\n\t\t\t\tOptional: true,\n\t\t\t},\n\n\t\t\t\"name\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tForceNew: true,\n\t\t\t},\n\n\t\t\t\"namespace\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tForceNew: true,\n\t\t\t},\n\n\t\t\t\"email\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tOptional: true,\n\t\t\t},\n\n\t\t\t\"password\": {\n\n\t\t\t\tType: schema.TypeSet,\n\t\t\t\tOptional: true,\n\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\tSchema: map[string]*schema.Schema{\n\n\t\t\t\t\t\t\"blindfold_secret_info_internal\": {\n\n\t\t\t\t\t\t\tType: schema.TypeSet,\n\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\t\t\t\tSchema: map[string]*schema.Schema{\n\n\t\t\t\t\t\t\t\t\t\"decryption_provider\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t\t\t\"location\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t\t\t\"store_provider\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\"secret_encoding_type\": {\n\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\"blindfold_secret_info\": {\n\n\t\t\t\t\t\t\tType: schema.TypeSet,\n\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\t\t\t\tSchema: map[string]*schema.Schema{\n\n\t\t\t\t\t\t\t\t\t\"decryption_provider\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t\t\t\"location\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t\t\t\"store_provider\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\"clear_secret_info\": {\n\n\t\t\t\t\t\t\tType: schema.TypeSet,\n\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\t\t\t\tSchema: map[string]*schema.Schema{\n\n\t\t\t\t\t\t\t\t\t\"provider\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t\t\t\"url\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\"vault_secret_info\": {\n\n\t\t\t\t\t\t\tType: schema.TypeSet,\n\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\t\t\t\tSchema: map[string]*schema.Schema{\n\n\t\t\t\t\t\t\t\t\t\"key\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t\t\t\"location\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t\t\t\"provider\": 
{\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t\t\t\"secret_encoding\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t\t\t\"version\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeInt,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\"wingman_secret_info\": {\n\n\t\t\t\t\t\t\tType: schema.TypeSet,\n\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\tElem: &schema.Resource{\n\t\t\t\t\t\t\t\tSchema: map[string]*schema.Schema{\n\n\t\t\t\t\t\t\t\t\t\"name\": {\n\t\t\t\t\t\t\t\t\t\tType: schema.TypeString,\n\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\n\t\t\t\"registry\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t},\n\n\t\t\t\"user_name\": {\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t},\n\t\t},\n\t}\n}", "title": "" }, { "docid": "dbfe213f7a040d871db30d6be82bd162", "score": "0.4563552", "text": "func ValidateList(client *unversioned.Client, extClient *unversioned.ExtensionsClient, list *api.NamespaceList, config *utils.Config) (*api.NamespaceList, error) {\n for _, ns := range list.Items {\n if !config.IsExcluded(ns.Name) && !config.InIgnoreSelector(ns.GetLabels()) {\n err := ValidateNamespace(client, extClient, &ns, config)\n if err != nil {\n return nil, err\n }\n }\n }\n\n list, err := client.Namespaces().List(api.ListOptions{\n LabelSelector: config.LabelSelector,\n })\n if err != nil {\n return nil, err\n }\n\n return list, nil\n}", "title": "" }, { "docid": "d55363c3e56c31572ceb5b3b3e239066", "score": "0.45536485", "text": "func listPods(host, namespace string) (pods []string, err error) {\n\tresp, err := http.Get(fmt.Sprintf(\"http://%s/api/v1/namespaces/%s/pods\", host, namespace))\n\tif err != nil {\n\t\treturn\n\t}\n\tdefer resp.Body.Close()\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn\n\t}\n\tvar kresp kubeGetResp\n\tjson.Unmarshal(body, &kresp)\n\tfor _, item := range kresp.Items {\n\t\tpods = append(pods, item.Metadata.Name)\n\t}\n\treturn\n}", "title": "" }, { "docid": "6d71886e884fec45a5ff01d5e165df1d", "score": "0.45535147", "text": "func compGetResourceList(restClientGetter genericclioptions.RESTClientGetter, cmd *cobra.Command, toComplete string) []string {\n\tbuf := new(bytes.Buffer)\n\tstreams := genericiooptions.IOStreams{In: os.Stdin, Out: buf, ErrOut: io.Discard}\n\to := apiresources.NewAPIResourceOptions(streams)\n\n\to.Complete(restClientGetter, cmd, nil)\n\n\t// Get the list of resources\n\to.Output = \"name\"\n\to.Cached = true\n\to.Verbs = []string{\"get\"}\n\t// TODO:Should set --request-timeout=5s\n\n\t// Ignore errors as the output may still be valid\n\to.RunAPIResources()\n\n\t// Resources can be a comma-separated list. The last element is then\n\t// the one we should complete. 
For example if toComplete==\"pods,secre\"\n\t// we should return \"pods,secrets\"\n\tprefix := \"\"\n\tsuffix := toComplete\n\tlastIdx := strings.LastIndex(toComplete, \",\")\n\tif lastIdx != -1 {\n\t\tprefix = toComplete[0 : lastIdx+1]\n\t\tsuffix = toComplete[lastIdx+1:]\n\t}\n\tvar comps []string\n\tresources := strings.Split(buf.String(), \"\\n\")\n\tfor _, res := range resources {\n\t\tif res != \"\" && strings.HasPrefix(res, suffix) {\n\t\t\tcomps = append(comps, fmt.Sprintf(\"%s%s\", prefix, res))\n\t\t}\n\t}\n\treturn comps\n}", "title": "" }, { "docid": "71d5a5a693103025b3a7c73b01a0a58f", "score": "0.4548361", "text": "func resourceVolterraContainerRegistryUpdate(d *schema.ResourceData, meta interface{}) error {\n\tclient := meta.(*APIClient)\n\n\tupdateMeta := &ves_io_schema.ObjectReplaceMetaType{}\n\tupdateSpec := &ves_io_schema_container_registry.ReplaceSpecType{}\n\tupdateReq := &ves_io_schema_container_registry.ReplaceRequest{\n\t\tMetadata: updateMeta,\n\t\tSpec: updateSpec,\n\t}\n\tif v, ok := d.GetOk(\"annotations\"); ok && !isIntfNil(v) {\n\n\t\tms := map[string]string{}\n\n\t\tfor k, v := range v.(map[string]interface{}) {\n\t\t\tval := v.(string)\n\t\t\tms[k] = val\n\t\t}\n\t\tupdateMeta.Annotations = ms\n\t}\n\n\tif v, ok := d.GetOk(\"description\"); ok && !isIntfNil(v) {\n\t\tupdateMeta.Description =\n\t\t\tv.(string)\n\t}\n\n\tif v, ok := d.GetOk(\"disable\"); ok && !isIntfNil(v) {\n\t\tupdateMeta.Disable =\n\t\t\tv.(bool)\n\t}\n\n\tif v, ok := d.GetOk(\"labels\"); ok && !isIntfNil(v) {\n\n\t\tms := map[string]string{}\n\n\t\tfor k, v := range v.(map[string]interface{}) {\n\t\t\tval := v.(string)\n\t\t\tms[k] = val\n\t\t}\n\t\tupdateMeta.Labels = ms\n\t}\n\n\tif v, ok := d.GetOk(\"name\"); ok && !isIntfNil(v) {\n\t\tupdateMeta.Name =\n\t\t\tv.(string)\n\t}\n\n\tif v, ok := d.GetOk(\"namespace\"); ok && !isIntfNil(v) {\n\t\tupdateMeta.Namespace =\n\t\t\tv.(string)\n\t}\n\n\tif v, ok := d.GetOk(\"email\"); ok && !isIntfNil(v) {\n\n\t\tupdateSpec.Email =\n\t\t\tv.(string)\n\n\t}\n\n\tif v, ok := d.GetOk(\"password\"); ok && !isIntfNil(v) {\n\n\t\tsl := v.(*schema.Set).List()\n\t\tpassword := &ves_io_schema.SecretType{}\n\t\tupdateSpec.Password = password\n\t\tfor _, set := range sl {\n\t\t\tpasswordMapStrToI := set.(map[string]interface{})\n\n\t\t\tif v, ok := passwordMapStrToI[\"blindfold_secret_info_internal\"]; ok && !isIntfNil(v) {\n\n\t\t\t\tsl := v.(*schema.Set).List()\n\t\t\t\tblindfoldSecretInfoInternal := &ves_io_schema.BlindfoldSecretInfoType{}\n\t\t\t\tpassword.BlindfoldSecretInfoInternal = blindfoldSecretInfoInternal\n\t\t\t\tfor _, set := range sl {\n\t\t\t\t\tblindfoldSecretInfoInternalMapStrToI := set.(map[string]interface{})\n\n\t\t\t\t\tif w, ok := blindfoldSecretInfoInternalMapStrToI[\"decryption_provider\"]; ok && !isIntfNil(w) {\n\t\t\t\t\t\tblindfoldSecretInfoInternal.DecryptionProvider = w.(string)\n\t\t\t\t\t}\n\n\t\t\t\t\tif w, ok := blindfoldSecretInfoInternalMapStrToI[\"location\"]; ok && !isIntfNil(w) {\n\t\t\t\t\t\tblindfoldSecretInfoInternal.Location = w.(string)\n\t\t\t\t\t}\n\n\t\t\t\t\tif w, ok := blindfoldSecretInfoInternalMapStrToI[\"store_provider\"]; ok && !isIntfNil(w) {\n\t\t\t\t\t\tblindfoldSecretInfoInternal.StoreProvider = w.(string)\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif v, ok := passwordMapStrToI[\"secret_encoding_type\"]; ok && !isIntfNil(v) {\n\n\t\t\t\tpassword.SecretEncodingType = ves_io_schema.SecretEncodingType(ves_io_schema.SecretEncodingType_value[v.(string)])\n\n\t\t\t}\n\n\t\t\tsecretInfoOneofTypeFound := 
false\n\n\t\t\tif v, ok := passwordMapStrToI[\"blindfold_secret_info\"]; ok && !isIntfNil(v) && !secretInfoOneofTypeFound {\n\n\t\t\t\tsecretInfoOneofTypeFound = true\n\t\t\t\tsecretInfoOneofInt := &ves_io_schema.SecretType_BlindfoldSecretInfo{}\n\t\t\t\tsecretInfoOneofInt.BlindfoldSecretInfo = &ves_io_schema.BlindfoldSecretInfoType{}\n\t\t\t\tpassword.SecretInfoOneof = secretInfoOneofInt\n\n\t\t\t\tsl := v.(*schema.Set).List()\n\t\t\t\tfor _, set := range sl {\n\t\t\t\t\tcs := set.(map[string]interface{})\n\n\t\t\t\t\tif v, ok := cs[\"decryption_provider\"]; ok && !isIntfNil(v) {\n\n\t\t\t\t\t\tsecretInfoOneofInt.BlindfoldSecretInfo.DecryptionProvider = v.(string)\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif v, ok := cs[\"location\"]; ok && !isIntfNil(v) {\n\n\t\t\t\t\t\tsecretInfoOneofInt.BlindfoldSecretInfo.Location = v.(string)\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif v, ok := cs[\"store_provider\"]; ok && !isIntfNil(v) {\n\n\t\t\t\t\t\tsecretInfoOneofInt.BlindfoldSecretInfo.StoreProvider = v.(string)\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif v, ok := passwordMapStrToI[\"clear_secret_info\"]; ok && !isIntfNil(v) && !secretInfoOneofTypeFound {\n\n\t\t\t\tsecretInfoOneofTypeFound = true\n\t\t\t\tsecretInfoOneofInt := &ves_io_schema.SecretType_ClearSecretInfo{}\n\t\t\t\tsecretInfoOneofInt.ClearSecretInfo = &ves_io_schema.ClearSecretInfoType{}\n\t\t\t\tpassword.SecretInfoOneof = secretInfoOneofInt\n\n\t\t\t\tsl := v.(*schema.Set).List()\n\t\t\t\tfor _, set := range sl {\n\t\t\t\t\tcs := set.(map[string]interface{})\n\n\t\t\t\t\tif v, ok := cs[\"provider\"]; ok && !isIntfNil(v) {\n\n\t\t\t\t\t\tsecretInfoOneofInt.ClearSecretInfo.Provider = v.(string)\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif v, ok := cs[\"url\"]; ok && !isIntfNil(v) {\n\n\t\t\t\t\t\tsecretInfoOneofInt.ClearSecretInfo.Url = v.(string)\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif v, ok := passwordMapStrToI[\"vault_secret_info\"]; ok && !isIntfNil(v) && !secretInfoOneofTypeFound {\n\n\t\t\t\tsecretInfoOneofTypeFound = true\n\t\t\t\tsecretInfoOneofInt := &ves_io_schema.SecretType_VaultSecretInfo{}\n\t\t\t\tsecretInfoOneofInt.VaultSecretInfo = &ves_io_schema.VaultSecretInfoType{}\n\t\t\t\tpassword.SecretInfoOneof = secretInfoOneofInt\n\n\t\t\t\tsl := v.(*schema.Set).List()\n\t\t\t\tfor _, set := range sl {\n\t\t\t\t\tcs := set.(map[string]interface{})\n\n\t\t\t\t\tif v, ok := cs[\"key\"]; ok && !isIntfNil(v) {\n\n\t\t\t\t\t\tsecretInfoOneofInt.VaultSecretInfo.Key = v.(string)\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif v, ok := cs[\"location\"]; ok && !isIntfNil(v) {\n\n\t\t\t\t\t\tsecretInfoOneofInt.VaultSecretInfo.Location = v.(string)\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif v, ok := cs[\"provider\"]; ok && !isIntfNil(v) {\n\n\t\t\t\t\t\tsecretInfoOneofInt.VaultSecretInfo.Provider = v.(string)\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif v, ok := cs[\"secret_encoding\"]; ok && !isIntfNil(v) {\n\n\t\t\t\t\t\tsecretInfoOneofInt.VaultSecretInfo.SecretEncoding = ves_io_schema.SecretEncodingType(ves_io_schema.SecretEncodingType_value[v.(string)])\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif v, ok := cs[\"version\"]; ok && !isIntfNil(v) {\n\n\t\t\t\t\t\tsecretInfoOneofInt.VaultSecretInfo.Version = uint32(v.(int))\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif v, ok := passwordMapStrToI[\"wingman_secret_info\"]; ok && !isIntfNil(v) && !secretInfoOneofTypeFound {\n\n\t\t\t\tsecretInfoOneofTypeFound = true\n\t\t\t\tsecretInfoOneofInt := &ves_io_schema.SecretType_WingmanSecretInfo{}\n\t\t\t\tsecretInfoOneofInt.WingmanSecretInfo = &ves_io_schema.WingmanSecretInfoType{}\n\t\t\t\tpassword.SecretInfoOneof 
= secretInfoOneofInt\n\n\t\t\t\tsl := v.(*schema.Set).List()\n\t\t\t\tfor _, set := range sl {\n\t\t\t\t\tcs := set.(map[string]interface{})\n\n\t\t\t\t\tif v, ok := cs[\"name\"]; ok && !isIntfNil(v) {\n\n\t\t\t\t\t\tsecretInfoOneofInt.WingmanSecretInfo.Name = v.(string)\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\tif v, ok := d.GetOk(\"registry\"); ok && !isIntfNil(v) {\n\n\t\tupdateSpec.Registry =\n\t\t\tv.(string)\n\n\t}\n\n\tif v, ok := d.GetOk(\"user_name\"); ok && !isIntfNil(v) {\n\n\t\tupdateSpec.UserName =\n\t\t\tv.(string)\n\n\t}\n\n\tlog.Printf(\"[DEBUG] Updating Volterra ContainerRegistry obj with struct: %+v\", updateReq)\n\n\terr := client.ReplaceObject(context.Background(), ves_io_schema_container_registry.ObjectType, updateReq)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error updating ContainerRegistry: %s\", err)\n\t}\n\n\treturn resourceVolterraContainerRegistryRead(d, meta)\n}", "title": "" }, { "docid": "36384184987630109d19457898dde5e3", "score": "0.45480505", "text": "func labelsForMultiClusterMonitoring(name string) map[string]string {\n\treturn map[string]string{\"observability.open-cluster-management.io/name\": name}\n}", "title": "" }, { "docid": "36384184987630109d19457898dde5e3", "score": "0.45480505", "text": "func labelsForMultiClusterMonitoring(name string) map[string]string {\n\treturn map[string]string{\"observability.open-cluster-management.io/name\": name}\n}", "title": "" }, { "docid": "64f4c97928fbbe077bfa12a8a3bc2ae6", "score": "0.45456746", "text": "func BuildMandatoryControlPlaneLabels() map[string]string {\n\tnodeLabels := make(map[string]string)\n\tnodeLabels[RoleLabelControlPlane20] = \"\"\n\tnodeLabels[\"kops.k8s.io/kops-controller-pki\"] = \"\"\n\tnodeLabels[\"node.kubernetes.io/exclude-from-external-load-balancers\"] = \"\"\n\treturn nodeLabels\n}", "title": "" }, { "docid": "01832e8bffa791d726e287fb21a9108f", "score": "0.45456502", "text": "func generateKubeOutput(content [][]byte) ([]byte, error) {\n\toutput := make([]byte, 0)\n\n\theader := `# Save the output of this file and use kubectl create -f to import\n# it into Kubernetes.\n#\n# Created with podman-%s\n`\n\tpodmanVersion, err := define.GetVersion()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Add header to kube YAML file.\n\toutput = append(output, []byte(fmt.Sprintf(header, podmanVersion.Version))...)\n\n\t// kube generate order is based on helm install order (secret, persistentVolume, service, pod...).\n\t// Add kube kinds.\n\tfor i, b := range content {\n\t\tif i != 0 {\n\t\t\toutput = append(output, []byte(\"---\\n\")...)\n\t\t}\n\n\t\toutput = append(output, b...)\n\t}\n\n\treturn output, nil\n}", "title": "" }, { "docid": "58e6a2a5f3d1da517d0689dc579351f8", "score": "0.45444047", "text": "func CmdUpgrade(c *cli.Context) {\n\tvar kubeware string\n\tvar localKubePath string\n\tvar err error\n\n\tkubeware = os.Getenv(\"KDEPLOY_KUBEWARE\")\n\tlocalKubePath, err = fetchers.Fetch(kubeware)\n\tif err != nil {\n\t\tlog.Fatal(fmt.Errorf(\"Could not fetch kubeware: '%s' (%v)\", kubeware, err))\n\t}\n\n\tlog.Debugf(\"Going to parse kubeware in %s\", localKubePath)\n\n\tmd := template.ParseMetadata(localKubePath)\n\tutils.CheckError(err)\n\n\tkubernetes, err := webservice.NewKubeClient()\n\tutils.CheckError(err)\n\n\tnamespace := os.Getenv(\"KDEPLOY_NAMESPACE\")\n\t// labelSelector := fmt.Sprintf(\"kubeware=%s,kubeware-version=%s\", md.Name, md.Version)\n\n\t// Check if kubeware already installed, error if it's not\n\tv, err := 
kubernetes.FindDeployedKubewareVersion(namespace, md.Name)\n\tutils.CheckError(err)\n\tif v == \"\" {\n\t\tlog.Fatalf(\"Kubeware '%s.%s' is not deployed and thus it can't be upgraded\", namespace, md.Name)\n\t}\n\tlog.Infof(\"Found version %s of %s.%s\", v, namespace, md.Name)\n\n\t// Check if equal or newer version already exists, error if so\n\tdeployedVersion, err := version.NewVersion(v)\n\tutils.CheckError(err)\n\tupgradeVersion, err := version.NewVersion(md.Version)\n\tutils.CheckError(err)\n\tif upgradeVersion.LessThan(deployedVersion) {\n\t\tlog.Fatalf(\"Can not upgrade to version '%s' since version '%s' is already deployed\", md.Version, v)\n\t}\n\n\t// build attributes merging \"role list\" to defaults\n\tlog.Debugf(\"Building attributes\")\n\tdefaults, err := md.AttributeDefaults()\n\tutils.CheckError(err)\n\tattributes := template.BuildAttributes(c.String(\"attribute\"), defaults)\n\n\t// get services and parse each one\n\tlog.Debugf(\"Parsing services\")\n\tservicesSpecs, err := md.ParseServices(attributes)\n\tutils.CheckError(err)\n\n\t// get replica controllers and parse each one\n\tlog.Debugf(\"Parsing controllers\")\n\tcontrollersSpecs, err := md.ParseControllers(attributes)\n\tutils.CheckError(err)\n\n\t// upgStrategy := upgradeStrategies.RecreateAllStrategy(kubernetes)\n\t// upgStrategy := upgradeStrategies.RollRcPatchSvcStrategy(kubernetes, 1)\n\tupgStrategy := upgradeStrategies.BuildUpgradeStrategy(os.Getenv(\"KDEPLOY_UPGRADE_STRATEGY\"), kubernetes)\n\tupgStrategy.Upgrade(namespace, servicesSpecs, controllersSpecs)\n\n\tlog.Infof(\"Kubeware '%s.%s' has been upgraded from version '%s' to '%s'\", namespace, md.Name, v, md.Version)\n}", "title": "" }, { "docid": "355ca1ac424a61c841b3a182e1f96c6a", "score": "0.45443225", "text": "func deleteResources(kubeConfig []byte, ns string, logger *logrus.Entry) error {\n\tclient, err := k8sclient.NewClientFromKubeConfig(kubeConfig)\n\tif err != nil {\n\t\treturn err\n\t}\n\tresourceTypes := []struct {\n\t\tDeleteCollectioner interface {\n\t\t\tDeleteCollection(*metav1.DeleteOptions, metav1.ListOptions) error\n\t\t}\n\t\tName string\n\t}{\n\t\t{client.AppsV1().Deployments(ns), \"Deployments\"},\n\t\t{client.AppsV1().DaemonSets(ns), \"DaemonSets\"},\n\t\t{client.AppsV1().StatefulSets(ns), \"StatefulSets\"},\n\t\t{client.AppsV1().ReplicaSets(ns), \"ReplicaSets\"},\n\t\t{client.CoreV1().Pods(ns), \"Pods\"},\n\t\t{client.CoreV1().PersistentVolumeClaims(ns), \"PersistentVolumeClaims\"},\n\t}\n\n\tfor _, resourceType := range resourceTypes {\n\t\terr := retry(func() error {\n\t\t\tlogger.Debugf(\"deleting %s\", resourceType.Name)\n\t\t\terr := resourceType.DeleteCollectioner.DeleteCollection(&metav1.DeleteOptions{}, metav1.ListOptions{})\n\t\t\tif err != nil {\n\t\t\t\tlogger.Infof(\"could not delete %s: %v\", resourceType.Name, err)\n\t\t\t}\n\t\t\treturn err\n\t\t}, 6, 1)\n\t\tif err != nil {\n\t\t\treturn emperror.Wrapf(err, \"could not delete %s\", resourceType.Name)\n\t\t}\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "17604d2014770afd8636fe4285593df4", "score": "0.45440346", "text": "func (o *DevClient) warnAboutK8sComponents(devfileObj parser.DevfileObj) {\n\tvar components []string\n\t// get all standalone k8s components for a given commandGK\n\tk8sComponents, _ := devfile.GetKubernetesComponentsToPush(devfileObj, false)\n\n\tif len(k8sComponents) == 0 {\n\t\treturn\n\t}\n\n\tfor _, comp := range k8sComponents {\n\t\tcomponents = append(components, comp.Name)\n\t}\n\n\tlog.Warningf(\"Kubernetes components are not supported 
on Podman. Skipping: %v.\", strings.Join(components, \", \"))\n}", "title": "" }, { "docid": "bc5ab0b6b972d750a724b7da9d4bc04f", "score": "0.45428833", "text": "func (kCli K8sClient) watchableGroupVersionResources(ctx context.Context) ([]gvrWithNamespaced, error) {\n\t_, resourceLists, err := kCli.clientSet.Discovery().ServerGroupsAndResources()\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"error getting list of resource types\")\n\t}\n\n\tvar ret []gvrWithNamespaced\n\n\tauthCli := kCli.clientSet.AuthorizationV1()\n\n\tfor _, rl := range resourceLists {\n\t\t// one might think it'd be cleaner to use rl.GroupVersionKind().GroupVersion()\n\t\t// empirically, but that returns an empty `Group` for most resources\n\t\t// (one specific example in case we revisit this: statefulsets)\n\t\trlGV, err := schema.ParseGroupVersion(rl.GroupVersion)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrapf(err, \"error parsing GroupVersion '%s'\", rl.GroupVersion)\n\t\t}\n\t\tfor _, r := range rl.APIResources {\n\t\t\twatchable, err := kCli.isWatchable(ctx, authCli, r, rlGV)\n\t\t\tif err != nil {\n\t\t\t\tlogger.Get(ctx).Infof(\"ERROR setting up watch for '%s.%s': %v\", r.Name, rlGV.String(), err)\n\t\t\t}\n\n\t\t\tif !watchable {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// per comments on r.Group/r.Version: empty implies the value of the containing ResourceList\n\t\t\tgroup := r.Group\n\t\t\tif group == \"\" {\n\t\t\t\tgroup = rlGV.Group\n\t\t\t}\n\t\t\tversion := r.Version\n\t\t\tif version == \"\" {\n\t\t\t\tversion = rlGV.Version\n\t\t\t}\n\t\t\tret = append(ret, gvrWithNamespaced{\n\t\t\t\tGroupVersionResource: schema.GroupVersionResource{\n\t\t\t\t\tGroup: group,\n\t\t\t\t\tVersion: version,\n\t\t\t\t\tResource: r.Name,\n\t\t\t\t},\n\t\t\t\tnamespaced: r.Namespaced,\n\t\t\t})\n\t\t}\n\t}\n\n\treturn ret, nil\n}", "title": "" }, { "docid": "4cb665132d169ff39d77364b1e35d61f", "score": "0.45365182", "text": "func getPods(lbls map[string]string, namespace string) (*v1.PodList, error) {\n\n\t// use the current context in kubeconfig\n\tconfig, err := clientcmd.BuildConfigFromFlags(\"\", *kubeconfig)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\t// create the clientset\n\tclientset, err := kubernetes.NewForConfig(config)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tselector := labels.SelectorFromSet(labels.Set(lbls)).String()\n\treturn clientset.CoreV1().Pods(namespace).List(metav1.ListOptions{\n\t\tLabelSelector: selector,\n\t})\n\n}", "title": "" }, { "docid": "e9fe44d3213f25cdfb76c1bda6f192ab", "score": "0.45348847", "text": "func list(kv *api.KV, k *keysync.APIKeys, args []string) {\n\tif len(args) == 0 {\n\t\tprintHelp(\"list\")\n\t}\n\n\tif _, exists := subCommands[\"list\"][args[0]]; !exists {\n\t\tfmt.Printf(\"pgw-key list %s is invalid\\n\", args[0])\n\t\tprintHelp(\"list\")\n\t}\n\n\tswitch args[0] {\n\tcase \"names\":\n\t\tfor _, v := range k.Keys {\n\t\t\tfmt.Printf(\"%s\\n\", v)\n\t\t}\n\tcase \"all\":\n\t\tfor k, v := range k.Keys {\n\t\t\tfmt.Printf(\"%s: %s\\n\", v, k)\n\t\t}\n\t}\n}", "title": "" }, { "docid": "7b64389dba8b443b4a830fef4cf0324d", "score": "0.45342115", "text": "func generateLabels(object CommonInterface, workerType string) map[string]string {\n\tkind := object.GroupVersionKind().Kind\n\tgroup := object.GroupVersionKind().Group\n\n\tkeyPrefix := strings.ToLower(kind + \".\" + group + \"/\")\n\n\tlabels := make(map[string]string)\n\tlabels[keyPrefix+\"name\"] = object.GetName()\n\tlabels[keyPrefix+\"uid\"] = string(object.GetUID())\n\tif workerType != \"\" 
{\n\t\tlabels[keyPrefix+\"worker-type\"] = strings.ToLower(workerType)\n\t}\n\treturn labels\n}", "title": "" }, { "docid": "8126d4f999b941c26a6d1e36cacc7ab9", "score": "0.45335758", "text": "func validateCardinality(\n\tcr *kdv1.KubeDirectorCluster,\n\tappCR *kdv1.KubeDirectorApp,\n\tvalErrors []string,\n\tpatches []clusterPatchSpec,\n) ([]string, []clusterPatchSpec) {\n\n\tanyError := false\n\ttotalMembers := int32(0)\n\n\tnumRoles := len(cr.Spec.Roles)\n\trolesPath := field.NewPath(\"spec\", \"roles\")\n\tfor i := 0; i < numRoles; i++ {\n\t\trole := &(cr.Spec.Roles[i])\n\t\tappRole := catalog.GetRoleFromID(appCR, role.Name)\n\t\tif appRole == nil {\n\t\t\t// Do nothing; this error will be reported from validateRoles.\n\t\t\tcontinue\n\t\t}\n\t\tcardinality, isScaleOut := catalog.GetRoleCardinality(appRole)\n\t\tif role.Members != nil {\n\t\t\tvar invalidMemberCount = false\n\t\t\tif isScaleOut {\n\t\t\t\tif *(role.Members) < cardinality {\n\t\t\t\t\tinvalidMemberCount = true\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tif *(role.Members) != cardinality {\n\t\t\t\t\tinvalidMemberCount = true\n\t\t\t\t}\n\t\t\t}\n\t\t\tif invalidMemberCount {\n\t\t\t\tanyError = true\n\t\t\t\tvalErrors = append(\n\t\t\t\t\tvalErrors,\n\t\t\t\t\tfmt.Sprintf(\n\t\t\t\t\t\tinvalidCardinality,\n\t\t\t\t\t\trole.Name,\n\t\t\t\t\t\t*(role.Members),\n\t\t\t\t\t\tappRole.Cardinality,\n\t\t\t\t\t),\n\t\t\t\t)\n\t\t\t}\n\t\t} else {\n\t\t\trole.Members = &cardinality\n\t\t\tpatches = append(\n\t\t\t\tpatches,\n\t\t\t\tclusterPatchSpec{\n\t\t\t\t\tOp: \"add\",\n\t\t\t\t\tPath: \"/spec/roles/\" + strconv.Itoa(i) + \"/members\",\n\t\t\t\t\tValue: clusterPatchValue{\n\t\t\t\t\t\tValueInt: role.Members,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t)\n\t\t}\n\n\t\ttotalMembers += *role.Members\n\t\tif totalMembers > maxKDMembers {\n\t\t\tanyError = true\n\t\t\tvalErrors = append(\n\t\t\t\tvalErrors,\n\t\t\t\tfmt.Sprint(\n\t\t\t\t\tmaxMemberLimit,\n\t\t\t\t\tmaxKDMembers,\n\t\t\t\t),\n\t\t\t)\n\t\t\tbreak\n\t\t}\n\n\t\t// validate user-specified labels\n\t\trolePath := rolesPath.Index(i)\n\t\tlabelErrors := appsvalidation.ValidateLabels(\n\t\t\trole.PodLabels,\n\t\t\trolePath.Child(\"podLabels\"),\n\t\t)\n\t\tserviceLabelErrors := appsvalidation.ValidateLabels(\n\t\t\trole.ServiceLabels,\n\t\t\trolePath.Child(\"serviceLabels\"),\n\t\t)\n\t\tif (len(labelErrors) != 0) || (len(serviceLabelErrors) != 0) {\n\t\t\tanyError = true\n\t\t\tfor _, labelErr := range labelErrors {\n\t\t\t\tvalErrors = append(valErrors, labelErr.Error())\n\t\t\t}\n\t\t\tfor _, serviceLabelErr := range serviceLabelErrors {\n\t\t\t\tvalErrors = append(valErrors, serviceLabelErr.Error())\n\t\t\t}\n\t\t}\n\t}\n\n\tif anyError {\n\t\tvar emptyPatchList []clusterPatchSpec\n\t\treturn valErrors, emptyPatchList\n\t}\n\treturn valErrors, patches\n}", "title": "" }, { "docid": "01738f2e723f3649e0b8b8690c83818c", "score": "0.4531694", "text": "func generateKRMManifest(rl *fn.ResourceList) (bool, error) {\n\tvar generatedSecrets fn.KubeObjects\n\n\tfor _, sopsSecretGeneratorManifest := range rl.Items {\n\t\tsecretManifest, err := processSopsSecretGenerator([]byte(sopsSecretGeneratorManifest.String()))\n\t\tif err != nil {\n\t\t\trl.LogResult(err)\n\t\t\treturn false, err\n\t\t}\n\n\t\tsecretKubeObject, err := fn.ParseKubeObject([]byte(secretManifest))\n\t\tif err != nil {\n\t\t\trl.LogResult(err)\n\t\t\treturn false, err\n\t\t}\n\n\t\tgeneratedSecrets = append(generatedSecrets, secretKubeObject)\n\t}\n\n\trl.Items = generatedSecrets\n\n\treturn true, nil\n}", "title": "" }, { 
"docid": "c2c014031c9db794b41c0f28fc2017e9", "score": "0.4519986", "text": "func GetStatusList(clientset kubernetes.Interface) []byte {\n\ttype nodeStatus struct {\n\t\tNode string `json:\"node\"`\n\t\tReady string `json:\"ready\"`\n\t\tIP string `json:\"ip\"`\n\t\tAge int `json:\"age\"`\n\t\tHardware string `json:\"hardware\"`\n\t\tNamespaces []string `json:\"namespaces\"`\n\t\tCity string `json:\"city\"`\n\t\tState string `json:\"state-iso\"`\n\t\tCountry string `json:\"country-iso\"`\n\t\tContinent string `json:\"continent\"`\n\t\tLon string `json:\"lon\"`\n\t\tLat string `json:\"lat\"`\n\t}\n\n\tnodesRaw, err := clientset.CoreV1().Nodes().List(metav1.ListOptions{})\n\tif err != nil {\n\t\tlog.Println(err.Error())\n\t\tpanic(err.Error())\n\t}\n\tpodsRaw, err := clientset.CoreV1().Pods(\"\").List(metav1.ListOptions{})\n\tif err != nil {\n\t\tlog.Println(err.Error())\n\t\tpanic(err.Error())\n\t}\n\tnodesArr := make([]nodeStatus, len(nodesRaw.Items))\n\tfor i, nodeRow := range nodesRaw.Items {\n\t\tnodesArr[i].Node = nodeRow.Name\n\t\tnodesArr[i].City = nodeRow.Labels[\"edge-net.io/city\"]\n\t\tnodesArr[i].State = nodeRow.Labels[\"edge-net.io/state-iso\"]\n\t\tnodesArr[i].Country = nodeRow.Labels[\"edge-net.io/country-iso\"]\n\t\tnodesArr[i].Continent = nodeRow.Labels[\"edge-net.io/continent\"]\n\t\tlonStr := nodeRow.Labels[\"edge-net.io/lon\"]\n\t\tlatStr := nodeRow.Labels[\"edge-net.io/lat\"]\n\t\tif nodeRow.Labels[\"edge-net.io/lon\"] != \"\" && nodeRow.Labels[\"edge-net.io/lat\"] != \"\" {\n\t\t\tlonStr = string(lonStr[1:])\n\t\t\tlatStr = string(latStr[1:])\n\t\t}\n\t\tnodesArr[i].Lon = lonStr\n\t\tnodesArr[i].Lat = latStr\n\t\tfor _, conditionRow := range nodeRow.Status.Conditions {\n\t\t\tif conditionType := conditionRow.Type; conditionType == \"Ready\" {\n\t\t\t\tnodesArr[i].Ready = string(conditionRow.Status)\n\t\t\t}\n\t\t}\n\n\t\tinternalIP, externalIP := GetNodeIPAddresses(nodeRow.DeepCopy())\n\t\tif internalIP != \"\" {\n\t\t\tnodesArr[i].IP = internalIP\n\t\t} else if externalIP != \"\" {\n\t\t\tnodesArr[i].IP = externalIP\n\t\t}\n\n\t\tnodesArr[i].Age = nodeRow.GetCreationTimestamp().Day()\n\t\tnodesArr[i].Hardware = fmt.Sprintf(\"%s %d CPU %dMiB, %s\", nodeRow.Status.NodeInfo.Architecture,\n\t\t\tnodeRow.Status.Capacity.Cpu().Value(), nodeRow.Status.Capacity.Memory().Value()/1048576,\n\t\t\tnodeRow.Status.NodeInfo.OSImage)\n\n\t\tnamespaces := []string{}\n\t\tfor _, podRow := range podsRaw.Items {\n\t\t\tif nodeRow.Name == podRow.Spec.NodeName {\n\t\t\t\tnamespaces = append(namespaces, podRow.GetNamespace())\n\t\t\t}\n\t\t}\n\t\tnodesArr[i].Namespaces = unique(namespaces)\n\t}\n\tnodesJSON, _ := json.Marshal(nodesArr)\n\n\treturn nodesJSON\n}", "title": "" }, { "docid": "0d2abcf48d036717d1169cd0b6e26913", "score": "0.45192343", "text": "func CreateKubeControllersSecrets(ctx context.Context, esAdminUserSecret *corev1.Secret, esAdminUserName string, cli client.Client) (*corev1.Secret, *corev1.Secret, *corev1.Secret, error) {\n\tkubeControllersGatewaySecret, err := utils.GetSecret(ctx, cli, kubecontrollers.ElasticsearchKubeControllersUserSecret, common.OperatorNamespace())\n\tif err != nil {\n\t\treturn nil, nil, nil, err\n\t}\n\tif kubeControllersGatewaySecret == nil {\n\t\tpassword := crypto.GeneratePassword(16)\n\t\tkubeControllersGatewaySecret = &corev1.Secret{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: kubecontrollers.ElasticsearchKubeControllersUserSecret,\n\t\t\t\tNamespace: common.OperatorNamespace(),\n\t\t\t},\n\t\t\tData: 
map[string][]byte{\n\t\t\t\t\"username\": []byte(kubecontrollers.ElasticsearchKubeControllersUserName),\n\t\t\t\t\"password\": []byte(password),\n\t\t\t},\n\t\t}\n\t}\n\thashedPassword, err := bcrypt.GenerateFromPassword(kubeControllersGatewaySecret.Data[\"password\"], bcrypt.MinCost)\n\tif err != nil {\n\t\treturn nil, nil, nil, err\n\t}\n\n\tkubeControllersVerificationSecret, err := utils.GetSecret(ctx, cli, kubecontrollers.ElasticsearchKubeControllersVerificationUserSecret, render.ElasticsearchNamespace)\n\tif err != nil {\n\t\treturn nil, nil, nil, err\n\t}\n\tif kubeControllersVerificationSecret == nil {\n\t\tkubeControllersVerificationSecret = &corev1.Secret{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: kubecontrollers.ElasticsearchKubeControllersVerificationUserSecret,\n\t\t\t\tNamespace: render.ElasticsearchNamespace,\n\t\t\t\tLabels: map[string]string{\n\t\t\t\t\tESGatewaySelectorLabel: ESGatewaySelectorLabelValue,\n\t\t\t\t},\n\t\t\t},\n\t\t\tData: map[string][]byte{\n\t\t\t\t\"username\": []byte(kubecontrollers.ElasticsearchKubeControllersUserName),\n\t\t\t\t\"password\": hashedPassword,\n\t\t\t},\n\t\t}\n\t}\n\n\tkubeControllersSecureUserSecret, err := utils.GetSecret(ctx, cli, kubecontrollers.ElasticsearchKubeControllersSecureUserSecret, render.ElasticsearchNamespace)\n\tif err != nil {\n\t\treturn nil, nil, nil, err\n\t}\n\tif kubeControllersSecureUserSecret == nil {\n\t\tkubeControllersSecureUserSecret = &corev1.Secret{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: kubecontrollers.ElasticsearchKubeControllersSecureUserSecret,\n\t\t\t\tNamespace: render.ElasticsearchNamespace,\n\t\t\t\tLabels: map[string]string{\n\t\t\t\t\tESGatewaySelectorLabel: ESGatewaySelectorLabelValue,\n\t\t\t\t},\n\t\t\t},\n\t\t\tData: map[string][]byte{\n\t\t\t\t\"username\": []byte(esAdminUserName),\n\t\t\t\t\"password\": esAdminUserSecret.Data[esAdminUserName],\n\t\t\t},\n\t\t}\n\t}\n\n\treturn kubeControllersGatewaySecret, kubeControllersVerificationSecret, kubeControllersSecureUserSecret, nil\n}", "title": "" }, { "docid": "7d5e8e056dd380ecede0e0ff4bc0ba4c", "score": "0.4514698", "text": "func TestCreateReadPodsRole(t *testing.T) {\n\tt.Parallel()\n\n\tttKubectlOptions, kubectlOptions := GetKubectlOptions(t)\n\n\t// Create a namespace so we don't collide with other tests\n\tnamespace := strings.ToLower(random.UniqueId())\n\tk8s.CreateNamespace(t, ttKubectlOptions, namespace)\n\tdefer k8s.DeleteNamespace(t, ttKubectlOptions, namespace)\n\n\ttestRules := []rbacv1.PolicyRule{\n\t\trbacv1.PolicyRule{\n\t\t\tVerbs: []string{\"get\", \"list\"},\n\t\t\tAPIGroups: []string{\"\"},\n\t\t\tResources: []string{\"pods\"},\n\t\t},\n\t}\n\troleName := getTestRoleName(namespace)\n\trole := PrepareRole(\n\t\tnamespace,\n\t\troleName,\n\t\tmap[string]string{},\n\t\tmap[string]string{},\n\t\ttestRules,\n\t)\n\trequire.NoError(t, CreateRole(kubectlOptions, role))\n\n\t// Now verify the role was actually created in the cluster\n\t// We use the terratest role lib instead of the one in kubectl.\n\tttKubectlOptions.Namespace = namespace\n\trole = k8s.GetRole(t, ttKubectlOptions, roleName)\n\tassert.Equal(t, role.Name, roleName)\n\tassert.Equal(t, len(role.Rules), 1)\n\tassert.Equal(t, role.Rules[0], testRules[0])\n}", "title": "" }, { "docid": "5a95a227adf492811e1dc55164036125", "score": "0.45138425", "text": "func newKudoResources(options *Options, c *kudo.Client) (*resourceFuncsConfig, error) {\n\topts := metav1.ListOptions{LabelSelector: fmt.Sprintf(\"app=%s\", kudoinit.DefaultKudoLabel)}\n\tns, err := 
c.KubeClientset.CoreV1().Namespaces().List(context.TODO(), opts)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to get kudo system namespace: %v\", err)\n\t}\n\tif ns == nil || len(ns.Items) == 0 {\n\t\treturn nil, fmt.Errorf(\"kudo system namespace not found\")\n\t}\n\treturn &resourceFuncsConfig{\n\t\tc: c,\n\t\tns: ns.Items[0].Name,\n\t\topts: opts,\n\t\tlogOpts: corev1.PodLogOptions{SinceSeconds: options.LogSince},\n\t}, nil\n}", "title": "" }, { "docid": "f41f2e481d3337a10f15359428b0b44c", "score": "0.45135584", "text": "func Print(writer io.Writer, listeners []*listener.Listener) {\n\tparsedListeners := parse(listeners)\n\tif parsedListeners == nil {\n\t\treturn\n\t}\n\n\tactionToPolicy := map[rbacpb.RBAC_Action]map[string]struct{}{}\n\tpolicyToRule := map[string]map[string]struct{}{}\n\n\taddPolicy := func(action rbacpb.RBAC_Action, name string, rule string) {\n\t\tif actionToPolicy[action] == nil {\n\t\t\tactionToPolicy[action] = map[string]struct{}{}\n\t\t}\n\t\tif policyToRule[name] == nil {\n\t\t\tpolicyToRule[name] = map[string]struct{}{}\n\t\t}\n\t\tactionToPolicy[action][name] = struct{}{}\n\t\tpolicyToRule[name][rule] = struct{}{}\n\t}\n\n\tfor _, parsed := range parsedListeners {\n\t\tfor _, fc := range parsed.filterChains {\n\t\t\tfor _, rbacHTTP := range fc.rbacHTTP {\n\t\t\t\taction := rbacHTTP.GetRules().GetAction()\n\t\t\t\tfor name := range rbacHTTP.GetRules().GetPolicies() {\n\t\t\t\t\tnameOfPolicy, indexOfRule := extractName(name)\n\t\t\t\t\taddPolicy(action, nameOfPolicy, indexOfRule)\n\t\t\t\t}\n\t\t\t\tif len(rbacHTTP.GetRules().GetPolicies()) == 0 {\n\t\t\t\t\taddPolicy(action, anonymousName, \"0\")\n\t\t\t\t}\n\t\t\t}\n\t\t\tfor _, rbacTCP := range fc.rbacTCP {\n\t\t\t\taction := rbacTCP.GetRules().GetAction()\n\t\t\t\tfor name := range rbacTCP.GetRules().GetPolicies() {\n\t\t\t\t\tnameOfPolicy, indexOfRule := extractName(name)\n\t\t\t\t\taddPolicy(action, nameOfPolicy, indexOfRule)\n\t\t\t\t}\n\t\t\t\tif len(rbacTCP.GetRules().GetPolicies()) == 0 {\n\t\t\t\t\taddPolicy(action, anonymousName, \"0\")\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tbuf := strings.Builder{}\n\tbuf.WriteString(\"ACTION\\tAuthorizationPolicy\\tRULES\\n\")\n\tfor _, action := range []rbacpb.RBAC_Action{rbacpb.RBAC_DENY, rbacpb.RBAC_ALLOW, rbacpb.RBAC_LOG} {\n\t\tif names, ok := actionToPolicy[action]; ok {\n\t\t\tsortedNames := make([]string, 0, len(names))\n\t\t\tfor name := range names {\n\t\t\t\tsortedNames = append(sortedNames, name)\n\t\t\t}\n\t\t\tsort.Strings(sortedNames)\n\t\t\tfor _, name := range sortedNames {\n\t\t\t\tbuf.WriteString(fmt.Sprintf(\"%s\\t%s\\t%d\\n\", action, name, len(policyToRule[name])))\n\t\t\t}\n\t\t}\n\t}\n\n\tw := new(tabwriter.Writer).Init(writer, 0, 8, 3, ' ', 0)\n\tif _, err := fmt.Fprint(w, buf.String()); err != nil {\n\t\tlog.Errorf(\"failed to print output: %s\", err)\n\t}\n\t_ = w.Flush()\n}", "title": "" }, { "docid": "a5ce19a1de6478ecf42d30e5b23771f2", "score": "0.45112363", "text": "func (c *Controller) ListGroups(w http.ResponseWriter, r *http.Request) {\n\n // ls /consumers\n //[console-consumer-40820]\n gPath := \"/consumers\"\n groups, err := c.lsChildren(gPath)\n if err != nil {\n utils.WriteError(w, err)\n return\n }\n\n ara.Logger().Debug(\"groups: %v\", groups)\n\n // return 404 if no group found(that means no consumer exists)\n if len(groups) <= 0 {\n ara.Logger().Debug(\"return 404\")\n utils.Write404(w)\n return\n }\n\n // map[groupName]map[topicName]map[partitionId]map[string]string (last map is offset, log size and lag etc.)\n 
resp := make(map[string]map[string]map[string]map[string]string, len(groups))\n for _, g := range groups {\n\n //ls /consumers/console-consumer-40820/offsets\n //[mytopic]\n topicsPath := path.Join(gPath, g, \"offsets\")\n topics, err := c.lsChildren(topicsPath)\n if err != nil {\n utils.WriteError(w, err)\n return\n }\n\n // init topic map\n topicMap := make(map[string]map[string]map[string]string)\n for _, topic := range topics {\n //ls /consumers/console-consumer-40820/offsets/mytopic\n //[0, 1, 2]\n partitionPath := path.Join(topicsPath, topic)\n partitions, err := c.lsChildren(partitionPath)\n if err != nil {\n utils.WriteError(w, err)\n return\n }\n\n // init partition map\n pMap := make(map[string]map[string]string)\n for _, pidStr := range partitions {\n //get /consumers/console-consumer-40820/offsets/mytopic/0\n //2\n offsetPath := path.Join(partitionPath, pidStr)\n offsetStr, err := c.getChildren(offsetPath)\n if err != nil {\n utils.WriteError(w, err)\n return\n }\n\n pid64, err := strconv.ParseInt(pidStr, 10, 32)\n if err != nil {\n utils.WriteError(w, err)\n return\n }\n pid := int32(pid64)\n offset, err := strconv.ParseInt(offsetStr, 10, 64)\n\n logSize := c.getLogSize(topic, pid)\n lag := logSize - offset\n\n pDataMap := make(map[string]string)\n pDataMap[\"offset\"] = offsetStr\n pDataMap[\"logSize\"] = strconv.FormatInt(logSize, 10)\n pDataMap[\"lag\"] = strconv.FormatInt(lag, 10)\n pMap[pidStr] = pDataMap\n }\n\n topicMap[topic] = pMap\n }\n resp[g] = topicMap\n }\n encoder := json.NewEncoder(w)\n err = encoder.Encode(resp)\n if err != nil {\n ara.Logger().Debug(err.Error())\n }\n\n // b, err := json.Marshal(topics)\n // if err != nil {\n // fmt.Println(\"error:\", err)\n // }\n // w.Write(b)\n\n\n // bts, stat, ch, err := zc.GetW(path)\n // if err != nil {\n // panic(err)\n // }\n // fmt.Printf(\"%s *** %+v\\n\", string(bts), stat)\n\n // e := <-ch\n // fmt.Printf(\"--- %+v\\n\", e)\n // if e.Type == zk.EventNodeDataChanged {\n // watchData(zc)\n // }\n}", "title": "" }, { "docid": "7b6885e556ec01b41925f5bcb2cdcb57", "score": "0.45036486", "text": "func helmOverridesEdge(root *tls.CA) []string {\n\treturn []string{\n\t\t\"--set\", \"controllerLogLevel=debug\",\n\t\t\"--set\", \"global.linkerdVersion=\" + TestHelper.GetVersion(),\n\t\t\"--set\", \"global.proxy.image.version=\" + TestHelper.GetVersion(),\n\t\t\"--set\", \"global.identityTrustDomain=cluster.local\",\n\t\t\"--set\", \"global.identityTrustAnchorsPEM=\" + root.Cred.Crt.EncodeCertificatePEM(),\n\t\t\"--set\", \"identity.issuer.tls.crtPEM=\" + root.Cred.Crt.EncodeCertificatePEM(),\n\t\t\"--set\", \"identity.issuer.tls.keyPEM=\" + root.Cred.EncodePrivateKeyPEM(),\n\t\t\"--set\", \"identity.issuer.crtExpiry=\" + root.Cred.Crt.Certificate.NotAfter.Format(time.RFC3339),\n\t\t\"--set\", \"grafana.image.version=\" + TestHelper.GetVersion(),\n\t}\n}", "title": "" }, { "docid": "fb6b7f2f8ea682b8cc32391c74ea6e02", "score": "0.45000502", "text": "func AddKubeRouterPermissions(b *PolicyBuilder, p *Policy) {\n\tp.clusterTaggedAction.Insert(\n\t\t\"ec2:ModifyInstanceAttribute\",\n\t)\n}", "title": "" }, { "docid": "73ada0cd39e8b9c769150b6049054d14", "score": "0.44976357", "text": "func VerifyTierConfigWithWeb(t *testing.T, yamlContent string, options *helm.Options) {\n var pegaConfigMap k8score.ConfigMap\n configSlice := strings.Split(yamlContent, \"---\")\n for index, configData := range configSlice {\n if index >= 1 && index <= 3 {\n UnmarshalK8SYaml(t, configData, &pegaConfigMap)\n pegaConfigMapData := 
pegaConfigMap.Data\n compareConfigMapData(t, pegaConfigMapData[\"prconfig.xml\"], \"data/expectedInstallDeployPrconfig.xml\")\n compareConfigMapData(t, pegaConfigMapData[\"context.xml.tmpl\"], \"data/expectedInstallDeployContext.xml.tmpl\")\n compareConfigMapData(t, pegaConfigMapData[\"prlog4j2.xml\"], \"data/expectedInstallDeployPRlog4j2.xml\")\n compareConfigMapData(t, pegaConfigMapData[\"server.xml.tmpl\"], \"data/expectedInstallDeployServer.xml.tmpl\")\n compareConfigMapData(t, pegaConfigMapData[\"web.xml\"], \"data/expectedInstallDeployWeb.xml\")\n }\n }\n}", "title": "" }, { "docid": "7709943c3e5d9372fdfaa8fa60986a20", "score": "0.44955486", "text": "func validateSecrets(\n\tcr *kdv1.KubeDirectorCluster,\n\tvalErrors []string,\n\tpatches []clusterPatchSpec,\n) ([]string, []clusterPatchSpec) {\n\n\trequiredNamePrefix := shared.GetRequiredSecretPrefix()\n\n\tvalidateFunc := func(\n\t\tsecretName string,\n\t) secretValidateResult {\n\n\t\t// First check the name against any required prefix.\n\t\tif strings.HasPrefix(secretName, requiredNamePrefix) {\n\t\t\t// Now also check that the secret exists in this namespace.\n\t\t\t_, fetchErr := observer.GetSecret(\n\t\t\t\tcr.Namespace,\n\t\t\t\tsecretName,\n\t\t\t)\n\t\t\tif fetchErr != nil {\n\t\t\t\treturn secretNotFound\n\t\t\t}\n\t\t} else {\n\t\t\treturn secretPrefixNotMatched\n\t\t}\n\t\treturn secretIsValid\n\t}\n\n\tdefaultSecret := cr.Spec.DefaultSecret\n\tif defaultSecret != nil {\n\t\t// Validate the default secret, and return early if there are errors.\n\t\tdefaultSecretValidateResult := validateFunc(defaultSecret.Name)\n\t\tif defaultSecretValidateResult == secretPrefixNotMatched {\n\t\t\tvalErrors = append(\n\t\t\t\tvalErrors,\n\t\t\t\tfmt.Sprintf(\n\t\t\t\t\tinvalidDefaultSecretPrefix,\n\t\t\t\t\tdefaultSecret.Name,\n\t\t\t\t\trequiredNamePrefix,\n\t\t\t\t),\n\t\t\t)\n\t\t\treturn valErrors, patches\n\t\t}\n\t\tif defaultSecretValidateResult == secretNotFound {\n\t\t\tvalErrors = append(\n\t\t\t\tvalErrors,\n\t\t\t\tfmt.Sprintf(\n\t\t\t\t\tinvalidDefaultSecret,\n\t\t\t\t\tdefaultSecret.Name,\n\t\t\t\t\tcr.Namespace,\n\t\t\t\t),\n\t\t\t)\n\t\t\treturn valErrors, patches\n\t\t}\n\t}\n\n\t// Now also validate any role-specific secrets, and also handle populating\n\t// unspecified ones with the default (if any).\n\tnumRoles := len(cr.Spec.Roles)\n\tfor i := 0; i < numRoles; i++ {\n\t\trole := &(cr.Spec.Roles[i])\n\n\t\tif role.Secret != nil {\n\t\t\tsecretValidateResult := validateFunc(role.Secret.Name)\n\t\t\tif secretValidateResult == secretPrefixNotMatched {\n\t\t\t\tvalErrors = append(\n\t\t\t\t\tvalErrors,\n\t\t\t\t\tfmt.Sprintf(\n\t\t\t\t\t\tinvalidSecretPrefix,\n\t\t\t\t\t\trole.Secret.Name,\n\t\t\t\t\t\trole.Name,\n\t\t\t\t\t\trequiredNamePrefix,\n\t\t\t\t\t),\n\t\t\t\t)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif secretValidateResult == secretNotFound {\n\t\t\t\tvalErrors = append(\n\t\t\t\t\tvalErrors,\n\t\t\t\t\tfmt.Sprintf(\n\t\t\t\t\t\tinvalidSecret,\n\t\t\t\t\t\trole.Secret.Name,\n\t\t\t\t\t\trole.Name,\n\t\t\t\t\t\trequiredNamePrefix,\n\t\t\t\t\t),\n\t\t\t\t)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\n\t\t// If there is a defaultSecret, use that for this role (if not specified)\n\t\tif role.Secret == nil && cr.Spec.DefaultSecret != nil {\n\t\t\tpatches = append(\n\t\t\t\tpatches,\n\t\t\t\tclusterPatchSpec{\n\t\t\t\t\tOp: \"add\",\n\t\t\t\t\tPath: \"/spec/roles/\" + strconv.Itoa(i) + \"/secret\",\n\t\t\t\t\tValue: clusterPatchValue{\n\t\t\t\t\t\tValueKDSecret: 
defaultSecret,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t)\n\t\t}\n\t}\n\n\treturn valErrors, patches\n}", "title": "" }, { "docid": "c0806bf4da22b74d22bd75913fae3123", "score": "0.44948196", "text": "func listPermissions(w http.ResponseWriter, r *http.Request, t auth.Token) error {\n\tif !permission.Check(t, permission.PermRoleUpdate) {\n\t\treturn permission.ErrUnauthorized\n\t}\n\tlst := permission.PermissionRegistry.Permissions()\n\tsort.Sort(lst)\n\tpermList := make([]permissionSchemeData, len(lst))\n\tfor i, perm := range lst {\n\t\tcontexts := perm.AllowedContexts()\n\t\tcontextNames := make([]string, len(contexts))\n\t\tfor j, ctx := range contexts {\n\t\t\tcontextNames[j] = string(ctx)\n\t\t}\n\t\tpermList[i] = permissionSchemeData{\n\t\t\tName: perm.FullName(),\n\t\t\tContexts: contextNames,\n\t\t}\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\treturn json.NewEncoder(w).Encode(permList)\n}", "title": "" }, { "docid": "29d3a72168cfc5f71759688f27191cc5", "score": "0.44896883", "text": "func generateKubeYAMLCmd(c *cli.Context) error {\n\tvar (\n\t\tcontainer *libpod.Container\n\t\terr error\n\t\toutput []byte\n\t)\n\n\tif rootless.IsRootless() {\n\t\treturn errors.Wrapf(libpod.ErrNotImplemented, \"rootless users\")\n\t}\n\targs := c.Args()\n\tif len(args) > 1 || (len(args) < 1 && !c.Bool(\"latest\")) {\n\t\treturn errors.Errorf(\"you must provide one container ID or name or --latest\")\n\t}\n\tif c.Bool(\"service\") {\n\t\treturn errors.Wrapf(libpod.ErrNotImplemented, \"service generation\")\n\t}\n\n\truntime, err := libpodruntime.GetRuntime(c)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"could not get runtime\")\n\t}\n\tdefer runtime.Shutdown(false)\n\n\t// Get the container in question\n\tif c.Bool(\"latest\") {\n\t\tcontainer, err = runtime.GetLatestContainer()\n\t} else {\n\t\tcontainer, err = runtime.LookupContainer(args[0])\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif len(container.Dependencies()) > 0 {\n\t\treturn errors.Wrapf(libpod.ErrNotImplemented, \"containers with dependencies\")\n\t}\n\n\tpodYAML, err := container.InspectForKube()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdevelopmentComment := []byte(\"# Generation of Kubenetes YAML is still under development!\\n\")\n\tlogrus.Warn(\"This function is still under heavy development.\")\n\t// Marshall the results\n\tb, err := yaml.Marshal(podYAML)\n\tif err != nil {\n\t\treturn err\n\t}\n\toutput = append(output, developmentComment...)\n\toutput = append(output, b...)\n\t// Output the v1.Pod with the v1.Container\n\tfmt.Println(string(output))\n\n\treturn nil\n}", "title": "" }, { "docid": "80dd3e4a3171a167aba1ab2010818e06", "score": "0.44895178", "text": "func (m *manager) generateAROServiceKubeconfig(g graph) (*kubeconfig.AdminInternalClient, error) {\n\treturn generateKubeconfig(g, \"system:aro-service\", []string{\"system:masters\"})\n}", "title": "" }, { "docid": "a8644df7d0a6c6ba44a6209c4d91bf73", "score": "0.44891495", "text": "func GenerateKubeadmCfg(mgr *manager.Manager) (string, error) {\n\t// generate etcd configuration\n\tvar externalEtcd kubekeyapiv1alpha1.ExternalEtcd\n\tvar endpointsList []string\n\tvar caFile, certFile, keyFile, containerRuntimeEndpoint string\n\n\tfor _, host := range mgr.EtcdNodes {\n\t\tendpoint := fmt.Sprintf(\"https://%s:%s\", host.InternalAddress, kubekeyapiv1alpha1.DefaultEtcdPort)\n\t\tendpointsList = append(endpointsList, endpoint)\n\t}\n\texternalEtcd.Endpoints = endpointsList\n\n\tcaFile = \"/etc/ssl/etcd/ssl/ca.pem\"\n\tcertFile = 
fmt.Sprintf(\"/etc/ssl/etcd/ssl/node-%s.pem\", mgr.MasterNodes[0].Name)\n\tkeyFile = fmt.Sprintf(\"/etc/ssl/etcd/ssl/node-%s-key.pem\", mgr.MasterNodes[0].Name)\n\n\texternalEtcd.CaFile = caFile\n\texternalEtcd.CertFile = certFile\n\texternalEtcd.KeyFile = keyFile\n\n\t// generate cri configuration\n\tswitch mgr.Cluster.Kubernetes.ContainerManager {\n\tcase \"docker\":\n\t\tcontainerRuntimeEndpoint = \"\"\n\tcase \"crio\":\n\t\tcontainerRuntimeEndpoint = kubekeyapiv1alpha1.DefaultCrioEndpoint\n\tcase \"containerd\":\n\t\tcontainerRuntimeEndpoint = kubekeyapiv1alpha1.DefaultContainerdEndpoint\n\tcase \"isula\":\n\t\tcontainerRuntimeEndpoint = kubekeyapiv1alpha1.DefaultIsulaEndpoint\n\tdefault:\n\t\tcontainerRuntimeEndpoint = \"\"\n\t}\n\n\tif mgr.Cluster.Kubernetes.ContainerRuntimeEndpoint != \"\" {\n\t\tcontainerRuntimeEndpoint = mgr.Cluster.Kubernetes.ContainerRuntimeEndpoint\n\t}\n\n\tcgroupDriver, err := getKubeletCgroupDriver(mgr)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn util.Render(KubeadmCfgTempl, util.Data{\n\t\t\"ImageRepo\": strings.TrimSuffix(preinstall.GetImage(mgr, \"kube-apiserver\").ImageRepo(), \"/kube-apiserver\"),\n\t\t\"CorednsRepo\": strings.TrimSuffix(preinstall.GetImage(mgr, \"coredns\").ImageRepo(), \"/coredns\"),\n\t\t\"CorednsTag\": preinstall.GetImage(mgr, \"coredns\").Tag,\n\t\t\"Version\": mgr.Cluster.Kubernetes.Version,\n\t\t\"ClusterName\": mgr.Cluster.Kubernetes.ClusterName,\n\t\t\"ControlPlaneEndpoint\": fmt.Sprintf(\"%s:%d\", mgr.Cluster.ControlPlaneEndpoint.Domain, mgr.Cluster.ControlPlaneEndpoint.Port),\n\t\t\"PodSubnet\": mgr.Cluster.Network.KubePodsCIDR,\n\t\t\"ServiceSubnet\": mgr.Cluster.Network.KubeServiceCIDR,\n\t\t\"CertSANs\": mgr.Cluster.GenerateCertSANs(),\n\t\t\"ExternalEtcd\": externalEtcd,\n\t\t\"ClusterIP\": \"169.254.25.10\",\n\t\t\"MasqueradeAll\": mgr.Cluster.Kubernetes.MasqueradeAll,\n\t\t\"NodeCidrMaskSize\": mgr.Cluster.Kubernetes.NodeCidrMaskSize,\n\t\t\"MaxPods\": mgr.Cluster.Kubernetes.MaxPods,\n\t\t\"ProxyMode\": mgr.Cluster.Kubernetes.ProxyMode,\n\t\t\"CriSock\": containerRuntimeEndpoint,\n\t\t\"CgroupDriver\": cgroupDriver,\n\t})\n}", "title": "" }, { "docid": "82efab34378fc5b8ad37a9558b6c2b15", "score": "0.4488705", "text": "func addDefaults(dClient *discovery.DiscoveryClient, mapper *meta.DefaultRESTMapper) error {\n\tvar err error\n\n\t// Liqo groups\n\tif err = addGroup(dClient, configv1alpha1.GroupVersion, mapper); err != nil {\n\t\treturn err\n\t}\n\tif err = addGroup(dClient, discoveryv1alpha1.GroupVersion, mapper); err != nil {\n\t\treturn err\n\t}\n\tif err = addGroup(dClient, netv1alpha1.GroupVersion, mapper); err != nil {\n\t\treturn err\n\t}\n\tif err = addGroup(dClient, sharingv1alpha1.GroupVersion, mapper); err != nil {\n\t\treturn err\n\t}\n\tif err = addGroup(dClient, virtualKubeletv1alpha1.GroupVersion, mapper); err != nil {\n\t\treturn err\n\t}\n\tif err = addGroup(dClient, offv1alpha1.GroupVersion, mapper); err != nil {\n\t\treturn err\n\t}\n\n\t// Capsule groups\n\tif err = addGroup(dClient, capsulev1beta1.GroupVersion, mapper); err != nil {\n\t\treturn err\n\t}\n\n\t// Kubernetes groups\n\tif err = addGroup(dClient, corev1.SchemeGroupVersion, mapper); err != nil {\n\t\treturn err\n\t}\n\tif err = addGroup(dClient, appsv1.SchemeGroupVersion, mapper); err != nil {\n\t\treturn err\n\t}\n\tif err = addGroup(dClient, rbacv1.SchemeGroupVersion, mapper); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "8abe1322e33c05a20257ab4989f9954c", "score": "0.4488014", "text": 
"func patchResources(ctx context.Context, resources *unstructured.UnstructuredList, namespace string, options *installOptions) error {\n\ti := 0\n\tsize := 2\n\tvar deploy apps.Deployment\n\tvar imagePullSecret corev1.Secret\n\n\tfor resourceIndex, resource := range resources.Items {\n\t\t// apply the image pull secret to avoid the image pull limit of Docker Hub\n\t\tif len(options.RegistryCredentialFile) > 0 && resource.GetKind() == \"ServiceAccount\" &&\n\t\t\tresource.GetName() == \"velero\" {\n\t\t\tcredential, err := os.ReadFile(options.RegistryCredentialFile)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.Wrapf(err, \"failed to read the registry credential file %s\", options.RegistryCredentialFile)\n\t\t\t}\n\t\t\timagePullSecret = corev1.Secret{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"Secret\",\n\t\t\t\t\tAPIVersion: corev1.SchemeGroupVersion.String(),\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: \"image-pull-secret\",\n\t\t\t\t\tNamespace: namespace,\n\t\t\t\t},\n\t\t\t\tType: corev1.SecretTypeDockerConfigJson,\n\t\t\t\tData: map[string][]byte{\n\t\t\t\t\t\".dockerconfigjson\": credential,\n\t\t\t\t},\n\t\t\t}\n\t\t\tresource.Object[\"imagePullSecrets\"] = []map[string]interface{}{\n\t\t\t\t{\n\t\t\t\t\t\"name\": \"image-pull-secret\",\n\t\t\t\t},\n\t\t\t}\n\t\t\tresources.Items[resourceIndex] = resource\n\t\t\tfmt.Printf(\"image pull secret %q set for velero serviceaccount \\n\", \"image-pull-secret\")\n\n\t\t\tun, err := toUnstructured(imagePullSecret)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.Wrapf(err, \"failed to convert pull secret to unstructure\")\n\t\t\t}\n\t\t\tresources.Items = append(resources.Items, un)\n\t\t\ti++\n\t\t} else if options.VeleroServerDebugMode && resource.GetKind() == \"Deployment\" &&\n\t\t\tresource.GetName() == \"velero\" {\n\t\t\tdeployJsonStr, err := json.Marshal(resource.Object)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.Wrapf(err, \"failed to marshal velero deployment\")\n\t\t\t}\n\t\t\tif err := json.Unmarshal(deployJsonStr, &deploy); err != nil {\n\t\t\t\treturn errors.Wrapf(err, \"failed to unmarshal velero deployment\")\n\t\t\t}\n\t\t\tveleroDeployIndex := -1\n\t\t\tfor containerIndex, container := range deploy.Spec.Template.Spec.Containers {\n\t\t\t\tif container.Name == \"velero\" {\n\t\t\t\t\tveleroDeployIndex = containerIndex\n\t\t\t\t\tcontainer.Args = append(container.Args, \"--log-level\", \"debug\")\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\tif veleroDeployIndex >= 0 {\n\t\t\t\tdeploy.Spec.Template.Spec.Containers[veleroDeployIndex].Args = append(deploy.Spec.Template.Spec.Containers[veleroDeployIndex].Args, \"--log-level\")\n\t\t\t\tdeploy.Spec.Template.Spec.Containers[veleroDeployIndex].Args = append(deploy.Spec.Template.Spec.Containers[veleroDeployIndex].Args, \"debug\")\n\t\t\t\tun, err := toUnstructured(deploy)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn errors.Wrapf(err, \"failed to unstructured velero deployment\")\n\t\t\t\t}\n\t\t\t\tresources.Items = append(resources.Items, un)\n\t\t\t\tresources.Items = append(resources.Items[:resourceIndex], resources.Items[resourceIndex+1:]...)\n\t\t\t} else {\n\t\t\t\treturn errors.New(\"failed to get velero container in velero pod\")\n\t\t\t}\n\t\t\ti++\n\t\t}\n\t\tif i == size {\n\t\t\tbreak\n\t\t}\n\t}\n\n\t// customize the restic restore helper image\n\tif len(options.RestoreHelperImage) > 0 {\n\t\trestoreActionConfig := corev1.ConfigMap{\n\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\tAPIVersion: 
corev1.SchemeGroupVersion.String(),\n\t\t\t},\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: \"restic-restore-action-config\",\n\t\t\t\tNamespace: namespace,\n\t\t\t\tLabels: map[string]string{\n\t\t\t\t\t\"velero.io/plugin-config\": \"\",\n\t\t\t\t\t\"velero.io/pod-volume-restore\": \"RestoreItemAction\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tData: map[string]string{\n\t\t\t\t\"image\": options.RestoreHelperImage,\n\t\t\t},\n\t\t}\n\n\t\tun, err := toUnstructured(restoreActionConfig)\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to convert restore action config to unstructure\")\n\t\t}\n\t\tresources.Items = append(resources.Items, un)\n\t\tfmt.Printf(\"the restic restore helper image is set by the configmap %q \\n\", \"restic-restore-action-config\")\n\t}\n\n\treturn nil\n}", "title": "" }, { "docid": "f97ffbdaed7eb8056111b027a7e60783", "score": "0.44879746", "text": "func printNamespaces(writer io.Writer) error {\n\tcurrentConfig := getKubeConfigOfCurrentTarget()\n\tout, err := ExecCmdReturnOutput(\"kubectl\", \"--kubeconfig=\"+currentConfig, \"get\", \"ns\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Fprint(writer, out)\n\treturn nil\n}", "title": "" }, { "docid": "58dfa7cf11c6b973b232f36ef01fe79e", "score": "0.44857472", "text": "func main() {\r\n ctx := context.Background()\r\n const TenantName = \"you_tenant_name\"\r\n const ClientID = \"you_client_id\"\r\n const ClientSecret = \"you_secret\"\r\n const SiteName = \"Test\"\r\n const ConnectorName = \"kubernets\"\r\n\r\n\r\n// create new client\r\n client := goluminate.NewClient(ctx, ClientID, ClientSecret, TenantName)\r\n\r\n\r\n\r\n// -- New Site\r\n site := goluminate.NewSiteRequest{Name: SiteName}\r\n newSite, _, err := client.CreateSite(ctx,site)\r\n if err != nil {\r\n panic(err)\r\n }\r\n fmt.Println(newSite.ID)\r\n\r\n// -- New Connector\r\n connector := goluminate.NewConnectorRequest{Name: ConnectorName, Version: \"1.0\"}\r\n newConnector, _, err := client.CreateConnector(ctx,connector,newSite.ID)\r\n if err != nil {\r\n panic(err)\r\n }\r\n fmt.Println(newConnector.ID)\r\n\r\n\r\n// -- Install K8S \r\n connectorGetCommand := goluminate.ConnectorCommandRequest{ConnectorName: ConnectorName}\r\n ConnectorInstall, _, err := client.GetConnectorCommand(ctx,connectorGetCommand,newConnector.ID)\r\n if err != nil {\r\n panic(err)\r\n }\r\n fmt.Println(ConnectorInstall.K8S)\r\n\r\n// -- Create http application\r\n newAppHttp := goluminate.AppHttpCreateRequest{Name: \"test AppB\", Type: \"HTTP\",IsVisible: true,IsNotificationEnabled: true}\r\n newAppHttp.ConnectionSettings.InternalAddress=\"http://test.local.com\"\r\n newAppHttp.ConnectionSettings.CustomRootPath=\"/\"\r\n newAppHttp.ConnectionSettings.HealthURL=\"/\"\r\n newAppHttp.ConnectionSettings.HealthMethod=\"Head\"\r\n HttpApp, _, err := client.CreateApp(ctx,newAppHttp)\r\n if err != nil {\r\n panic(err)\r\n }\r\n fmt.Println(HttpApp.ID)\r\n// -- Bind App to Site\r\n client.BindAppToSite(ctx,HttpApp.ID,newSite.ID)\r\n\r\n// -- Create ssh application\r\n newAppSSH := goluminate.AppSshCreateRequest{Name: \"test AppC\", Type: \"SSH\",IsVisible: true,IsNotificationEnabled: true}\r\n newAppSSH.ConnectionSettings.InternalAddress=\"tcp://test.local.com:22\"\r\n newAppSSH.SSHSettings.UserAccounts = append(newAppSSH.SSHSettings.UserAccounts, goluminate.SshUserAccounts{Name: \"root\"})\r\n SSHApp, _, err := client.CreateApp(ctx,newAppSSH)\r\n if err != nil {\r\n panic(err)\r\n }\r\n fmt.Println(SSHApp.ID)\r\n// -- Bind App to Site\r\n 
client.BindAppToSite(ctx,SSHApp.ID,newSite.ID)\r\n\r\n\r\n// -- Create tcp application\r\n tcpAppName := \"test AppD\"\r\n newAppTCP := goluminate.AppTcpCreateRequest{Name: tcpAppName, Type: \"TCP\",IsVisible: true,IsNotificationEnabled: true}\r\n var TcpAppPortList []string\r\n TcpAppPortList = append(TcpAppPortList, \"3306\")\r\n var subdomain string\r\n subdomain = strings.Replace(tcpAppName, \" \", \"\", -1)\r\n newAppTCP.ConnectionSettings.Subdomain=strings.ToLower(subdomain)\r\n newAppTCP.TcpTunnelSettings = append(newAppTCP.TcpTunnelSettings, goluminate.TcpTunnelSettings{Target: \"test.local.com\", Ports: TcpAppPortList})\r\n \r\n TCPApp, _, err := client.CreateApp(ctx,newAppTCP)\r\n if err != nil {\r\n panic(err)\r\n }\r\n fmt.Println(TCPApp.ID)\r\n// -- Bind App to Site\r\n client.BindAppToSite(ctx,TCPApp.ID,newSite.ID)\r\n\r\n\r\n//list all application\r\n applist, _, err := client.ListApp(ctx, \"\")\r\n\r\n if err != nil {\r\n panic(err)\r\n }\r\n fmt.Println(applist.Content[0].ID, applist.Content[0].Name)\r\n fmt.Println(applist.Content[3].ID, applist.Content[3].Name)\r\n\r\n// Update site\r\nupdatesite := goluminate.NewSiteRequest{Name: \"updated\", Description: \"updated version\"}\r\n_, _, err = client.UpdateSite(ctx,updatesite, newSite.ID)\r\nif err != nil {\r\n panic(err)\r\n}\r\n// Update connector\r\nupdateconnector := goluminate.NewConnectorRequest{Name: \"myupdate\", Version: \"1.0\"}\r\n_, _, err = client.UpdateConnector(ctx,updateconnector, newConnector.ID)\r\nif err != nil {\r\n panic(err)\r\n}\r\n// Updated SSH App\r\nupdateAppSSH := goluminate.AppSshCreateRequest{Name: \"test AppC updated\", Type: \"SSH\",IsVisible: true,IsNotificationEnabled: true}\r\nupdateAppSSH.ConnectionSettings.InternalAddress=\"tcp://newhost.local.com:22\"\r\nupdateAppSSH.SSHSettings.UserAccounts = append(updateAppSSH.SSHSettings.UserAccounts, goluminate.SshUserAccounts{Name: \"ubuntu\"})\r\n_, _, err = client.UpdateApp(ctx,updateAppSSH, SSHApp.ID)\r\nif err != nil {\r\n panic(err)\r\n}\r\n\r\n\r\n// Get info about TCP application \r\nclient.GetApp(ctx, TCPApp.ID)\r\n// Delete TCP application\r\nclient.DeleteApp(ctx, TCPApp.ID)\r\n// Get connector info\r\nclient.GetConnector(ctx, newConnector.ID)\r\n// Delete connector\r\nclient.DeleteConnector(ctx, newConnector.ID)\r\n// Get site info\r\nsite, _ ,_ := client.GetSite(ctx, newSite.ID)\r\nfmt.Println(site.Connectors[0])\r\n// Delete site\r\nclient.DeleteSite(ctx, newSite.ID)\r\n}", "title": "" } ]
4b77199705901d469fd4e6a602ff8401
FilterUnbond is a free log retrieval operation binding the contract event 0x1bcf1da2dc680801f7898e72d42550dffb4a3e26dd3a33ff2b23e9f4f6131e9e. Solidity: event Unbond(address indexed sender, uint256 tokenAmount, uint256 dropburnAmount, address nodeAddr)
[ { "docid": "a6b9a4a357fada36e067f15e8c9c200f", "score": "0.7783553", "text": "func (_Staking *StakingFilterer) FilterUnbond(opts *bind.FilterOpts, sender []common.Address) (*StakingUnbondIterator, error) {\n\n\tvar senderRule []interface{}\n\tfor _, senderItem := range sender {\n\t\tsenderRule = append(senderRule, senderItem)\n\t}\n\n\tlogs, sub, err := _Staking.contract.FilterLogs(opts, \"Unbond\", senderRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &StakingUnbondIterator{contract: _Staking.contract, event: \"Unbond\", logs: logs, sub: sub}, nil\n}", "title": "" } ]
[ { "docid": "d7c586d824bbe5f3eb9dcc46b109ed7e", "score": "0.6907213", "text": "func (_Staking *StakingFilterer) WatchUnbond(opts *bind.WatchOpts, sink chan<- *StakingUnbond, sender []common.Address) (event.Subscription, error) {\n\n\tvar senderRule []interface{}\n\tfor _, senderItem := range sender {\n\t\tsenderRule = append(senderRule, senderItem)\n\t}\n\n\tlogs, sub, err := _Staking.contract.WatchLogs(opts, \"Unbond\", senderRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event.NewSubscription(func(quit <-chan struct{}) error {\n\t\tdefer sub.Unsubscribe()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase log := <-logs:\n\t\t\t\t// New log arrived, parse the event and forward to the user\n\t\t\t\tevent := new(StakingUnbond)\n\t\t\t\tif err := _Staking.contract.UnpackLog(event, \"Unbond\", log); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tevent.Raw = log\n\n\t\t\t\tselect {\n\t\t\t\tcase sink <- event:\n\t\t\t\tcase err := <-sub.Err():\n\t\t\t\t\treturn err\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\tcase err := <-sub.Err():\n\t\t\t\treturn err\n\t\t\tcase <-quit:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}), nil\n}", "title": "" }, { "docid": "fda12b92ec18f29d43ae3cd14e1bd018", "score": "0.60053915", "text": "func (_Token *TokenFilterer) FilterBurn(opts *bind.FilterOpts, from []common.Address) (*TokenBurnIterator, error) {\n\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\n\tlogs, sub, err := _Token.contract.FilterLogs(opts, \"Burn\", fromRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &TokenBurnIterator{contract: _Token.contract, event: \"Burn\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "ddfdb701ee121d53fb04b274d877bc5e", "score": "0.5944761", "text": "func (_Staking *StakingTransactor) NodeUnbond(opts *bind.TransactOpts, _tokenAmount *big.Int, _dropburnAmount *big.Int, _nodeAddr common.Address) (*types.Transaction, error) {\n\treturn _Staking.contract.Transact(opts, \"nodeUnbond\", _tokenAmount, _dropburnAmount, _nodeAddr)\n}", "title": "" }, { "docid": "2d75910d426d2494222cbed27b6da818", "score": "0.5867665", "text": "func (_Staking *StakingSession) NodeUnbond(_tokenAmount *big.Int, _dropburnAmount *big.Int, _nodeAddr common.Address) (*types.Transaction, error) {\n\treturn _Staking.Contract.NodeUnbond(&_Staking.TransactOpts, _tokenAmount, _dropburnAmount, _nodeAddr)\n}", "title": "" }, { "docid": "0d3db49a2a4058e38f0f5a045399d5d9", "score": "0.5860635", "text": "func (_Staking *StakingTransactorSession) NodeUnbond(_tokenAmount *big.Int, _dropburnAmount *big.Int, _nodeAddr common.Address) (*types.Transaction, error) {\n\treturn _Staking.Contract.NodeUnbond(&_Staking.TransactOpts, _tokenAmount, _dropburnAmount, _nodeAddr)\n}", "title": "" }, { "docid": "ca4ef87603620cfc579203752a39fc19", "score": "0.58068675", "text": "func (_Main *MainFilterer) FilterBurn(opts *bind.FilterOpts, sender []common.Address, to []common.Address) (*MainBurnIterator, error) {\n\n\tvar senderRule []interface{}\n\tfor _, senderItem := range sender {\n\t\tsenderRule = append(senderRule, senderItem)\n\t}\n\n\tvar toRule []interface{}\n\tfor _, toItem := range to {\n\t\ttoRule = append(toRule, toItem)\n\t}\n\n\tlogs, sub, err := _Main.contract.FilterLogs(opts, \"Burn\", senderRule, toRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &MainBurnIterator{contract: _Main.contract, event: \"Burn\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": 
"d31f3d3345c1fcf12e529d27d1bf613c", "score": "0.5799071", "text": "func (_Supercoin *SupercoinFilterer) FilterBurn(opts *bind.FilterOpts, from []common.Address) (*SupercoinBurnIterator, error) {\n\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\n\tlogs, sub, err := _Supercoin.contract.FilterLogs(opts, \"Burn\", fromRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &SupercoinBurnIterator{contract: _Supercoin.contract, event: \"Burn\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "2a451085a3e30f60aef8e48a138b6822", "score": "0.56658447", "text": "func (_BondingManager *BondingManagerTransactor) Unbond(opts *bind.TransactOpts) (*types.Transaction, error) {\n\treturn _BondingManager.contract.Transact(opts, \"unbond\")\n}", "title": "" }, { "docid": "92bbc9a18ee842d315fbef012ef0fef7", "score": "0.5559341", "text": "func (_BondingManager *BondingManagerTransactorSession) Unbond() (*types.Transaction, error) {\n\treturn _BondingManager.Contract.Unbond(&_BondingManager.TransactOpts)\n}", "title": "" }, { "docid": "a7295811b2de3fe524b641eaf8923947", "score": "0.5542772", "text": "func (_InterchainSwap *InterchainSwapFilterer) FilterBurn(opts *bind.FilterOpts) (*InterchainSwapBurnIterator, error) {\n\n\tlogs, sub, err := _InterchainSwap.contract.FilterLogs(opts, \"Burn\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &InterchainSwapBurnIterator{contract: _InterchainSwap.contract, event: \"Burn\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "296ebe1cf592beabe309b9115ba52f1e", "score": "0.55310076", "text": "func (_BondingManager *BondingManagerSession) Unbond() (*types.Transaction, error) {\n\treturn _BondingManager.Contract.Unbond(&_BondingManager.TransactOpts)\n}", "title": "" }, { "docid": "3cc1ca79267cb59b0263b3a6ebe20249", "score": "0.54746306", "text": "func (_Staking *StakingFilterer) FilterUpdateUnbondDuration(opts *bind.FilterOpts) (*StakingUpdateUnbondDurationIterator, error) {\n\n\tlogs, sub, err := _Staking.contract.FilterLogs(opts, \"UpdateUnbondDuration\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &StakingUpdateUnbondDurationIterator{contract: _Staking.contract, event: \"UpdateUnbondDuration\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "631828c118c21bab52a143e56245acd3", "score": "0.5462143", "text": "func (_Sigmacore *SigmacoreFilterer) FilterLOGTOKENREMOVED(opts *bind.FilterOpts) (*SigmacoreLOGTOKENREMOVEDIterator, error) {\n\n\tlogs, sub, err := _Sigmacore.contract.FilterLogs(opts, \"LOG_TOKEN_REMOVED\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &SigmacoreLOGTOKENREMOVEDIterator{contract: _Sigmacore.contract, event: \"LOG_TOKEN_REMOVED\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "19e91943d38c9bcb2154f8b84701c977", "score": "0.5402183", "text": "func (_IPancakePair *IPancakePairFilterer) FilterBurn(opts *bind.FilterOpts, sender []common.Address, to []common.Address) (*IPancakePairBurnIterator, error) {\n\n\tvar senderRule []interface{}\n\tfor _, senderItem := range sender {\n\t\tsenderRule = append(senderRule, senderItem)\n\t}\n\n\tvar toRule []interface{}\n\tfor _, toItem := range to {\n\t\ttoRule = append(toRule, toItem)\n\t}\n\n\tlogs, sub, err := _IPancakePair.contract.FilterLogs(opts, \"Burn\", senderRule, toRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &IPancakePairBurnIterator{contract: _IPancakePair.contract, event: \"Burn\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": 
"ec118d2a9a497bc8c94302040dadaf0c", "score": "0.5388568", "text": "func (_EtherToken *EtherTokenFilterer) FilterWithdrawn(opts *bind.FilterOpts, to []common.Address) (*EtherTokenWithdrawnIterator, error) {\n\n\tvar toRule []interface{}\n\tfor _, toItem := range to {\n\t\ttoRule = append(toRule, toItem)\n\t}\n\n\tlogs, sub, err := _EtherToken.contract.FilterLogs(opts, \"Withdrawn\", toRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &EtherTokenWithdrawnIterator{contract: _EtherToken.contract, event: \"Withdrawn\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "6e5eafb323570c190cedb99b8f1efd3f", "score": "0.5355469", "text": "func (_Dospayment *DospaymentFilterer) FilterUpdateDropBurnTokenAddress(opts *bind.FilterOpts) (*DospaymentUpdateDropBurnTokenAddressIterator, error) {\n\n\tlogs, sub, err := _Dospayment.contract.FilterLogs(opts, \"UpdateDropBurnTokenAddress\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &DospaymentUpdateDropBurnTokenAddressIterator{contract: _Dospayment.contract, event: \"UpdateDropBurnTokenAddress\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "29d98a303654afa51a710bbc67e34039", "score": "0.52809507", "text": "func (_DataRegistry *DataRegistryFilterer) FilterBundleUnregistered(opts *bind.FilterOpts, collectionId [][8]byte, bundleId [][8]byte) (*DataRegistryBundleUnregisteredIterator, error) {\n\n\tvar collectionIdRule []interface{}\n\tfor _, collectionIdItem := range collectionId {\n\t\tcollectionIdRule = append(collectionIdRule, collectionIdItem)\n\t}\n\tvar bundleIdRule []interface{}\n\tfor _, bundleIdItem := range bundleId {\n\t\tbundleIdRule = append(bundleIdRule, bundleIdItem)\n\t}\n\n\tlogs, sub, err := _DataRegistry.contract.FilterLogs(opts, \"BundleUnregistered\", collectionIdRule, bundleIdRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &DataRegistryBundleUnregisteredIterator{contract: _DataRegistry.contract, event: \"BundleUnregistered\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "39a631932b334110734534bf594b8ae7", "score": "0.52734435", "text": "func (t *bloomClient) Drop(ctx context.Context, filter string) error {\n\treq := &pb.FilterRequest{Name: filter}\n\n\ttimedCtx, cancel := context.WithTimeout(context.Background(), t.timeout)\n\tdefer cancel()\n\t_, err := t.client.DropFilter(timedCtx, req)\n\treturn err\n}", "title": "" }, { "docid": "cc774c6cdb8d9577dc5f1ffeb46f5d4c", "score": "0.5229515", "text": "func (_TestDepositTokenContract *TestDepositTokenContractFilterer) FilterBurned(opts *bind.FilterOpts, operator []common.Address, from []common.Address) (*TestDepositTokenContractBurnedIterator, error) {\n\n\tvar operatorRule []interface{}\n\tfor _, operatorItem := range operator {\n\t\toperatorRule = append(operatorRule, operatorItem)\n\t}\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\n\tlogs, sub, err := _TestDepositTokenContract.contract.FilterLogs(opts, \"Burned\", operatorRule, fromRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &TestDepositTokenContractBurnedIterator{contract: _TestDepositTokenContract.contract, event: \"Burned\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "0a12e7875f142a2ec181556f69bf98af", "score": "0.5228469", "text": "func (f Filter) Drop() error {\n\treturn checkResponse(f.client.sendAndReceive([]byte(\"drop \" + f.Name)))\n}", "title": "" }, { "docid": "3dbe1b4713d9eab511b3ab5026d2198f", "score": "0.5181757", "text": "func (_Bridge *BridgeFilterer) 
FilterRefundClaimed(opts *bind.FilterOpts, _userAddress []common.Address, _tokenAddress []common.Address) (*BridgeRefundClaimedIterator, error) {\n\n\tvar _userAddressRule []interface{}\n\tfor _, _userAddressItem := range _userAddress {\n\t\t_userAddressRule = append(_userAddressRule, _userAddressItem)\n\t}\n\tvar _tokenAddressRule []interface{}\n\tfor _, _tokenAddressItem := range _tokenAddress {\n\t\t_tokenAddressRule = append(_tokenAddressRule, _tokenAddressItem)\n\t}\n\n\tlogs, sub, err := _Bridge.contract.FilterLogs(opts, \"RefundClaimed\", _userAddressRule, _tokenAddressRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &BridgeRefundClaimedIterator{contract: _Bridge.contract, event: \"RefundClaimed\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "d133e8f15d729b8155bce54a2dcb4270", "score": "0.5116979", "text": "func (b *BusImpl) Filter(args ...interface{}) error {\n\treturn nil\n}", "title": "" }, { "docid": "8faadf0f3c8ad49c88343bbed75dcd3c", "score": "0.5116367", "text": "func (_DFedUSDD *DFedUSDDFilterer) FilterWithdraw(opts *bind.FilterOpts, from []common.Address) (*DFedUSDDWithdrawIterator, error) {\n\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\n\tlogs, sub, err := _DFedUSDD.contract.FilterLogs(opts, \"Withdraw\", fromRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &DFedUSDDWithdrawIterator{contract: _DFedUSDD.contract, event: \"Withdraw\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "13d6a37693c88f90e6839d66705aef29", "score": "0.50927466", "text": "func (_Sigmacore *SigmacoreFilterer) FilterLOGEXIT(opts *bind.FilterOpts, caller []common.Address, tokenOut []common.Address) (*SigmacoreLOGEXITIterator, error) {\n\n\tvar callerRule []interface{}\n\tfor _, callerItem := range caller {\n\t\tcallerRule = append(callerRule, callerItem)\n\t}\n\tvar tokenOutRule []interface{}\n\tfor _, tokenOutItem := range tokenOut {\n\t\ttokenOutRule = append(tokenOutRule, tokenOutItem)\n\t}\n\n\tlogs, sub, err := _Sigmacore.contract.FilterLogs(opts, \"LOG_EXIT\", callerRule, tokenOutRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &SigmacoreLOGEXITIterator{contract: _Sigmacore.contract, event: \"LOG_EXIT\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "1ae087d960e9f172b34f9a7a56812cf3", "score": "0.50290984", "text": "func (l *SLogger) Deb(fnc tFncName, msg string) {\n\tl.logw(fnc, msg, SEVDEBUG, DLB)\n}", "title": "" }, { "docid": "c61f6f90956ffcb915878c454b9b5955", "score": "0.5026245", "text": "func (_ERC777 *ERC777Filterer) FilterBurned(opts *bind.FilterOpts, operator []common.Address, from []common.Address) (*ERC777BurnedIterator, error) {\n\n\tvar operatorRule []interface{}\n\tfor _, operatorItem := range operator {\n\t\toperatorRule = append(operatorRule, operatorItem)\n\t}\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\n\tlogs, sub, err := _ERC777.contract.FilterLogs(opts, \"Burned\", operatorRule, fromRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &ERC777BurnedIterator{contract: _ERC777.contract, event: \"Burned\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "86fd9199eb0083d73964615d472e7d43", "score": "0.5022342", "text": "func (_Dospayment *DospaymentFilterer) FilterUpdateDropBurnMaxQuota(opts *bind.FilterOpts) (*DospaymentUpdateDropBurnMaxQuotaIterator, error) {\n\n\tlogs, sub, err := _Dospayment.contract.FilterLogs(opts, 
\"UpdateDropBurnMaxQuota\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &DospaymentUpdateDropBurnMaxQuotaIterator{contract: _Dospayment.contract, event: \"UpdateDropBurnMaxQuota\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "db92373f9b87f9eadd9102d35297bbc0", "score": "0.5005149", "text": "func (_BaseContentSpace *BaseContentSpaceFilterer) FilterBindUserWallet(opts *bind.FilterOpts) (*BaseContentSpaceBindUserWalletIterator, error) {\n\n\tlogs, sub, err := _BaseContentSpace.contract.FilterLogs(opts, \"BindUserWallet\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &BaseContentSpaceBindUserWalletIterator{contract: _BaseContentSpace.contract, event: \"BindUserWallet\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "96ad0293a39f337a9d86804438cdfe39", "score": "0.49999642", "text": "func (_Escrow *EscrowFilterer) FilterFunded(opts *bind.FilterOpts, from []common.Address) (*EscrowFundedIterator, error) {\n\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\n\tlogs, sub, err := _Escrow.contract.FilterLogs(opts, \"Funded\", fromRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &EscrowFundedIterator{contract: _Escrow.contract, event: \"Funded\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "81b552199a429857d13b7ff1dabd77d7", "score": "0.49995285", "text": "func (_FeeBankContract *FeeBankContractFilterer) FilterWithdrawEvent(opts *bind.FilterOpts) (*FeeBankContractWithdrawEventIterator, error) {\n\n\tlogs, sub, err := _FeeBankContract.contract.FilterLogs(opts, \"WithdrawEvent\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &FeeBankContractWithdrawEventIterator{contract: _FeeBankContract.contract, event: \"WithdrawEvent\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "d1c12c59645cf17d069c96c90364bdd0", "score": "0.49969128", "text": "func (_Staking *StakingFilterer) FilterUpdateDropBurnMaxQuota(opts *bind.FilterOpts) (*StakingUpdateDropBurnMaxQuotaIterator, error) {\n\n\tlogs, sub, err := _Staking.contract.FilterLogs(opts, \"UpdateDropBurnMaxQuota\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &StakingUpdateDropBurnMaxQuotaIterator{contract: _Staking.contract, event: \"UpdateDropBurnMaxQuota\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "0929e2e4a14596a351e719b27c847e7e", "score": "0.49675527", "text": "func (_Stakingbindings *StakingbindingsFilterer) FilterWithdrawn(opts *bind.FilterOpts, user []common.Address) (*StakingbindingsWithdrawnIterator, error) {\n\n\tvar userRule []interface{}\n\tfor _, userItem := range user {\n\t\tuserRule = append(userRule, userItem)\n\t}\n\n\tlogs, sub, err := _Stakingbindings.contract.FilterLogs(opts, \"Withdrawn\", userRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &StakingbindingsWithdrawnIterator{contract: _Stakingbindings.contract, event: \"Withdrawn\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "baabe176f981caef6b64d14ee15788f8", "score": "0.49498048", "text": "func (_IERC777 *IERC777Filterer) FilterBurned(opts *bind.FilterOpts, operator []common.Address, from []common.Address) (*IERC777BurnedIterator, error) {\n\n\tvar operatorRule []interface{}\n\tfor _, operatorItem := range operator {\n\t\toperatorRule = append(operatorRule, operatorItem)\n\t}\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\n\tlogs, sub, err := _IERC777.contract.FilterLogs(opts, \"Burned\", operatorRule, 
fromRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &IERC777BurnedIterator{contract: _IERC777.contract, event: \"Burned\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "48534891061738f14298fea48bdf2537", "score": "0.49459642", "text": "func (_DataRegistry *DataRegistryFilterer) WatchBundleUnregistered(opts *bind.WatchOpts, sink chan<- *DataRegistryBundleUnregistered, collectionId [][8]byte, bundleId [][8]byte) (event.Subscription, error) {\n\n\tvar collectionIdRule []interface{}\n\tfor _, collectionIdItem := range collectionId {\n\t\tcollectionIdRule = append(collectionIdRule, collectionIdItem)\n\t}\n\tvar bundleIdRule []interface{}\n\tfor _, bundleIdItem := range bundleId {\n\t\tbundleIdRule = append(bundleIdRule, bundleIdItem)\n\t}\n\n\tlogs, sub, err := _DataRegistry.contract.WatchLogs(opts, \"BundleUnregistered\", collectionIdRule, bundleIdRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event.NewSubscription(func(quit <-chan struct{}) error {\n\t\tdefer sub.Unsubscribe()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase log := <-logs:\n\t\t\t\t// New log arrived, parse the event and forward to the user\n\t\t\t\tevent := new(DataRegistryBundleUnregistered)\n\t\t\t\tif err := _DataRegistry.contract.UnpackLog(event, \"BundleUnregistered\", log); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tevent.Raw = log\n\n\t\t\t\tselect {\n\t\t\t\tcase sink <- event:\n\t\t\t\tcase err := <-sub.Err():\n\t\t\t\t\treturn err\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\tcase err := <-sub.Err():\n\t\t\t\treturn err\n\t\t\tcase <-quit:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}), nil\n}", "title": "" }, { "docid": "2c4915712a7ee8681dcf38156860625b", "score": "0.49172434", "text": "func (r *Recorder) OnUnbind(fn ObjectUnbind) ObjectUnbind {\n\treturn func(request *osb.UnbindRequest, c *broker.RequestContext) (*broker.UnbindResponse, error) {\n\n\t\tvar id, status string\n\t\tvar bindID, bindStatus string\n\t\terrc := make(chan error)\n\n\t\t// Check to see if instance already exists\n\t\tr.Cli.QueryRow(`SELECT instances_id, instance_status FROM share_instance where instances_id = $1`,\n\t\t\trequest.InstanceID).Scan(&id, &status)\n\n\t\tif id == request.InstanceID && status == statusDeleted || id != request.InstanceID {\n\t\t\terrString := http.StatusText(http.StatusNotFound)\n\t\t\tglog.Error(errString)\n\t\t\treturn nil, osb.HTTPStatusCodeError{\n\t\t\t\tStatusCode: http.StatusNotFound,\n\t\t\t\tErrorMessage: &errString,\n\t\t\t}\n\n\t\t}\n\n\t\t// Check to see if bind already exists\n\t\tr.Cli.QueryRow(`SELECT binding_id, binding_status FROM share_binding where binding_id = $1`,\n\t\t\trequest.BindingID).Scan(&bindID, &bindStatus)\n\n\t\t// v0.1.1 spec:\n\t\t// duplicated unbind return 404\n\t\tif bindID == request.BindingID && bindStatus == statusDeleted {\n\t\t\terrString := errToString(fmt.Errorf(\"Already Unbinded\"))\n\t\t\treturn nil, osb.HTTPStatusCodeError{\n\t\t\t\tStatusCode: http.StatusNotFound,\n\t\t\t\tErrorMessage: &errString,\n\t\t\t}\n\t\t}\n\n\t\tvar uri *string\n\t\tvar err error\n\t\t// DEPRECATED: change to getTargetDatabase\n\t\t// Get credential and ip from ops DB\n\t\t// Query database target information for the service\n\t\t// if e := r.Cli.QueryRow(`SELECT uri FROM credentials where shared_use = true LIMIT 1`).Scan(&uri); e != nil {\n\t\t// \terrString := errToString(e)\n\t\t// \treturn nil, osb.HTTPStatusCodeError{\n\t\t// \t\tStatusCode: http.StatusServiceUnavailable,\n\t\t// \t\tErrorMessage: &errString,\n\t\t// \t}\n\t\t// 
}\n\t\turi, err = r.getTargetDatabase(nil, &id, maxInstance, false)\n\t\tif err != nil {\n\t\t\terrString := fmt.Sprintf(\"getting target database from id: %s failed; %+v\", id, err)\n\t\t\treturn nil, osb.HTTPStatusCodeError{\n\t\t\t\tStatusCode: http.StatusInsufficientStorage,\n\t\t\t\tErrorMessage: &errString,\n\t\t\t}\n\t\t}\n\n\t\t// Pass values to be callable by logic business to connect to target database based on information\n\t\t// from ops db\n\n\t\tgo func() {\n\t\t\terr := pubMsg(uri)\n\t\t\tif err != nil {\n\t\t\t\terrc <- err\n\t\t\t\tglog.Error(err)\n\t\t\t}\n\t\t}()\n\n\t\tres, err := fn(request, c)\n\n\t\t// This section will not handle any error, allowed to print to stderr but should pass error without mutate.\n\t\tif err != nil {\n\t\t\tglog.Error(err)\n\t\t\treturn nil, err\n\t\t}\n\n\t\ttx, err := r.Cli.Begin()\n\t\tdefer func() {\n\t\t\tswitch err {\n\t\t\tcase nil:\n\t\t\t\terr = tx.Commit()\n\t\t\tdefault:\n\t\t\t\ttx.Rollback()\n\t\t\t}\n\t\t}()\n\n\t\tif bindID == request.BindingID && bindStatus == statusExist {\n\t\t\t_, err := r.Cli.Exec(`UPDATE share_binding set binding_status=$1, unbinding_time=$2 where binding_id=$3`,\n\t\t\t\tstatusDeleted, time.Now(), request.BindingID)\n\t\t\tif err != nil {\n\t\t\t\terrString := errToString(err)\n\t\t\t\treturn nil, osb.HTTPStatusCodeError{\n\t\t\t\t\tStatusCode: http.StatusServiceUnavailable,\n\t\t\t\t\tErrorMessage: &errString,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tselect {\n\t\tcase <-errc:\n\t\t\treturn nil, <-errc\n\t\tdefault:\n\t\t\treturn res, nil\n\t\t}\n\t}\n}", "title": "" }, { "docid": "94b74c9a9746031a9ff683556dbb1c25", "score": "0.49144977", "text": "func (_Token *TokenFilterer) FilterMinterRemoved(opts *bind.FilterOpts, account []common.Address) (*TokenMinterRemovedIterator, error) {\n\n\tvar accountRule []interface{}\n\tfor _, accountItem := range account {\n\t\taccountRule = append(accountRule, accountItem)\n\t}\n\n\tlogs, sub, err := _Token.contract.FilterLogs(opts, \"MinterRemoved\", accountRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &TokenMinterRemovedIterator{contract: _Token.contract, event: \"MinterRemoved\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "771c7f1ff1dd0f480b9759e057a6ea69", "score": "0.49030292", "text": "func (_Dospayment *DospaymentFilterer) FilterLogRefundServiceFee(opts *bind.FilterOpts) (*DospaymentLogRefundServiceFeeIterator, error) {\n\n\tlogs, sub, err := _Dospayment.contract.FilterLogs(opts, \"LogRefundServiceFee\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &DospaymentLogRefundServiceFeeIterator{contract: _Dospayment.contract, event: \"LogRefundServiceFee\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "9a78427b6448d8aa8487f8a87119a040", "score": "0.4900936", "text": "func (_KittyAuction *KittyAuctionFilterer) FilterBirth(opts *bind.FilterOpts) (*KittyAuctionBirthIterator, error) {\n\n\tlogs, sub, err := _KittyAuction.contract.FilterLogs(opts, \"Birth\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &KittyAuctionBirthIterator{contract: _KittyAuction.contract, event: \"Birth\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "d98cc0ffbda8fcdeeea285e16eb5cd8d", "score": "0.4897034", "text": "func (_Token *TokenFilterer) WatchBurn(opts *bind.WatchOpts, sink chan<- *TokenBurn, from []common.Address) (event.Subscription, error) {\n\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\n\tlogs, sub, err := _Token.contract.WatchLogs(opts, \"Burn\", 
fromRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event.NewSubscription(func(quit <-chan struct{}) error {\n\t\tdefer sub.Unsubscribe()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase log := <-logs:\n\t\t\t\t// New log arrived, parse the event and forward to the user\n\t\t\t\tevent := new(TokenBurn)\n\t\t\t\tif err := _Token.contract.UnpackLog(event, \"Burn\", log); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tevent.Raw = log\n\n\t\t\t\tselect {\n\t\t\t\tcase sink <- event:\n\t\t\t\tcase err := <-sub.Err():\n\t\t\t\t\treturn err\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\tcase err := <-sub.Err():\n\t\t\t\treturn err\n\t\t\tcase <-quit:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}), nil\n}", "title": "" }, { "docid": "12c515808aeec779cef632585ffe9005", "score": "0.48918235", "text": "func (_F3Devents *F3DeventsFilterer) FilterOnWithdrawAndDistribute(opts *bind.FilterOpts) (*F3DeventsOnWithdrawAndDistributeIterator, error) {\n\n\tlogs, sub, err := _F3Devents.contract.FilterLogs(opts, \"onWithdrawAndDistribute\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &F3DeventsOnWithdrawAndDistributeIterator{contract: _F3Devents.contract, event: \"onWithdrawAndDistribute\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "0471ab54eb1bcaf1a3a232df4dee8b09", "score": "0.48895776", "text": "func (_DataRegistry *DataRegistryFilterer) ParseBundleUnregisteredFromReceipt(receipt *ethTypes.Receipt) (*DataRegistryBundleUnregistered, error) {\n\tfor _, log := range receipt.Logs {\n\t\tif log.Topics[0] == common.HexToHash(\"0x259f3143de0f6ea1e210d29804908efbc46730cacd3ae119f71d72a65229d544\") {\n\t\t\tevent := new(DataRegistryBundleUnregistered)\n\t\t\tif err := _DataRegistry.contract.UnpackLog(event, \"BundleUnregistered\", *log); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\treturn event, nil\n\t\t}\n\t}\n\treturn nil, errors.New(\"BundleUnregistered event not found\")\n}", "title": "" }, { "docid": "48f1a44b34727f5b77a949a7d44f6535", "score": "0.48753402", "text": "func (f *Filter) Drop() error {\n\treturn f.sendCommand(FilterDropCmd)\n}", "title": "" }, { "docid": "e5018dc74c570e7494ab15f5175718ec", "score": "0.48532957", "text": "func (_Supercoin *SupercoinFilterer) WatchBurn(opts *bind.WatchOpts, sink chan<- *SupercoinBurn, from []common.Address) (event.Subscription, error) {\n\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\n\tlogs, sub, err := _Supercoin.contract.WatchLogs(opts, \"Burn\", fromRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event.NewSubscription(func(quit <-chan struct{}) error {\n\t\tdefer sub.Unsubscribe()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase log := <-logs:\n\t\t\t\t// New log arrived, parse the event and forward to the user\n\t\t\t\tevent := new(SupercoinBurn)\n\t\t\t\tif err := _Supercoin.contract.UnpackLog(event, \"Burn\", log); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tevent.Raw = log\n\n\t\t\t\tselect {\n\t\t\t\tcase sink <- event:\n\t\t\t\tcase err := <-sub.Err():\n\t\t\t\t\treturn err\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\tcase err := <-sub.Err():\n\t\t\t\treturn err\n\t\t\tcase <-quit:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}), nil\n}", "title": "" }, { "docid": "a53e4e59c3d896554faa8eb53fd4a510", "score": "0.48504686", "text": "func (_Bridge *BridgeFilterer) FilterRelayerRemoved(opts *bind.FilterOpts, account []common.Address, sender []common.Address) (*BridgeRelayerRemovedIterator, error) {\n\n\tvar 
accountRule []interface{}\n\tfor _, accountItem := range account {\n\t\taccountRule = append(accountRule, accountItem)\n\t}\n\tvar senderRule []interface{}\n\tfor _, senderItem := range sender {\n\t\tsenderRule = append(senderRule, senderItem)\n\t}\n\n\tlogs, sub, err := _Bridge.contract.FilterLogs(opts, \"RelayerRemoved\", accountRule, senderRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &BridgeRelayerRemovedIterator{contract: _Bridge.contract, event: \"RelayerRemoved\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "e0a8e22062fb89f56b316467a1932b5e", "score": "0.4848354", "text": "func (_Staking *StakingFilterer) FilterRewardWithdraw(opts *bind.FilterOpts, sender []common.Address) (*StakingRewardWithdrawIterator, error) {\n\n\tvar senderRule []interface{}\n\tfor _, senderItem := range sender {\n\t\tsenderRule = append(senderRule, senderItem)\n\t}\n\n\tlogs, sub, err := _Staking.contract.FilterLogs(opts, \"RewardWithdraw\", senderRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &StakingRewardWithdrawIterator{contract: _Staking.contract, event: \"RewardWithdraw\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "745124972368006e8516c0196560beb7", "score": "0.4840603", "text": "func (_Bridge *BridgeFilterer) FilterTokenRemoved(opts *bind.FilterOpts, _tokenAddress []common.Address) (*BridgeTokenRemovedIterator, error) {\n\n\tvar _tokenAddressRule []interface{}\n\tfor _, _tokenAddressItem := range _tokenAddress {\n\t\t_tokenAddressRule = append(_tokenAddressRule, _tokenAddressItem)\n\t}\n\n\tlogs, sub, err := _Bridge.contract.FilterLogs(opts, \"TokenRemoved\", _tokenAddressRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &BridgeTokenRemovedIterator{contract: _Bridge.contract, event: \"TokenRemoved\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "1bffb40c073ce4d1a66268e25f7448da", "score": "0.4833512", "text": "func (_Main *MainFilterer) WatchBurn(opts *bind.WatchOpts, sink chan<- *MainBurn, sender []common.Address, to []common.Address) (event.Subscription, error) {\n\n\tvar senderRule []interface{}\n\tfor _, senderItem := range sender {\n\t\tsenderRule = append(senderRule, senderItem)\n\t}\n\n\tvar toRule []interface{}\n\tfor _, toItem := range to {\n\t\ttoRule = append(toRule, toItem)\n\t}\n\n\tlogs, sub, err := _Main.contract.WatchLogs(opts, \"Burn\", senderRule, toRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event.NewSubscription(func(quit <-chan struct{}) error {\n\t\tdefer sub.Unsubscribe()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase log := <-logs:\n\t\t\t\t// New log arrived, parse the event and forward to the user\n\t\t\t\tevent := new(MainBurn)\n\t\t\t\tif err := _Main.contract.UnpackLog(event, \"Burn\", log); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tevent.Raw = log\n\n\t\t\t\tselect {\n\t\t\t\tcase sink <- event:\n\t\t\t\tcase err := <-sub.Err():\n\t\t\t\t\treturn err\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\tcase err := <-sub.Err():\n\t\t\t\treturn err\n\t\t\tcase <-quit:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}), nil\n}", "title": "" }, { "docid": "ad2fd442fdffb102f40ec6739c982a2a", "score": "0.48138246", "text": "func (_StockToken *StockTokenFilterer) FilterAddressRemovedFromWhitelist(opts *bind.FilterOpts, AuthorizedBy []common.Address, AddressRemoved []common.Address) (*StockTokenAddressRemovedFromWhitelistIterator, error) {\n\n\tvar AuthorizedByRule []interface{}\n\tfor _, AuthorizedByItem := range AuthorizedBy {\n\t\tAuthorizedByRule = append(AuthorizedByRule, 
AuthorizedByItem)\n\t}\n\tvar AddressRemovedRule []interface{}\n\tfor _, AddressRemovedItem := range AddressRemoved {\n\t\tAddressRemovedRule = append(AddressRemovedRule, AddressRemovedItem)\n\t}\n\n\tlogs, sub, err := _StockToken.contract.FilterLogs(opts, \"AddressRemovedFromWhitelist\", AuthorizedByRule, AddressRemovedRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &StockTokenAddressRemovedFromWhitelistIterator{contract: _StockToken.contract, event: \"AddressRemovedFromWhitelist\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "bf36eb6cb3d69e7a9ccb979fe920635f", "score": "0.4812226", "text": "func (_KittyOwnership *KittyOwnershipFilterer) FilterBirth(opts *bind.FilterOpts) (*KittyOwnershipBirthIterator, error) {\n\n\tlogs, sub, err := _KittyOwnership.contract.FilterLogs(opts, \"Birth\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &KittyOwnershipBirthIterator{contract: _KittyOwnership.contract, event: \"Birth\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "502e5b951481aaa1d788ce3826e8c90c", "score": "0.48044857", "text": "func (_Broker *BrokerFilterer) FilterWithdraw(opts *bind.FilterOpts, trader []common.Address) (*BrokerWithdrawIterator, error) {\n\n\tvar traderRule []interface{}\n\tfor _, traderItem := range trader {\n\t\ttraderRule = append(traderRule, traderItem)\n\t}\n\n\tlogs, sub, err := _Broker.contract.FilterLogs(opts, \"Withdraw\", traderRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &BrokerWithdrawIterator{contract: _Broker.contract, event: \"Withdraw\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "9394f6b20506143307b49827f71d4723", "score": "0.47818425", "text": "func (_HashDice *HashDiceFilterer) FilterCloseRoundTooLate(opts *bind.FilterOpts) (*HashDiceCloseRoundTooLateIterator, error) {\n\n\tlogs, sub, err := _HashDice.contract.FilterLogs(opts, \"CloseRoundTooLate\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &HashDiceCloseRoundTooLateIterator{contract: _HashDice.contract, event: \"CloseRoundTooLate\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "66cce2d9099f1117d9b6672f425513c4", "score": "0.4771507", "text": "func (_Whitelistable *WhitelistableFilterer) WatchAddressRemovedFromWhitelist(opts *bind.WatchOpts, sink chan<- *WhitelistableAddressRemovedFromWhitelist, AuthorizedBy []common.Address, AddressRemoved []common.Address) (event.Subscription, error) {\n\n\tvar AuthorizedByRule []interface{}\n\tfor _, AuthorizedByItem := range AuthorizedBy {\n\t\tAuthorizedByRule = append(AuthorizedByRule, AuthorizedByItem)\n\t}\n\tvar AddressRemovedRule []interface{}\n\tfor _, AddressRemovedItem := range AddressRemoved {\n\t\tAddressRemovedRule = append(AddressRemovedRule, AddressRemovedItem)\n\t}\n\n\tlogs, sub, err := _Whitelistable.contract.WatchLogs(opts, \"AddressRemovedFromWhitelist\", AuthorizedByRule, AddressRemovedRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event.NewSubscription(func(quit <-chan struct{}) error {\n\t\tdefer sub.Unsubscribe()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase log := <-logs:\n\t\t\t\t// New log arrived, parse the event and forward to the user\n\t\t\t\tevent := new(WhitelistableAddressRemovedFromWhitelist)\n\t\t\t\tif err := _Whitelistable.contract.UnpackLog(event, \"AddressRemovedFromWhitelist\", log); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tevent.Raw = log\n\n\t\t\t\tselect {\n\t\t\t\tcase sink <- event:\n\t\t\t\tcase err := <-sub.Err():\n\t\t\t\t\treturn err\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn 
nil\n\t\t\t\t}\n\t\t\tcase err := <-sub.Err():\n\t\t\t\treturn err\n\t\t\tcase <-quit:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}), nil\n}", "title": "" }, { "docid": "c2e33aaf00c68446f78a429e25266745", "score": "0.47703522", "text": "func (_Activatable *ActivatableFilterer) FilterDeactivate(opts *bind.FilterOpts, _sender []common.Address) (*ActivatableDeactivateIterator, error) {\n\n\tvar _senderRule []interface{}\n\tfor _, _senderItem := range _sender {\n\t\t_senderRule = append(_senderRule, _senderItem)\n\t}\n\n\tlogs, sub, err := _Activatable.contract.FilterLogs(opts, \"Deactivate\", _senderRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &ActivatableDeactivateIterator{contract: _Activatable.contract, event: \"Deactivate\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "c2e33aaf00c68446f78a429e25266745", "score": "0.47703522", "text": "func (_Activatable *ActivatableFilterer) FilterDeactivate(opts *bind.FilterOpts, _sender []common.Address) (*ActivatableDeactivateIterator, error) {\n\n\tvar _senderRule []interface{}\n\tfor _, _senderItem := range _sender {\n\t\t_senderRule = append(_senderRule, _senderItem)\n\t}\n\n\tlogs, sub, err := _Activatable.contract.FilterLogs(opts, \"Deactivate\", _senderRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &ActivatableDeactivateIterator{contract: _Activatable.contract, event: \"Deactivate\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "04bd3eab3ffa0bd21714615ad243dead", "score": "0.47685027", "text": "func (_StockToken *StockTokenFilterer) WatchAddressRemovedFromWhitelist(opts *bind.WatchOpts, sink chan<- *StockTokenAddressRemovedFromWhitelist, AuthorizedBy []common.Address, AddressRemoved []common.Address) (event.Subscription, error) {\n\n\tvar AuthorizedByRule []interface{}\n\tfor _, AuthorizedByItem := range AuthorizedBy {\n\t\tAuthorizedByRule = append(AuthorizedByRule, AuthorizedByItem)\n\t}\n\tvar AddressRemovedRule []interface{}\n\tfor _, AddressRemovedItem := range AddressRemoved {\n\t\tAddressRemovedRule = append(AddressRemovedRule, AddressRemovedItem)\n\t}\n\n\tlogs, sub, err := _StockToken.contract.WatchLogs(opts, \"AddressRemovedFromWhitelist\", AuthorizedByRule, AddressRemovedRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event.NewSubscription(func(quit <-chan struct{}) error {\n\t\tdefer sub.Unsubscribe()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase log := <-logs:\n\t\t\t\t// New log arrived, parse the event and forward to the user\n\t\t\t\tevent := new(StockTokenAddressRemovedFromWhitelist)\n\t\t\t\tif err := _StockToken.contract.UnpackLog(event, \"AddressRemovedFromWhitelist\", log); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tevent.Raw = log\n\n\t\t\t\tselect {\n\t\t\t\tcase sink <- event:\n\t\t\t\tcase err := <-sub.Err():\n\t\t\t\t\treturn err\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\tcase err := <-sub.Err():\n\t\t\t\treturn err\n\t\t\tcase <-quit:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}), nil\n}", "title": "" }, { "docid": "206070f5365dadfc0ea55ddb04444b82", "score": "0.47613922", "text": "func (_Room *RoomFilterer) FilterDeactivate(opts *bind.FilterOpts, _sender []common.Address) (*RoomDeactivateIterator, error) {\n\n\tvar _senderRule []interface{}\n\tfor _, _senderItem := range _sender {\n\t\t_senderRule = append(_senderRule, _senderItem)\n\t}\n\n\tlogs, sub, err := _Room.contract.FilterLogs(opts, \"Deactivate\", _senderRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &RoomDeactivateIterator{contract: _Room.contract, event: 
\"Deactivate\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "df88ebef96b39ce8c3e98158c873fc35", "score": "0.47587323", "text": "func (_EthereumBridge *EthereumBridgeFilterer) FilterHmyLogUnlockCompleted(opts *bind.FilterOpts) (*EthereumBridgeHmyLogUnlockCompletedIterator, error) {\n\n\tlogs, sub, err := _EthereumBridge.contract.FilterLogs(opts, \"HmyLogUnlockCompleted\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &EthereumBridgeHmyLogUnlockCompletedIterator{contract: _EthereumBridge.contract, event: \"HmyLogUnlockCompleted\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "88a6c7fad35634094d8d5cfc9ffb3912", "score": "0.47427142", "text": "func (_KittyMinting *KittyMintingFilterer) FilterBirth(opts *bind.FilterOpts) (*KittyMintingBirthIterator, error) {\n\n\tlogs, sub, err := _KittyMinting.contract.FilterLogs(opts, \"Birth\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &KittyMintingBirthIterator{contract: _KittyMinting.contract, event: \"Birth\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "5e9f0b69e81f0e0ea0c59e26dba435ff", "score": "0.4736214", "text": "func (_BEP20Token *BEP20TokenFilterer) FilterTransfer(opts *bind.FilterOpts, from []common.Address, to []common.Address) (*BEP20TokenTransferIterator, error) {\n\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\tvar toRule []interface{}\n\tfor _, toItem := range to {\n\t\ttoRule = append(toRule, toItem)\n\t}\n\n\tlogs, sub, err := _BEP20Token.contract.FilterLogs(opts, \"Transfer\", fromRule, toRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &BEP20TokenTransferIterator{contract: _BEP20Token.contract, event: \"Transfer\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "ffb0474066c4f5ab88d6b9d528a40779", "score": "0.4732241", "text": "func (_Dospayment *DospaymentCaller) DropburnToken(opts *bind.CallOpts) (common.Address, error) {\n\tvar (\n\t\tret0 = new(common.Address)\n\t)\n\tout := ret0\n\terr := _Dospayment.contract.Call(opts, out, \"dropburnToken\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "0111ec41a7b893487569ee0cfab6da13", "score": "0.47181696", "text": "func (_Sigmacore *SigmacoreFilterer) FilterLOGDENORMUPDATED(opts *bind.FilterOpts, token []common.Address) (*SigmacoreLOGDENORMUPDATEDIterator, error) {\n\n\tvar tokenRule []interface{}\n\tfor _, tokenItem := range token {\n\t\ttokenRule = append(tokenRule, tokenItem)\n\t}\n\n\tlogs, sub, err := _Sigmacore.contract.FilterLogs(opts, \"LOG_DENORM_UPDATED\", tokenRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &SigmacoreLOGDENORMUPDATEDIterator{contract: _Sigmacore.contract, event: \"LOG_DENORM_UPDATED\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "e6a7e3eb8335b87b942221da3a5c8aac", "score": "0.47131646", "text": "func (_ZkPoDExchange *ZkPoDExchangeTransactor) BobUnDeposit(opts *bind.TransactOpts, _to common.Address) (*types.Transaction, error) {\n\treturn _ZkPoDExchange.contract.Transact(opts, \"bobUnDeposit\", _to)\n}", "title": "" }, { "docid": "db8adb309250feccf75098fe1b7d7b56", "score": "0.4709823", "text": "func (*serviceBroker) Unbind(instanceID, bindingID string, details brokerapi.UnbindDetails) error {\n\n\tlogger.Info(\"unbind-called\", lager.Data{\"instanceId\": instanceID, \"bindingId\": bindingID, \"details\": details})\n\n\treturn nil\n}", "title": "" }, { "docid": "ff3a3f3d4e7d770840f96ee1b86d4da1", "score": "0.47053602", "text": "func (_Bridge *BridgeFilterer) WatchRefundClaimed(opts 
*bind.WatchOpts, sink chan<- *BridgeRefundClaimed, _userAddress []common.Address, _tokenAddress []common.Address) (event.Subscription, error) {\n\n\tvar _userAddressRule []interface{}\n\tfor _, _userAddressItem := range _userAddress {\n\t\t_userAddressRule = append(_userAddressRule, _userAddressItem)\n\t}\n\tvar _tokenAddressRule []interface{}\n\tfor _, _tokenAddressItem := range _tokenAddress {\n\t\t_tokenAddressRule = append(_tokenAddressRule, _tokenAddressItem)\n\t}\n\n\tlogs, sub, err := _Bridge.contract.WatchLogs(opts, \"RefundClaimed\", _userAddressRule, _tokenAddressRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event.NewSubscription(func(quit <-chan struct{}) error {\n\t\tdefer sub.Unsubscribe()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase log := <-logs:\n\t\t\t\t// New log arrived, parse the event and forward to the user\n\t\t\t\tevent := new(BridgeRefundClaimed)\n\t\t\t\tif err := _Bridge.contract.UnpackLog(event, \"RefundClaimed\", log); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tevent.Raw = log\n\n\t\t\t\tselect {\n\t\t\t\tcase sink <- event:\n\t\t\t\tcase err := <-sub.Err():\n\t\t\t\t\treturn err\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\tcase err := <-sub.Err():\n\t\t\t\treturn err\n\t\t\tcase <-quit:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}), nil\n}", "title": "" }, { "docid": "e6b47fd375c05a1af1fc5d75d0294ba3", "score": "0.47051203", "text": "func (_BondingManager *BondingManagerCaller) UnbondingPeriod(opts *bind.CallOpts) (uint64, error) {\n\tvar (\n\t\tret0 = new(uint64)\n\t)\n\tout := ret0\n\terr := _BondingManager.contract.Call(opts, out, \"unbondingPeriod\")\n\treturn *ret0, err\n}", "title": "" }, { "docid": "7354ac96a2ce995b776bf9099cba3b92", "score": "0.4703624", "text": "func Filter(addrs []ma.Multiaddr) []ma.Multiaddr {\n\traddrs := make([]ma.Multiaddr, 0, len(addrs))\n\tfor _, addr := range addrs {\n\t\tif isRelayAddr(addr) {\n\t\t\tcontinue\n\t\t}\n\t\traddrs = append(raddrs, addr)\n\t}\n\treturn raddrs\n}", "title": "" }, { "docid": "276b00d41c6acd52cba8897cf7a3619c", "score": "0.46982554", "text": "func ProcessUnfollowEvent(e Event) {\n\n\tlog.Println(\"Bot has been unfollowed by user: \" + e.Source.UserId)\n\n}", "title": "" }, { "docid": "791177c6e73b3bdca51414ac43659fc8", "score": "0.4697817", "text": "func (_Erc20basic *Erc20basicFilterer) FilterWithdrawal(opts *bind.FilterOpts, src []common.Address) (*Erc20basicWithdrawalIterator, error) {\n\n\tvar srcRule []interface{}\n\tfor _, srcItem := range src {\n\t\tsrcRule = append(srcRule, srcItem)\n\t}\n\n\tlogs, sub, err := _Erc20basic.contract.FilterLogs(opts, \"Withdrawal\", srcRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &Erc20basicWithdrawalIterator{contract: _Erc20basic.contract, event: \"Withdrawal\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "608727bde055c09806739a93ebbc049e", "score": "0.46849567", "text": "func (s *BaseNuggetListener) ExitFilter(ctx *FilterContext) {}", "title": "" }, { "docid": "a7aadd24326491c08efb7125b1c6f125", "score": "0.46818477", "text": "func (_Sigmacore *SigmacoreFilterer) FilterLOGSWAPFEEUPDATED(opts *bind.FilterOpts) (*SigmacoreLOGSWAPFEEUPDATEDIterator, error) {\n\n\tlogs, sub, err := _Sigmacore.contract.FilterLogs(opts, \"LOG_SWAP_FEE_UPDATED\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &SigmacoreLOGSWAPFEEUPDATEDIterator{contract: _Sigmacore.contract, event: \"LOG_SWAP_FEE_UPDATED\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "ef3bdcd85b8185bf968f6ecbda3b8fc6", "score": 
"0.4676974", "text": "func (_BondingManager *BondingManagerCallerSession) UnbondingPeriod() (uint64, error) {\n\treturn _BondingManager.Contract.UnbondingPeriod(&_BondingManager.CallOpts)\n}", "title": "" }, { "docid": "d64a2718a4bcf71a5c6ca1cf987cb835", "score": "0.46750012", "text": "func (_SfcContract *SfcContractFilterer) FilterUndelegated(opts *bind.FilterOpts, delegator []common.Address, toValidatorID []*big.Int, wrID []*big.Int) (*SfcContractUndelegatedIterator, error) {\n\n\tvar delegatorRule []interface{}\n\tfor _, delegatorItem := range delegator {\n\t\tdelegatorRule = append(delegatorRule, delegatorItem)\n\t}\n\tvar toValidatorIDRule []interface{}\n\tfor _, toValidatorIDItem := range toValidatorID {\n\t\ttoValidatorIDRule = append(toValidatorIDRule, toValidatorIDItem)\n\t}\n\tvar wrIDRule []interface{}\n\tfor _, wrIDItem := range wrID {\n\t\twrIDRule = append(wrIDRule, wrIDItem)\n\t}\n\n\tlogs, sub, err := _SfcContract.contract.FilterLogs(opts, \"Undelegated\", delegatorRule, toValidatorIDRule, wrIDRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &SfcContractUndelegatedIterator{contract: _SfcContract.contract, event: \"Undelegated\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "446f236da9c67c30d956e9bcabfc52d0", "score": "0.46728432", "text": "func (_IdentityRegistry *IdentityRegistryFilterer) WatchAddressRemovedFromWhitelist(opts *bind.WatchOpts, sink chan<- *IdentityRegistryAddressRemovedFromWhitelist, AuthorizedBy []common.Address, AddressRemoved []common.Address) (event.Subscription, error) {\n\n\tvar AuthorizedByRule []interface{}\n\tfor _, AuthorizedByItem := range AuthorizedBy {\n\t\tAuthorizedByRule = append(AuthorizedByRule, AuthorizedByItem)\n\t}\n\tvar AddressRemovedRule []interface{}\n\tfor _, AddressRemovedItem := range AddressRemoved {\n\t\tAddressRemovedRule = append(AddressRemovedRule, AddressRemovedItem)\n\t}\n\n\tlogs, sub, err := _IdentityRegistry.contract.WatchLogs(opts, \"AddressRemovedFromWhitelist\", AuthorizedByRule, AddressRemovedRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event.NewSubscription(func(quit <-chan struct{}) error {\n\t\tdefer sub.Unsubscribe()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase log := <-logs:\n\t\t\t\t// New log arrived, parse the event and forward to the user\n\t\t\t\tevent := new(IdentityRegistryAddressRemovedFromWhitelist)\n\t\t\t\tif err := _IdentityRegistry.contract.UnpackLog(event, \"AddressRemovedFromWhitelist\", log); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tevent.Raw = log\n\n\t\t\t\tselect {\n\t\t\t\tcase sink <- event:\n\t\t\t\tcase err := <-sub.Err():\n\t\t\t\t\treturn err\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\tcase err := <-sub.Err():\n\t\t\t\treturn err\n\t\t\tcase <-quit:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}), nil\n}", "title": "" }, { "docid": "96d270d8c1c08fe273739e78beb99798", "score": "0.4669593", "text": "func (_DFedUSDD *DFedUSDDFilterer) FilterDeposit(opts *bind.FilterOpts, from []common.Address, to []common.Address) (*DFedUSDDDepositIterator, error) {\n\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\tvar toRule []interface{}\n\tfor _, toItem := range to {\n\t\ttoRule = append(toRule, toItem)\n\t}\n\n\tlogs, sub, err := _DFedUSDD.contract.FilterLogs(opts, \"Deposit\", fromRule, toRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &DFedUSDDDepositIterator{contract: _DFedUSDD.contract, event: \"Deposit\", logs: logs, sub: sub}, nil\n}", "title": "" }, { 
"docid": "68d4358cf13028d2da5ad1b817f98fe8", "score": "0.46627554", "text": "func (_Broker *BrokerFilterer) FilterDeposit(opts *bind.FilterOpts, trader []common.Address) (*BrokerDepositIterator, error) {\n\n\tvar traderRule []interface{}\n\tfor _, traderItem := range trader {\n\t\ttraderRule = append(traderRule, traderItem)\n\t}\n\n\tlogs, sub, err := _Broker.contract.FilterLogs(opts, \"Deposit\", traderRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &BrokerDepositIterator{contract: _Broker.contract, event: \"Deposit\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "02b12684b7d003f1a6fe1468326cb2de", "score": "0.46617654", "text": "func (_KittyAuction *KittyAuctionFilterer) FilterTransfer(opts *bind.FilterOpts) (*KittyAuctionTransferIterator, error) {\n\n\tlogs, sub, err := _KittyAuction.contract.FilterLogs(opts, \"Transfer\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &KittyAuctionTransferIterator{contract: _KittyAuction.contract, event: \"Transfer\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "b45b538f7d3fddf9bbcb172978b9b9e5", "score": "0.4661236", "text": "func (_IdentityRegistry *IdentityRegistryFilterer) FilterAddressRemovedFromWhitelist(opts *bind.FilterOpts, AuthorizedBy []common.Address, AddressRemoved []common.Address) (*IdentityRegistryAddressRemovedFromWhitelistIterator, error) {\n\n\tvar AuthorizedByRule []interface{}\n\tfor _, AuthorizedByItem := range AuthorizedBy {\n\t\tAuthorizedByRule = append(AuthorizedByRule, AuthorizedByItem)\n\t}\n\tvar AddressRemovedRule []interface{}\n\tfor _, AddressRemovedItem := range AddressRemoved {\n\t\tAddressRemovedRule = append(AddressRemovedRule, AddressRemovedItem)\n\t}\n\n\tlogs, sub, err := _IdentityRegistry.contract.FilterLogs(opts, \"AddressRemovedFromWhitelist\", AuthorizedByRule, AddressRemovedRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &IdentityRegistryAddressRemovedFromWhitelistIterator{contract: _IdentityRegistry.contract, event: \"AddressRemovedFromWhitelist\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "b5ab4db4a5cc9dc63c4e700cbf62b7ac", "score": "0.46479", "text": "func (_F3Devents *F3DeventsFilterer) FilterOnWithdraw(opts *bind.FilterOpts, playerID []*big.Int) (*F3DeventsOnWithdrawIterator, error) {\n\n\tvar playerIDRule []interface{}\n\tfor _, playerIDItem := range playerID {\n\t\tplayerIDRule = append(playerIDRule, playerIDItem)\n\t}\n\n\tlogs, sub, err := _F3Devents.contract.FilterLogs(opts, \"onWithdraw\", playerIDRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &F3DeventsOnWithdrawIterator{contract: _F3Devents.contract, event: \"onWithdraw\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "362b58fb3d0011d82e34c9ef085310fb", "score": "0.46471107", "text": "func (_CZRX *CZRXFilterer) FilterRedeem(opts *bind.FilterOpts) (*CZRXRedeemIterator, error) {\n\n\tlogs, sub, err := _CZRX.contract.FilterLogs(opts, \"Redeem\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &CZRXRedeemIterator{contract: _CZRX.contract, event: \"Redeem\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "659631eea6c41f87132e705aaeeddf3e", "score": "0.46450806", "text": "func (a *Adjudicator) filterWithdrawn(ctx context.Context, fundingID [32]byte, asset assetHolder) (bool, error) {\n\tfilterOpts, err := a.NewFilterOpts(ctx)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\titer, err := asset.FilterWithdrawn(filterOpts, [][32]byte{fundingID})\n\tif err != nil {\n\t\treturn false, errors.Wrap(err, \"creating iterator\")\n\t}\n\t// 
nolint:errcheck\n\tdefer iter.Close()\n\n\tif !iter.Next() {\n\t\treturn false, errors.Wrap(iter.Error(), \"iterating\")\n\t}\n\t// Event found\n\treturn true, nil\n}", "title": "" }, { "docid": "6e8eb908889c197158ba3600c30b8005", "score": "0.46402663", "text": "func (_Broker *BrokerFilterer) FilterTradeFailed(opts *bind.FilterOpts) (*BrokerTradeFailedIterator, error) {\n\n\tlogs, sub, err := _Broker.contract.FilterLogs(opts, \"TradeFailed\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &BrokerTradeFailedIterator{contract: _Broker.contract, event: \"TradeFailed\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "6f2269a393513e58817e89fd76bd861f", "score": "0.46302405", "text": "func (_StockToken *StockTokenFilterer) FilterTransfer(opts *bind.FilterOpts, from []common.Address, to []common.Address) (*StockTokenTransferIterator, error) {\n\n\tvar fromRule []interface{}\n\tfor _, fromItem := range from {\n\t\tfromRule = append(fromRule, fromItem)\n\t}\n\tvar toRule []interface{}\n\tfor _, toItem := range to {\n\t\ttoRule = append(toRule, toItem)\n\t}\n\n\tlogs, sub, err := _StockToken.contract.FilterLogs(opts, \"Transfer\", fromRule, toRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &StockTokenTransferIterator{contract: _StockToken.contract, event: \"Transfer\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "5c3bf38961f9d97e82f5f9af8d865018", "score": "0.46285197", "text": "func (_KittyBase *KittyBaseFilterer) FilterBirth(opts *bind.FilterOpts) (*KittyBaseBirthIterator, error) {\n\n\tlogs, sub, err := _KittyBase.contract.FilterLogs(opts, \"Birth\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &KittyBaseBirthIterator{contract: _KittyBase.contract, event: \"Birth\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "52710c7905cf128884ee6fb921ad11ab", "score": "0.4621309", "text": "func (_BondingManager *BondingManagerSession) UnbondingPeriod() (uint64, error) {\n\treturn _BondingManager.Contract.UnbondingPeriod(&_BondingManager.CallOpts)\n}", "title": "" }, { "docid": "5ef4f7356144e93d7fb824627cc3125a", "score": "0.46207234", "text": "func (_Bondcoin *BondcoinFilterer) FilterTransfer(opts *bind.FilterOpts, _from []common.Address, _to []common.Address) (*BondcoinTransferIterator, error) {\n\n\tvar _fromRule []interface{}\n\tfor _, _fromItem := range _from {\n\t\t_fromRule = append(_fromRule, _fromItem)\n\t}\n\tvar _toRule []interface{}\n\tfor _, _toItem := range _to {\n\t\t_toRule = append(_toRule, _toItem)\n\t}\n\n\tlogs, sub, err := _Bondcoin.contract.FilterLogs(opts, \"Transfer\", _fromRule, _toRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &BondcoinTransferIterator{contract: _Bondcoin.contract, event: \"Transfer\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "956b0375bfa577838a2e36213e5cf3e6", "score": "0.46207047", "text": "func (_FeeBankContract *FeeBankContractFilterer) FilterDepositEvent(opts *bind.FilterOpts) (*FeeBankContractDepositEventIterator, error) {\n\n\tlogs, sub, err := _FeeBankContract.contract.FilterLogs(opts, \"DepositEvent\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &FeeBankContractDepositEventIterator{contract: _FeeBankContract.contract, event: \"DepositEvent\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "185b1469ded523097de6b9b136c52c10", "score": "0.46108675", "text": "func (_KittyCore *KittyCoreFilterer) FilterBirth(opts *bind.FilterOpts) (*KittyCoreBirthIterator, error) {\n\n\tlogs, sub, err := _KittyCore.contract.FilterLogs(opts, \"Birth\")\n\tif err 
!= nil {\n\t\treturn nil, err\n\t}\n\treturn &KittyCoreBirthIterator{contract: _KittyCore.contract, event: \"Birth\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "99974aa225c319972e434967f16d9710", "score": "0.46076536", "text": "func (_Erc20basic *Erc20basicFilterer) FilterDeposit(opts *bind.FilterOpts, dst []common.Address) (*Erc20basicDepositIterator, error) {\n\n\tvar dstRule []interface{}\n\tfor _, dstItem := range dst {\n\t\tdstRule = append(dstRule, dstItem)\n\t}\n\n\tlogs, sub, err := _Erc20basic.contract.FilterLogs(opts, \"Deposit\", dstRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &Erc20basicDepositIterator{contract: _Erc20basic.contract, event: \"Deposit\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "3d56a133daa1bd33ed7c7560b35d5d95", "score": "0.4605763", "text": "func (_DummyERC20Token *DummyERC20TokenFilterer) FilterTransfer(opts *bind.FilterOpts, _from []common.Address, _to []common.Address) (*DummyERC20TokenTransferIterator, error) {\n\n\tvar _fromRule []interface{}\n\tfor _, _fromItem := range _from {\n\t\t_fromRule = append(_fromRule, _fromItem)\n\t}\n\tvar _toRule []interface{}\n\tfor _, _toItem := range _to {\n\t\t_toRule = append(_toRule, _toItem)\n\t}\n\n\tlogs, sub, err := _DummyERC20Token.contract.FilterLogs(opts, \"Transfer\", _fromRule, _toRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &DummyERC20TokenTransferIterator{contract: _DummyERC20Token.contract, event: \"Transfer\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "33bbdf0b455cb035e19988a0b19901b2", "score": "0.4605095", "text": "func (_Staking *StakingFilterer) WatchUpdateUnbondDuration(opts *bind.WatchOpts, sink chan<- *StakingUpdateUnbondDuration) (event.Subscription, error) {\n\n\tlogs, sub, err := _Staking.contract.WatchLogs(opts, \"UpdateUnbondDuration\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn event.NewSubscription(func(quit <-chan struct{}) error {\n\t\tdefer sub.Unsubscribe()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase log := <-logs:\n\t\t\t\t// New log arrived, parse the event and forward to the user\n\t\t\t\tevent := new(StakingUpdateUnbondDuration)\n\t\t\t\tif err := _Staking.contract.UnpackLog(event, \"UpdateUnbondDuration\", log); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tevent.Raw = log\n\n\t\t\t\tselect {\n\t\t\t\tcase sink <- event:\n\t\t\t\tcase err := <-sub.Err():\n\t\t\t\t\treturn err\n\t\t\t\tcase <-quit:\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\tcase err := <-sub.Err():\n\t\t\t\treturn err\n\t\t\tcase <-quit:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}), nil\n}", "title": "" }, { "docid": "1f7163125318e9adc2e38040900b3fc8", "score": "0.45953363", "text": "func (_ModularLong *ModularLongFilterer) FilterOnWithdrawAndDistribute(opts *bind.FilterOpts) (*ModularLongOnWithdrawAndDistributeIterator, error) {\n\n\tlogs, sub, err := _ModularLong.contract.FilterLogs(opts, \"onWithdrawAndDistribute\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &ModularLongOnWithdrawAndDistributeIterator{contract: _ModularLong.contract, event: \"onWithdrawAndDistribute\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "925b4d6177b8f8fc24a90de5ba405a98", "score": "0.45953065", "text": "func (_Main *MainFilterer) ParseBurn(log types.Log) (*MainBurn, error) {\n\tevent := new(MainBurn)\n\tif err := _Main.contract.UnpackLog(event, \"Burn\", log); err != nil {\n\t\treturn nil, err\n\t}\n\tevent.Raw = log\n\treturn event, nil\n}", "title": "" }, { "docid": "d55525c8718eef62de357974ea070d5a", "score": 
"0.45925567", "text": "func (hdlr *NullHandler) Filter(*LogRecord) bool {\n\treturn true\n}", "title": "" }, { "docid": "dabc32a58a7556ce8b4f8974b5faef14", "score": "0.4591861", "text": "func (g *GossipCollector) DropRPC(*pubsub.RPC, peer.ID) {}", "title": "" }, { "docid": "e5a999aef83f7c6b978c787b020bd444", "score": "0.4591834", "text": "func (_FoMo3Dlong *FoMo3DlongFilterer) FilterOnWithdrawAndDistribute(opts *bind.FilterOpts) (*FoMo3DlongOnWithdrawAndDistributeIterator, error) {\n\n\tlogs, sub, err := _FoMo3Dlong.contract.FilterLogs(opts, \"onWithdrawAndDistribute\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &FoMo3DlongOnWithdrawAndDistributeIterator{contract: _FoMo3Dlong.contract, event: \"onWithdrawAndDistribute\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "d00516274e3e2820337bdfb4cffba220", "score": "0.45746648", "text": "func (i *HasMetricsAdapter) Filter(ctx context.Context, ref *corev2.ResourceReference, event *corev2.Event) (bool, error) {\n\t// Prepare log entry\n\tfields := utillogging.EventFields(event, false)\n\tfields[\"pipeline\"] = corev2.ContextPipeline(ctx)\n\tfields[\"pipeline_workflow\"] = corev2.ContextPipelineWorkflow(ctx)\n\n\t// Deny an event if it does not have metrics\n\tif !event.HasMetrics() {\n\t\tlogger.WithFields(fields).Debug(\"denying event without metrics\")\n\t\treturn true, nil\n\t}\n\n\treturn false, nil\n}", "title": "" }, { "docid": "90c4b90038437aeabfdd558fb583405a", "score": "0.4572712", "text": "func (_FluxAggregator *FluxAggregatorFilterer) FilterAvailableFundsUpdated(opts *bind.FilterOpts, amount []*big.Int) (*FluxAggregatorAvailableFundsUpdatedIterator, error) {\n\n\tvar amountRule []interface{}\n\tfor _, amountItem := range amount {\n\t\tamountRule = append(amountRule, amountItem)\n\t}\n\n\tlogs, sub, err := _FluxAggregator.contract.FilterLogs(opts, \"AvailableFundsUpdated\", amountRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &FluxAggregatorAvailableFundsUpdatedIterator{contract: _FluxAggregator.contract, event: \"AvailableFundsUpdated\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "f4f666a3cd7f5df9d3421f6b308d829b", "score": "0.45724496", "text": "func (_HashDice *HashDiceFilterer) FilterWithdrawed(opts *bind.FilterOpts) (*HashDiceWithdrawedIterator, error) {\n\n\tlogs, sub, err := _HashDice.contract.FilterLogs(opts, \"Withdrawed\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &HashDiceWithdrawedIterator{contract: _HashDice.contract, event: \"Withdrawed\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "126e6ac8e84b53690be6882362fff7b4", "score": "0.45707878", "text": "func (_BaseContent *BaseContentFilterer) FilterInsufficientFunds(opts *bind.FilterOpts) (*BaseContentInsufficientFundsIterator, error) {\n\n\tlogs, sub, err := _BaseContent.contract.FilterLogs(opts, \"InsufficientFunds\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &BaseContentInsufficientFundsIterator{contract: _BaseContent.contract, event: \"InsufficientFunds\", logs: logs, sub: sub}, nil\n}", "title": "" }, { "docid": "3ed0accdb29a7a84a3de355b60a198e3", "score": "0.45677632", "text": "func (_EtherToken *EtherTokenFilterer) FilterTransfer(opts *bind.FilterOpts, source []common.Address, to []common.Address) (*EtherTokenTransferIterator, error) {\n\n\tvar sourceRule []interface{}\n\tfor _, sourceItem := range source {\n\t\tsourceRule = append(sourceRule, sourceItem)\n\t}\n\tvar toRule []interface{}\n\tfor _, toItem := range to {\n\t\ttoRule = append(toRule, toItem)\n\t}\n\n\tlogs, sub, 
err := _EtherToken.contract.FilterLogs(opts, \"Transfer\", sourceRule, toRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &EtherTokenTransferIterator{contract: _EtherToken.contract, event: \"Transfer\", logs: logs, sub: sub}, nil\n}", "title": "" } ]
739ff6f63699ec3f511c01f0e3d03d8c
WithDstPort sets the destination port of the TCP header to the specified value. To generate a range of destination ports, use DstPortRange() instead.
[ { "docid": "9970336897226ddb0f523ec2a9f3070d", "score": "0.73405313", "text": "func (h *UDPHeader) WithDstPort(port uint16) *UDPHeader {\n\th.pb.DstPort = intRangeSingle(uint32(port))\n\treturn h\n}", "title": "" } ]
[ { "docid": "01abef53f632d3f92a3797c6bd1a6d1f", "score": "0.81068623", "text": "func (h *TCPHeader) WithDstPort(port uint16) *TCPHeader {\n\th.pb.DstPort = intRangeSingle(uint32(port))\n\treturn h\n}", "title": "" }, { "docid": "38698914e94274d6473f2ffd75af5ea7", "score": "0.67584467", "text": "func (h *TCPHeader) WithSrcPort(port uint16) *TCPHeader {\n\th.pb.SrcPort = intRangeSingle(uint32(port))\n\treturn h\n}", "title": "" }, { "docid": "b20a9aa0a2e016fce1be145804e3d940", "score": "0.6650346", "text": "func (h *UDPHeader) WithSrcPort(port uint16) *UDPHeader {\n\th.pb.SrcPort = intRangeSingle(uint32(port))\n\treturn h\n}", "title": "" }, { "docid": "387043a3f65356eb90eb4b8530c1945d", "score": "0.61823463", "text": "func (b DataPullReqBuilder) WithDst(dst sim.Port) DataPullReqBuilder {\n\tb.dst = dst\n\treturn b\n}", "title": "" }, { "docid": "8de5fbd04e0daf3cff5f50ba4f06e38c", "score": "0.58059675", "text": "func (b TCP) SetDestinationPort(port uint16) {\n\tbinary.BigEndian.PutUint16(b[TCPDstPortOffset:], port)\n}", "title": "" }, { "docid": "59485ac6bcbdb6020121dc318f480270", "score": "0.5805364", "text": "func (b PageMigrationReqToPMCBuilder) WithDst(dst sim.Port) PageMigrationReqToPMCBuilder {\n\tb.dst = dst\n\treturn b\n}", "title": "" }, { "docid": "d64f2c55bf32c3f600fa9199d8d924f3", "score": "0.575438", "text": "func (b DataPullRspBuilder) WithDst(dst sim.Port) DataPullRspBuilder {\n\tb.dst = dst\n\treturn b\n}", "title": "" }, { "docid": "8350d3abf328eb1aa66d785b87d1bb64", "score": "0.5731954", "text": "func (b PageMigrationRspFromPMCBuilder) WithDst(dst sim.Port) PageMigrationRspFromPMCBuilder {\n\tb.dst = dst\n\treturn b\n}", "title": "" }, { "docid": "2228a26866870397f7c6a3d5deea3f59", "score": "0.56279373", "text": "func (b TCP) SetSourcePort(port uint16) {\n\tbinary.BigEndian.PutUint16(b[TCPSrcPortOffset:], port)\n}", "title": "" }, { "docid": "3530d5f0a53ff6aae759fc91cd5e3c7c", "score": "0.55467963", "text": "func (ex *ExcludeConfig) AddSrcPort(port string) error {\n\ti, err := strconv.ParseInt(port, 10, 16)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif ex.SrcPorts == nil {\n\t\tex.SrcPorts = map[uint16]bool{}\n\t}\n\tex.SrcPorts[uint16(i)] = true\n\treturn nil\n}", "title": "" }, { "docid": "b65510838caede8ae978a33b4d5ef62c", "score": "0.55145955", "text": "func WithPort(p string) Port {\n\treturn Port(p)\n}", "title": "" }, { "docid": "ca8d635ee6888629c9dbff83989a102b", "score": "0.54581785", "text": "func (h *IPv4Header) WithDstAddress(addr string) *IPv4Header {\n\th.pb.DstAddr = addrRangeSingle(addr)\n\treturn h\n}", "title": "" }, { "docid": "a7853c019b68e6719e1fedcd572fa774", "score": "0.5452097", "text": "func (h *IPv6Header) WithDstAddress(addr string) *IPv6Header {\n\th.pb.DstAddr = addrRangeSingle(addr)\n\treturn h\n}", "title": "" }, { "docid": "21f8ab49abeb7f977484b6e43076a03b", "score": "0.53803766", "text": "func (o *IpEvent) SetSrcPort(v int32) {\n\to.SrcPort = &v\n}", "title": "" }, { "docid": "5fe75aff09cbca71dd8376fb7265d3ef", "score": "0.5368262", "text": "func (b TCP) DestinationPort() uint16 {\n\treturn binary.BigEndian.Uint16(b[TCPDstPortOffset:])\n}", "title": "" }, { "docid": "04f1577ddb6ff11b1d33a884cccf3be8", "score": "0.5332246", "text": "func (h *EthernetHeader) WithDstAddress(addr string) *EthernetHeader {\n\th.pb.DstAddr = addrRangeSingle(addr)\n\treturn h\n}", "title": "" }, { "docid": "eb2f81ff64eed7f3ba0638c8b5b557a2", "score": "0.5203465", "text": "func (h *TCPHeader) DstPortRange() *UIntRange {\n\tif h.pb.DstPort == nil {\n\t\th.pb.DstPort 
= newPortRange()\n\t}\n\treturn &UIntRange{pb: h.pb.DstPort}\n}", "title": "" }, { "docid": "84c94ad5ae0cc11aa4257ff4fce8043a", "score": "0.5137689", "text": "func WithPort(port uint16) ConfigOption {\n\treturn func(c *Config) {\n\t\tc.port = port\n\t}\n}", "title": "" }, { "docid": "9c516bf54c03e803c32ab1dd021ed729", "score": "0.51237684", "text": "func WithPort(port int) Option {\n\treturn func(s *Server) error {\n\t\ts.Port = port\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "03c5f379a54ed030a44e56f0c98af7ac", "score": "0.51058173", "text": "func EthDst(ethDst uint64) *ofp.OfpOxmOfbField {\n\treturn &ofp.OfpOxmOfbField{Type: ETH_DST, Value: &ofp.OfpOxmOfbField_TableMetadata{TableMetadata: ethDst}}\n}", "title": "" }, { "docid": "f19742563318d7686c5cc51cee814698", "score": "0.5066992", "text": "func (o *NiatelemetryHttpsAclFilterDetails) SetSrcToPort(v string) {\n\to.SrcToPort = &v\n}", "title": "" }, { "docid": "591f54f6f5e55ab647ade8174e845c89", "score": "0.5063691", "text": "func WithDashboardPort(dashboardPort int) SpiceDbOption {\n\treturn func(s *Node) {\n\t\ts.DashboardPort = dashboardPort\n\t}\n}", "title": "" }, { "docid": "6785d728276c44f99135f4561d9907e1", "score": "0.5034019", "text": "func WithPort(Port string) dbConfigParam {\n\treturn func(c *DBConfig) {\n\t\tc.Port = Port\n\t}\n}", "title": "" }, { "docid": "549c0f2e4835b96fa5a43b407861743c", "score": "0.5029147", "text": "func SourcePort(port string) ChannelOption {\n\treturn func(c *channelOptions) {\n\t\tc.sourcePort = port\n\t}\n}", "title": "" }, { "docid": "549c0f2e4835b96fa5a43b407861743c", "score": "0.5029147", "text": "func SourcePort(port string) ChannelOption {\n\treturn func(c *channelOptions) {\n\t\tc.sourcePort = port\n\t}\n}", "title": "" }, { "docid": "76a7f7f3bf0a8796b29938a71075f48c", "score": "0.49870512", "text": "func (b udp) destinationPort() uint16 {\n\treturn binary.BigEndian.Uint16(b[udpDstPort:])\n}", "title": "" }, { "docid": "41d9381c50e52b200d9f1922e8bb5535", "score": "0.4981501", "text": "func (b TCP) SourcePort() uint16 {\n\treturn binary.BigEndian.Uint16(b[TCPSrcPortOffset:])\n}", "title": "" }, { "docid": "448697eee175276173f31b9a03033e03", "score": "0.49660096", "text": "func (u ExportDBUpdater) SetPort(port int) ExportDBUpdater {\n\tu.fields[string(ExportDBDBSchema.Port)] = port\n\treturn u\n}", "title": "" }, { "docid": "d8b1749a9dc9718826331e43030b78e3", "score": "0.49119458", "text": "func (o *IpEvent) SetDestPort(v int32) {\n\to.DestPort = &v\n}", "title": "" }, { "docid": "c987ff6780d2954d84ad8a28c2d34fda", "score": "0.4910443", "text": "func (b udp) sourcePort() uint16 {\n\treturn binary.BigEndian.Uint16(b[udpSrcPort:])\n}", "title": "" }, { "docid": "f4297290aa4bdf9976f9899ff26fe0ca", "score": "0.4881402", "text": "func (o *NiatelemetryHttpsAclFilterDetails) SetDestToPort(v string) {\n\to.DestToPort = &v\n}", "title": "" }, { "docid": "d3dee1e606891bf726bbaeccf9755443", "score": "0.48385537", "text": "func (b DataPullRspBuilder) WithSrc(src sim.Port) DataPullRspBuilder {\n\tb.src = src\n\treturn b\n}", "title": "" }, { "docid": "c2934d99dc34e67b215c5a87aa4de601", "score": "0.482102", "text": "func (c *frContext) setDst(dst draw.Image) {\n\tc.dst = dst\n}", "title": "" }, { "docid": "fb5e333d9e6c712c1643ca8d704eac73", "score": "0.4807221", "text": "func (b DataPullReqBuilder) WithSrc(src sim.Port) DataPullReqBuilder {\n\tb.src = src\n\treturn b\n}", "title": "" }, { "docid": "a3ddfc7676cc1929b970412371f8adaf", "score": "0.48006493", "text": "func TargetPort(port string) 
ChannelOption {\n\treturn func(c *channelOptions) {\n\t\tc.targetPort = port\n\t}\n}", "title": "" }, { "docid": "a3ddfc7676cc1929b970412371f8adaf", "score": "0.48006493", "text": "func TargetPort(port string) ChannelOption {\n\treturn func(c *channelOptions) {\n\t\tc.targetPort = port\n\t}\n}", "title": "" }, { "docid": "1c543edbb0db4d4ef0ecbc7a64481b8a", "score": "0.47983626", "text": "func WithServerPort(port string) OptionFunc {\n\treturn func(c *config) {\n\t\tc.serverPort = port\n\t}\n}", "title": "" }, { "docid": "80d164122a4aae232a8e9122b9cdbfc4", "score": "0.47706836", "text": "func WithHttpRunnerPort(port int) HttpRunnerOption {\n\treturn func(hr *HttpRunner) {\n\t\thr.port = port\n\t}\n}", "title": "" }, { "docid": "dbb987c0acaedb7078ab341e9f05868d", "score": "0.47643846", "text": "func (ex *ExcludeConfig) AddDstIP(dst string) error {\n\tip := net.ParseIP(dst)\n\tif ip == nil {\n\t\treturn errors.New(\"invalid ip: \" + dst)\n\t}\n\tif ex.DstIPs == nil {\n\t\tex.DstIPs = map[[16]byte]bool{}\n\t}\n\tkey := [16]byte{}\n\tif ip.To4() != nil {\n\t\t// NOTE: The Linux-native byte position for IPv4 addresses is the first four bytes.\n\t\t// The net.IP package format uses the last four bytes. Copy the net.IP bytes to a\n\t\t// new array to generate a key for dstIPs.\n\t\tcopy(key[:], ip[12:])\n\t} else {\n\t\tcopy(key[:], ip[:])\n\t}\n\tex.DstIPs[key] = true\n\treturn nil\n}", "title": "" }, { "docid": "26fbf9e97ebbcf5e045abbd6a026227a", "score": "0.4756646", "text": "func WithPort(port int32, name string) ObjectOption {\n\treturn func(o metav1.Object) {\n\t\td := o.(*appsv1.Deployment)\n\t\tports := &firstDeploymentContainer(d).Ports\n\n\t\t*ports = append(*ports, corev1.ContainerPort{\n\t\t\tContainerPort: port,\n\t\t\tName: name,\n\t\t})\n\t}\n}", "title": "" }, { "docid": "d15f91ab2af62e5b2bf2ef2a2ebfbade", "score": "0.47102898", "text": "func (options) HostPort(hostPort string) Option {\n\treturn func(o *options) {\n\t\to.hostPort = hostPort\n\t}\n}", "title": "" }, { "docid": "3bcc62bff57c8a0ac0313b5af43afca6", "score": "0.46768132", "text": "func (c *Conn) dstAddr(ip net.IP, zone string) net.Addr {\n\tif !c.test() || c.udpTestPort == 0 {\n\t\treturn &net.IPAddr{\n\t\t\tIP: ip,\n\t\t\tZone: zone,\n\t\t}\n\t}\n\n\treturn &net.UDPAddr{\n\t\tIP: ip,\n\t\tPort: c.udpTestPort,\n\t\tZone: c.ifi.Name,\n\t}\n}", "title": "" }, { "docid": "1a1016286b935f23325975bac98227c9", "score": "0.46760744", "text": "func (h *UDPHeader) DstPortRange() *UIntRange {\n\tif h.pb.DstPort == nil {\n\t\th.pb.DstPort = newPortRange()\n\t}\n\treturn &UIntRange{pb: h.pb.DstPort}\n}", "title": "" }, { "docid": "0e6fe60d6287c9e5e2e03715cd2f8ddb", "score": "0.4535598", "text": "func WithRemovePort() url.ParserOption {\n\treturn &funcCanonParserOption{\n\t\tf: func(p *profile) {\n\t\t\tp.removePort = true\n\t\t},\n\t}\n}", "title": "" }, { "docid": "a2c671b660b10a8f2d6b6b6fee64d24a", "score": "0.45244887", "text": "func (s *BaseTrafficClassListener) EnterMatchDstPort(ctx *MatchDstPortContext) {}", "title": "" }, { "docid": "f18f13444f02fa013e20b42f02cecd47", "score": "0.4523216", "text": "func WithHttpPort(httpPort int) SpiceDbOption {\n\treturn func(s *Node) {\n\t\ts.HttpPort = httpPort\n\t}\n}", "title": "" }, { "docid": "b278644197e7cb7f1caec94ffaca8aa0", "score": "0.45185268", "text": "func (cfg *commandLineServerConfig) WithPort(port int) *commandLineServerConfig {\n\tcfg.port = port\n\treturn cfg\n}", "title": "" }, { "docid": "5dd0972dd9d09fb43df0f89c3a1bfd7f", "score": "0.44890422", "text": "func (params 
*Params) ParseSrcPortOpt(srcPort *int) (err error) {\n\terr = fmt.Errorf(\"ParseSrcPortOpt error: unimplemented\")\n\treturn\n}", "title": "" }, { "docid": "94e13273fc0610a84ccc7de9dc37a2d4", "score": "0.4471174", "text": "func (b PageMigrationRspFromPMCBuilder) WithSrc(src sim.Port) PageMigrationRspFromPMCBuilder {\n\tb.src = src\n\treturn b\n}", "title": "" }, { "docid": "ccb0778d85d623ffbcf37fb0a1b7983e", "score": "0.44685876", "text": "func (this *SipURIImpl) SetPort(p int) {\n\tif this.authority == nil {\n\t\tthis.authority = NewAuthority()\n\t}\n\tthis.authority.SetPort(p)\n}", "title": "" }, { "docid": "8fc630247f5569c0e9a045a02fc0699c", "score": "0.44551724", "text": "func (h *TCPHeader) SrcPortRange() *UIntRange {\n\tif h.pb.SrcPort == nil {\n\t\th.pb.SrcPort = newPortRange()\n\t}\n\treturn &UIntRange{pb: h.pb.SrcPort}\n}", "title": "" }, { "docid": "24c138cdf9fee3c04a18fd69882041fc", "score": "0.44521806", "text": "func (c Config) Port(port uint32) Config {\n\tc.port = port\n\treturn c\n}", "title": "" }, { "docid": "69aac1673a20e852048ec2bb34c84b01", "score": "0.44404554", "text": "func (s *BaseTrafficClassListener) EnterMatchDstPortRange(ctx *MatchDstPortRangeContext) {}", "title": "" }, { "docid": "1b3995205b6a2abc7ac972e0e360e69f", "score": "0.44256216", "text": "func (s *HDLC) SetPort(port io.ReadWriter) error {\n\ts.sendBuffer.Lock()\n\tdefer s.sendBuffer.Unlock()\n\n\ts.port = port\n\n\treturn nil\n}", "title": "" }, { "docid": "6eaff2d90c85184cb497edcbc377dc00", "score": "0.44105595", "text": "func (t *tspClient) Port(port int) *tspClient {\n\tt.addr.Port = port\n\treturn t\n}", "title": "" }, { "docid": "b60aa10b4ab7923fdd5c9ab3dfbbbd3d", "score": "0.44067103", "text": "func SetClientPort(s string) func(*SAMClientForwarder) error {\n\treturn func(c *SAMClientForwarder) error {\n\t\tport, err := strconv.Atoi(s)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Invalid TCP Client Target Port %s; non-number \", s)\n\t\t}\n\t\tif port < 65536 && port > -1 {\n\t\t\tc.Conf.TargetPort = s\n\t\t\treturn nil\n\t\t}\n\t\treturn fmt.Errorf(\"Invalid port\")\n\t}\n}", "title": "" }, { "docid": "708373e2fc81bdabe0349614fd623367", "score": "0.4394204", "text": "func (o *WafCreateSiteRequestOrigin) SetPort(v int32) {\n\to.Port = &v\n}", "title": "" }, { "docid": "dc9e2e513be579c49638457d9bf2a398", "score": "0.43668538", "text": "func (o *TargetGroupTarget) SetPort(v int32) {\n\n\to.Port = &v\n\n}", "title": "" }, { "docid": "24f955da3fc76a5f02b32ae7ddc29d9a", "score": "0.43623954", "text": "func (o RouteSpecTcpRouteMatchOutput) Port() pulumi.IntPtrOutput {\n\treturn o.ApplyT(func(v RouteSpecTcpRouteMatch) *int { return v.Port }).(pulumi.IntPtrOutput)\n}", "title": "" }, { "docid": "ffabe4f4d1e49125f0ee297d48071462", "score": "0.43577403", "text": "func (b PageMigrationReqToPMCBuilder) WithSrc(src sim.Port) PageMigrationReqToPMCBuilder {\n\tb.src = src\n\treturn b\n}", "title": "" }, { "docid": "aaa820f218e726bcd81fc2c93db70b19", "score": "0.43420485", "text": "func Port(port string) func(*serverOpts) {\n\treturn func(s *serverOpts) {\n\t\ts.port = port\n\t}\n}", "title": "" }, { "docid": "72b8404d3700243b94426a2a2f10f9ab", "score": "0.43303716", "text": "func (c *Connector) SetPort(isInput bool, port int) {\n\tif isInput {\n\t\tc.ToPort = port\n\t\treturn\n\t}\n\tc.FromPort = port\n}", "title": "" }, { "docid": "3250aa405e898d2098b61e51d845e313", "score": "0.43180978", "text": "func (h *UDPHeader) SrcPortRange() *UIntRange {\n\tif h.pb.SrcPort == nil {\n\t\th.pb.SrcPort = 
newPortRange()\n\t}\n\treturn &UIntRange{pb: h.pb.SrcPort}\n}", "title": "" }, { "docid": "ee5feefd02f3c4d13977452535e4c185", "score": "0.42994556", "text": "func (o *NiatelemetryHttpsAclFilterDetails) SetDestFromPort(v string) {\n\to.DestFromPort = &v\n}", "title": "" }, { "docid": "8a753cfbd48c13409c6329c28f6be428", "score": "0.4298493", "text": "func (o *IpEvent) GetDestPort() int32 {\n\tif o == nil || o.DestPort == nil {\n\t\tvar ret int32\n\t\treturn ret\n\t}\n\treturn *o.DestPort\n}", "title": "" }, { "docid": "e2d3e818784adfb6d598764078254016", "score": "0.42980364", "text": "func (o *V1TCPSocketAction) SetPort(v int32) {\n\to.Port = &v\n}", "title": "" }, { "docid": "e7479f7fcd7fa4df77f68667d196a0ad", "score": "0.4290856", "text": "func (o *NiatelemetryHttpsAclFilterDetails) GetDestToPort() string {\n\tif o == nil || o.DestToPort == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.DestToPort\n}", "title": "" }, { "docid": "f39be7bc906dc1ccc2e37b5d08751487", "score": "0.4280443", "text": "func (s *server) SetPort(protocal, addr string) {\n\ts.protocal = protocal\n\ts.ioaddr = addr\n}", "title": "" }, { "docid": "180a2eb4b1cd0b4c189afc737d864a30", "score": "0.4267382", "text": "func (o *LoraCommand) SetPort(v int32) {\n\to.Port = v\n}", "title": "" }, { "docid": "a97620bf36d725de50bdb0a6fa72ece7", "score": "0.42671734", "text": "func (h *IPv4Header) WithSrcAddress(addr string) *IPv4Header {\n\th.pb.SrcAddr = addrRangeSingle(addr)\n\treturn h\n}", "title": "" }, { "docid": "4447412f1d77b79c403bcbcf61edefb5", "score": "0.42657083", "text": "func (configuration *FirewallRuleConfiguration) MatchSourcePort(port int) *FirewallRuleConfiguration {\n\tsourceScope := &configuration.Source\n\tsourceScope.Port = &FirewallRulePort{\n\t\tBegin: port,\n\t}\n\tsourceScope.PortListID = nil\n\n\treturn configuration\n}", "title": "" }, { "docid": "1b9c969fa085858578c8c4f29c538282", "score": "0.4262852", "text": "func WithDispatchPort(dispatchPort int) SpiceDbOption {\n\treturn func(s *Node) {\n\t\ts.DispatchPort = dispatchPort\n\t}\n}", "title": "" }, { "docid": "d284d49587b2b54721d3d9c81ac6478d", "score": "0.42616132", "text": "func (o *NiatelemetryHttpsAclFilterDetails) HasDestToPort() bool {\n\tif o != nil && o.DestToPort != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "title": "" }, { "docid": "014a52b07e7a605eb2226fb8e7669c5d", "score": "0.4257537", "text": "func applySwarmPortConfigOption(rep repo.Repo, ports string) error {\n\tvar parts []string\n\tif ports != \"\" {\n\t\tparts = strings.Split(ports, \",\")\n\t}\n\tvar tcp, ws string\n\n\tswitch len(parts) {\n\tcase 1:\n\t\ttcp = parts[0]\n\tcase 2:\n\t\ttcp = parts[0]\n\t\tws = parts[1]\n\tdefault:\n\t\ttcp = GetRandomPort()\n\t\tws = GetRandomPort()\n\t}\n\n\tlist := []string{\n\t\tfmt.Sprintf(\"/ip4/0.0.0.0/tcp/%s\", tcp),\n\t\tfmt.Sprintf(\"/ip6/::/tcp/%s\", tcp),\n\t}\n\tif ws != \"\" {\n\t\tlist = append(list, fmt.Sprintf(\"/ip4/0.0.0.0/tcp/%s/ws\", ws))\n\t\tlist = append(list, fmt.Sprintf(\"/ip6/::/tcp/%s/ws\", ws))\n\t}\n\n\treturn rep.SetConfigKey(\"Addresses.Swarm\", list)\n}", "title": "" }, { "docid": "1d35664aad931cb8d477fa4c1b375cb7", "score": "0.42452994", "text": "func (s *BaseTrafficClassListener) ExitMatchDstPort(ctx *MatchDstPortContext) {}", "title": "" }, { "docid": "2d9870a6503fca318a562c627a05d48b", "score": "0.4238829", "text": "func (o RouteSpecTcpRouteActionWeightedTargetOutput) Port() pulumi.IntPtrOutput {\n\treturn o.ApplyT(func(v RouteSpecTcpRouteActionWeightedTarget) *int { return v.Port 
}).(pulumi.IntPtrOutput)\n}", "title": "" }, { "docid": "f1d5bcad5278eb2bc8b02e4b2bb70356", "score": "0.42375132", "text": "func SetFixedPort(protocol, port string) {\n\tdefaultProtocolPort[protocol] = port\n}", "title": "" }, { "docid": "787abdbe9c40adf6038168f2e6964f88", "score": "0.4228509", "text": "func (o *NiatelemetryHttpsAclFilterDetails) GetSrcToPort() string {\n\tif o == nil || o.SrcToPort == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.SrcToPort\n}", "title": "" }, { "docid": "1fc2e0ddcc21690429a37e3064e3d8ff", "score": "0.4228463", "text": "func Port(port uint16) ConfigOption {\n\treturn func(s *Server) error {\n\t\ts.Port = port\n\t\treturn nil\n\t}\n}", "title": "" }, { "docid": "e239be0398db7772c59bfa4592b8b833", "score": "0.4226858", "text": "func (configuration *FirewallRuleConfiguration) MatchDestinationPort(port int) *FirewallRuleConfiguration {\n\tdestinationScope := &configuration.Destination\n\tdestinationScope.Port = &FirewallRulePort{\n\t\tBegin: port,\n\t}\n\tdestinationScope.PortListID = nil\n\n\treturn configuration\n}", "title": "" }, { "docid": "3357cc5276ea576ecbab3d7ec0979a6d", "score": "0.42251724", "text": "func (o *LoraCommandWeb) SetPort(v int32) {\n\to.Port = v\n}", "title": "" }, { "docid": "c3c49012fbbe8b038f6c7f37a888d575", "score": "0.42190683", "text": "func (l *LogEntry) WithStatsdPort(port int) *LogEntry {\n\treturn l.withField(\"statsd_port\", port)\n}", "title": "" }, { "docid": "8b52a5f10031bcc6b0d0a05f5e86b5a5", "score": "0.4217113", "text": "func (srv *CmdServer) StartWithPort(port int) error {\r\n\r\n\t// Convert port int to string\r\n\tservePort := fmt.Sprintf(\":%d\", port)\r\n\r\n\t// Save and log port\r\n\tsrv.Port = port\r\n\tfmt.Println(\"Server started on port\", srv.Port)\r\n\r\n\t// Start server listening\r\n\tlog.Fatal(http.ListenAndServe(servePort, srv.Router))\r\n\r\n\treturn nil\r\n}", "title": "" }, { "docid": "8431a95835a03534d4642aeeb360830d", "score": "0.42162815", "text": "func HostPort(host string, port uint64) string {\n\treturn host + \":\" + strconv.FormatUint(port, 10)\n}", "title": "" }, { "docid": "a50986766325833a6baf3261a82f7051", "score": "0.4197536", "text": "func (d *Device) ReverseTCP(ctx context.Context, hostPort int) (int, error) {\n\tout, err := d.Command(ctx, \"reverse\", \"tcp:0\", fmt.Sprintf(\"tcp:%d\", hostPort)).Output(testexec.DumpLogOnError)\n\tif err != nil {\n\t\treturn -1, err\n\t}\n\treturn strconv.Atoi(strings.TrimSpace(string(out)))\n}", "title": "" }, { "docid": "5d6979861675ebadbeefdd366172c1cb", "score": "0.41853124", "text": "func (p *Proc) SetAppPort(port routing.Port) {\n\tp.portMx.Lock()\n\tdefer p.portMx.Unlock()\n\tp.port = port\n}", "title": "" }, { "docid": "a9890cee6bd620e73f0df39dc02d64f2", "score": "0.41789716", "text": "func (b *Beacon) SetPort(port int) *Beacon {\n\tb.port = port\n\treturn b\n}", "title": "" }, { "docid": "392ce4521d295d3ad124a7e181a2298b", "score": "0.4176584", "text": "func (o *NiatelemetryHttpsAclFilterDetails) SetSrcFromPort(v string) {\n\to.SrcFromPort = &v\n}", "title": "" }, { "docid": "1ecdf1024f4cc9e2e5a034285bd07d64", "score": "0.41715205", "text": "func (o *IpEvent) GetSrcPort() int32 {\n\tif o == nil || o.SrcPort == nil {\n\t\tvar ret int32\n\t\treturn ret\n\t}\n\treturn *o.SrcPort\n}", "title": "" }, { "docid": "7419626399622d05a57428bcb45f891c", "score": "0.4171369", "text": "func (j *TestJig) CreateTCPServiceWithPort(ctx context.Context, tweak func(svc *v1.Service), port int32) (*v1.Service, error) {\n\tsvc := 
j.newServiceTemplate(v1.ProtocolTCP, port)\n\tif tweak != nil {\n\t\ttweak(svc)\n\t}\n\tresult, err := j.Client.CoreV1().Services(j.Namespace).Create(ctx, svc, metav1.CreateOptions{})\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create TCP Service %q: %w\", svc.Name, err)\n\t}\n\treturn j.sanityCheckService(result, svc.Spec.Type)\n}", "title": "" }, { "docid": "52d03b5f36e9ee21ca64001f3c762a98", "score": "0.4170338", "text": "func Port(p int) ServerOpt { return ServerOpt(port(p)) }", "title": "" }, { "docid": "3d3781be018e269fccf1d7fe6ac96acc", "score": "0.4167152", "text": "func (c *configurator) SetPortForward(ctx context.Context, port uint16) (err error) {\n\tc.stateMutex.Lock()\n\tdefer c.stateMutex.Unlock()\n\n\tif port == c.portForwarded {\n\t\treturn nil\n\t}\n\n\tif !c.enabled {\n\t\tc.logger.Info(\"firewall disabled, only updating port forwarded internally\")\n\t\tc.portForwarded = port\n\t\treturn nil\n\t}\n\n\tconst tun = string(constants.TUN)\n\tif c.portForwarded > 0 {\n\t\tif err := c.acceptInputToPort(ctx, tun, constants.TCP, c.portForwarded, true); err != nil {\n\t\t\treturn fmt.Errorf(\"cannot remove outdated port forward rule from firewall: %w\", err)\n\t\t}\n\t\tif err := c.acceptInputToPort(ctx, tun, constants.UDP, c.portForwarded, true); err != nil {\n\t\t\treturn fmt.Errorf(\"cannot remove outdated port forward rule from firewall: %w\", err)\n\t\t}\n\t}\n\n\tif port == 0 { // not changing port\n\t\tc.portForwarded = 0\n\t\treturn nil\n\t}\n\n\tif err := c.acceptInputToPort(ctx, tun, constants.TCP, port, false); err != nil {\n\t\treturn fmt.Errorf(\"cannot accept port forwarded through firewall: %w\", err)\n\t}\n\tif err := c.acceptInputToPort(ctx, tun, constants.UDP, port, false); err != nil {\n\t\treturn fmt.Errorf(\"cannot accept port forwarded through firewall: %w\", err)\n\t}\n\treturn nil\n}", "title": "" }, { "docid": "ba27beda24c56598e640305eac88a032", "score": "0.4166844", "text": "func (agent Agent) IPAddrWithPort() string {\n\treturn agent.IP + \":\" + agent.Port\n}", "title": "" }, { "docid": "4e21c0ef8cc9b4038aa5c708b516f97e", "score": "0.41626865", "text": "func OptRelayPort(port uint16) Option {\n\treturn &optRelayPort{DownstreamSourcePort: port}\n}", "title": "" }, { "docid": "f131d22cb588059ba85006c6b739398c", "score": "0.41622043", "text": "func (o *LdapCertificateProvider) SetPort(v int32) {\n\to.Port = v\n}", "title": "" }, { "docid": "aeb00e9d64e9c010f1c3c4e6917f35e5", "score": "0.41586137", "text": "func (o *NiatelemetryHttpsAclFilterDetails) GetDestFromPort() string {\n\tif o == nil || o.DestFromPort == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.DestFromPort\n}", "title": "" }, { "docid": "3c9b71524db7ae92cbbe968fb8574c11", "score": "0.41303363", "text": "func (s Stats) SetOptionStatsdPort(i int) (map[string]interface{}, error) {\n\tm := map[string]string{\n\t\t\"Integer\": strconv.Itoa(i),\n\t}\n\treturn s.c.Request(\"stats/action/setOptionStatsdPort/\", m)\n}", "title": "" }, { "docid": "bdfd02b92971a88ef554cfe17dd9658d", "score": "0.41286367", "text": "func (b *Builder) SetPort(port int) *Builder {\n\tb.port = port\n\treturn b\n}", "title": "" }, { "docid": "f5c1c50ac06a5a7be49d3fddd8eedd9e", "score": "0.41224894", "text": "func ProxyPort(s string) OptionFn {\n\treturn func(srvr *Server) {\n\t\tsrvr.proxyPort = s\n\t}\n}", "title": "" }, { "docid": "7c9470e29334cf1ed9a321fdea178433", "score": "0.4120306", "text": "func (spec *ConnectionSpec) Port(port int) *ConnectionSpec {\n\tspec.port = port\n\treturn spec\n}", 
"title": "" } ]